andrew-healey committed
Commit 398cd3f · verified · 1 Parent(s): 6e15265

Upload folder using huggingface_hub

attention_kindselective_n_heads4_seed1342/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_9/attention_kindselective_n_heads4_seed1342", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_9", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1342, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 6e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "6e-5_61440_4_1342", "n_embd": 256}

+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_9/attention_kindselective_n_heads4_seed1342", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_9", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1342, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 5.5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "5.5e-5_61440_4_1342", "n_embd": 256}
attention_kindselective_n_heads4_seed1342/log2.txt CHANGED
@@ -1,267 +1,267 @@
  max_steps: 8750
  0 val loss 11.2898
  0 val perplexity 80004.5469
- 0 train 11.289051 (lr=1.2000e-07) (hash(x)=150332693)
- 100 val loss 9.9615
- 100 val perplexity 21195.4160
- 100 train 9.912132 (lr=1.2120e-05) (hash(x)=149277319)
- 200 val loss 9.0154
- 200 val perplexity 8229.0254
- 200 train 9.076880 (lr=2.4120e-05) (hash(x)=155934075)
- 300 val loss 7.8700
- 300 val perplexity 2617.4729
- 300 train 7.893194 (lr=3.6120e-05) (hash(x)=165402628)
- 400 val loss 7.5153
- 400 val perplexity 1835.8740
- 400 train 7.302748 (lr=4.8120e-05) (hash(x)=139168939)
- 500 val loss 7.3772
- 500 val perplexity 1599.0566
- 500 train 7.295662 (lr=6.0000e-05) (hash(x)=147134338)
- 600 val loss 7.2687
- 600 val perplexity 1434.7521
- 600 train 7.172646 (lr=5.9980e-05) (hash(x)=159086459)
- 700 val loss 7.1673
- 700 val perplexity 1296.3660
- 700 train 7.202695 (lr=5.9922e-05) (hash(x)=152399099)
- 800 val loss 7.0257
- 800 val perplexity 1125.1729
- 800 train 7.027829 (lr=5.9824e-05) (hash(x)=156548843)
- 900 val loss 6.9291
- 900 val perplexity 1021.5352
- 900 train 6.900503 (lr=5.9687e-05) (hash(x)=157889911)
- 1000 val loss 6.8540
- 1000 val perplexity 947.6541
- 1000 train 6.645953 (lr=5.9512e-05) (hash(x)=142943707)
- 1100 val loss 6.7872
- 1100 val perplexity 886.4179
- 1100 train 6.784300 (lr=5.9298e-05) (hash(x)=148938689)
- 1200 val loss 6.6919
- 1200 val perplexity 805.8444
- 1200 train 6.832088 (lr=5.9046e-05) (hash(x)=161962903)
- 1300 val loss 6.6179
- 1300 val perplexity 748.3896
- 1300 train 6.603671 (lr=5.8757e-05) (hash(x)=145979235)
- 1400 val loss 6.5546
- 1400 val perplexity 702.4713
- 1400 train 6.501323 (lr=5.8430e-05) (hash(x)=159157507)
- 1500 val loss 6.5041
- 1500 val perplexity 667.8551
- 1500 train 6.537813 (lr=5.8066e-05) (hash(x)=145943130)
- 1600 val loss 6.4396
- 1600 val perplexity 626.1458
- 1600 train 6.243019 (lr=5.7666e-05) (hash(x)=143249173)
- 1700 val loss 6.3910
- 1700 val perplexity 596.4265
- 1700 train 6.449312 (lr=5.7230e-05) (hash(x)=173907143)
- 1800 val loss 6.3457
- 1800 val perplexity 570.0468
- 1800 train 6.273961 (lr=5.6759e-05) (hash(x)=138363755)
- 1900 val loss 6.3148
- 1900 val perplexity 552.6835
- 1900 train 6.127532 (lr=5.6253e-05) (hash(x)=146341390)
- 2000 val loss 6.2714
- 2000 val perplexity 529.2251
- 2000 train 6.300304 (lr=5.5714e-05) (hash(x)=154559671)
- 2100 val loss 6.2476
- 2100 val perplexity 516.7497
- 2100 train 6.296726 (lr=5.5142e-05) (hash(x)=155607137)
- 2200 val loss 6.2124
- 2200 val perplexity 498.9062
- 2200 train 6.073236 (lr=5.4537e-05) (hash(x)=153010221)
- 2300 val loss 6.1781
- 2300 val perplexity 482.0536
- 2300 train 6.189386 (lr=5.3902e-05) (hash(x)=160097777)
- 2400 val loss 6.1516
- 2400 val perplexity 469.4609
- 2400 train 6.133095 (lr=5.3236e-05) (hash(x)=135590094)
- 2500 val loss 6.1327
- 2500 val perplexity 460.6616
- 2500 train 6.251341 (lr=5.2541e-05) (hash(x)=147042929)
- 2600 val loss 6.1032
- 2600 val perplexity 447.2693
- 2600 train 6.064587 (lr=5.1817e-05) (hash(x)=163793881)
- 2700 val loss 6.0817
- 2700 val perplexity 437.7921
- 2700 train 6.115384 (lr=5.1067e-05) (hash(x)=153598698)
- 2800 val loss 6.0655
- 2800 val perplexity 430.7336
- 2800 train 5.855768 (lr=5.0290e-05) (hash(x)=133439803)
- 2900 val loss 6.0305
- 2900 val perplexity 415.9322
- 2900 train 5.968013 (lr=4.9487e-05) (hash(x)=148239158)
- 3000 val loss 6.0177
- 3000 val perplexity 410.6143
- 3000 train 6.027388 (lr=4.8662e-05) (hash(x)=150991971)
- 3100 val loss 6.0013
- 3100 val perplexity 403.9464
- 3100 train 6.011987 (lr=4.7813e-05) (hash(x)=157791832)
- 3200 val loss 5.9744
- 3200 val perplexity 393.2289
- 3200 train 6.042794 (lr=4.6943e-05) (hash(x)=163746305)
- 3300 val loss 5.9654
- 3300 val perplexity 389.7220
- 3300 train 5.886434 (lr=4.6052e-05) (hash(x)=155120269)
- 3400 val loss 5.9279
- 3400 val perplexity 375.3517
- 3400 train 5.871515 (lr=4.5143e-05) (hash(x)=152748451)
- 3500 val loss 5.9162
- 3500 val perplexity 371.0053
- 3500 train 5.804614 (lr=4.4216e-05) (hash(x)=147395004)
- 3600 val loss 5.9043
- 3600 val perplexity 366.6275
- 3600 train 5.918603 (lr=4.3273e-05) (hash(x)=157554250)
- 3700 val loss 5.8756
- 3700 val perplexity 356.2478
- 3700 train 5.730301 (lr=4.2315e-05) (hash(x)=147953321)
- 3800 val loss 5.8586
- 3800 val perplexity 350.2188
- 3800 train 5.712625 (lr=4.1343e-05) (hash(x)=146898609)
- 3900 val loss 5.8522
- 3900 val perplexity 347.9937
- 3900 train 5.886589 (lr=4.0360e-05) (hash(x)=150115282)
- 4000 val loss 5.8241
- 4000 val perplexity 338.3399
- 4000 train 5.707231 (lr=3.9365e-05) (hash(x)=142448619)
- 4100 val loss 5.8301
- 4100 val perplexity 340.3960
- 4100 train 5.576627 (lr=3.8362e-05) (hash(x)=141244123)
- 4200 val loss 5.8160
- 4200 val perplexity 335.6147
- 4200 train 5.818020 (lr=3.7351e-05) (hash(x)=165913661)
- 4300 val loss 5.7864
- 4300 val perplexity 325.8335
- 4300 train 5.816000 (lr=3.6333e-05) (hash(x)=155744823)
- 4400 val loss 5.7760
- 4400 val perplexity 322.4549
- 4400 train 5.771632 (lr=3.5311e-05) (hash(x)=160461243)
- 4500 val loss 5.7639
- 4500 val perplexity 318.5829
- 4500 train 5.661434 (lr=3.4285e-05) (hash(x)=156073129)
- 4600 val loss 5.7462
- 4600 val perplexity 312.9993
- 4600 train 5.798613 (lr=3.3257e-05) (hash(x)=152294132)
- 4700 val loss 5.7369
- 4700 val perplexity 310.0911
- 4700 train 5.671713 (lr=3.2229e-05) (hash(x)=140904569)
- 4800 val loss 5.7324
- 4800 val perplexity 308.7122
- 4800 train 5.635421 (lr=3.1202e-05) (hash(x)=156191635)
- 4900 val loss 5.7176
- 4900 val perplexity 304.1834
- 4900 train 5.841964 (lr=3.0178e-05) (hash(x)=145063976)
- 5000 val loss 5.7085
- 5000 val perplexity 301.4318
- 5000 train 5.654516 (lr=2.9157e-05) (hash(x)=160110619)
- 5100 val loss 5.6953
- 5100 val perplexity 297.4629
- 5100 train 5.728802 (lr=2.8143e-05) (hash(x)=156270070)
- 5200 val loss 5.6893
- 5200 val perplexity 295.6986
- 5200 train 5.585282 (lr=2.7135e-05) (hash(x)=138954242)
- 5300 val loss 5.6777
- 5300 val perplexity 292.2853
- 5300 train 5.522610 (lr=2.6136e-05) (hash(x)=146472367)
- 5400 val loss 5.6648
- 5400 val perplexity 288.5367
- 5400 train 5.598588 (lr=2.5147e-05) (hash(x)=146753405)
- 5500 val loss 5.6572
- 5500 val perplexity 286.3393
- 5500 train 5.606112 (lr=2.4169e-05) (hash(x)=147757398)
- 5600 val loss 5.6523
- 5600 val perplexity 284.9454
- 5600 train 5.521980 (lr=2.3204e-05) (hash(x)=151291757)
- 5700 val loss 5.6385
- 5700 val perplexity 281.0490
- 5700 train 5.692122 (lr=2.2253e-05) (hash(x)=156274392)
- 5800 val loss 5.6329
- 5800 val perplexity 279.4854
- 5800 train 5.609037 (lr=2.1318e-05) (hash(x)=152612266)
- 5900 val loss 5.6271
- 5900 val perplexity 277.8664
- 5900 train 5.795071 (lr=2.0400e-05) (hash(x)=148973201)
- 6000 val loss 5.6178
- 6000 val perplexity 275.2767
- 6000 train 5.579207 (lr=1.9500e-05) (hash(x)=158311490)
- 6100 val loss 5.6121
- 6100 val perplexity 273.7303
- 6100 train 5.554811 (lr=1.8620e-05) (hash(x)=147541319)
- 6200 val loss 5.6111
- 6200 val perplexity 273.4489
- 6200 train 5.679410 (lr=1.7760e-05) (hash(x)=154753980)
- 6300 val loss 5.6028
- 6300 val perplexity 271.1972
- 6300 train 5.494817 (lr=1.6923e-05) (hash(x)=140798998)
- 6400 val loss 5.5951
- 6400 val perplexity 269.1085
- 6400 train 5.487599 (lr=1.6108e-05) (hash(x)=149299516)
- 6500 val loss 5.5900
- 6500 val perplexity 267.7465
- 6500 train 5.562661 (lr=1.5319e-05) (hash(x)=148495806)
- 6600 val loss 5.5818
- 6600 val perplexity 265.5546
- 6600 train 5.563025 (lr=1.4555e-05) (hash(x)=153664387)
- 6700 val loss 5.5788
- 6700 val perplexity 264.7507
- 6700 train 5.554018 (lr=1.3817e-05) (hash(x)=153333491)
- 6800 val loss 5.5798
- 6800 val perplexity 265.0316
- 6800 train 5.324367 (lr=1.3108e-05) (hash(x)=150149382)
- 6900 val loss 5.5716
- 6900 val perplexity 262.8433
- 6900 train 5.661313 (lr=1.2427e-05) (hash(x)=152379767)
- 7000 val loss 5.5638
- 7000 val perplexity 260.8132
- 7000 train 5.611398 (lr=1.1777e-05) (hash(x)=150432537)
- 7100 val loss 5.5639
- 7100 val perplexity 260.8436
- 7100 train 5.440166 (lr=1.1157e-05) (hash(x)=148277836)
- 7200 val loss 5.5617
- 7200 val perplexity 260.2775
- 7200 train 5.550549 (lr=1.0568e-05) (hash(x)=168591659)
- 7300 val loss 5.5562
- 7300 val perplexity 258.8360
- 7300 train 5.538674 (lr=1.0012e-05) (hash(x)=152764851)
- 7400 val loss 5.5513
- 7400 val perplexity 257.5660
- 7400 train 5.519680 (lr=9.4899e-06) (hash(x)=142920131)
- 7500 val loss 5.5492
- 7500 val perplexity 257.0265
- 7500 train 5.550600 (lr=9.0014e-06) (hash(x)=146954649)
- 7600 val loss 5.5489
- 7600 val perplexity 256.9645
- 7600 train 5.316984 (lr=8.5478e-06) (hash(x)=144529566)
- 7700 val loss 5.5435
- 7700 val perplexity 255.5719
- 7700 train 5.502102 (lr=8.1297e-06) (hash(x)=142669945)
- 7800 val loss 5.5391
- 7800 val perplexity 254.4444
- 7800 train 5.590072 (lr=7.7476e-06) (hash(x)=161563764)
- 7900 val loss 5.5373
- 7900 val perplexity 253.9967
- 7900 train 5.333921 (lr=7.4021e-06) (hash(x)=141516532)
- 8000 val loss 5.5385
- 8000 val perplexity 254.3059
- 8000 train 5.403804 (lr=7.0937e-06) (hash(x)=152418457)
- 8100 val loss 5.5318
- 8100 val perplexity 252.5901
- 8100 train 5.520045 (lr=6.8229e-06) (hash(x)=161109470)
- 8200 val loss 5.5277
- 8200 val perplexity 251.5600
- 8200 train 5.655939 (lr=6.5900e-06) (hash(x)=154408822)
- 8300 val loss 5.5258
- 8300 val perplexity 251.0971
- 8300 train 5.419258 (lr=6.3954e-06) (hash(x)=146431691)
- 8400 val loss 5.5267
- 8400 val perplexity 251.3187
- 8400 train 5.305446 (lr=6.2395e-06) (hash(x)=150182527)
- 8500 val loss 5.5204
- 8500 val perplexity 249.7403
- 8500 train 5.583275 (lr=6.1223e-06) (hash(x)=154828709)
- 8600 val loss 5.5201
- 8600 val perplexity 249.6640
- 8600 train 5.437249 (lr=6.0440e-06) (hash(x)=154995428)
- 8700 val loss 5.5193
- 8700 val perplexity 249.4662
- 8700 train 5.418331 (lr=6.0049e-06) (hash(x)=142111043)
- 8749 val loss 5.5172
- 8749 val perplexity 248.9265

  max_steps: 8750
  0 val loss 11.2898
  0 val perplexity 80004.5469
+ 0 train 11.289022 (lr=1.1000e-07) (hash(x)=150332693)
+ 100 val loss 9.9785
+ 100 val perplexity 21557.6309
+ 100 train 9.928879 (lr=1.1110e-05) (hash(x)=149277319)
+ 200 val loss 9.0998
+ 200 val perplexity 8953.7422
+ 200 train 9.162449 (lr=2.2110e-05) (hash(x)=155934075)
+ 300 val loss 7.9244
+ 300 val perplexity 2764.0276
+ 300 train 7.947826 (lr=3.3110e-05) (hash(x)=165402628)
+ 400 val loss 7.5156
+ 400 val perplexity 1836.4186
+ 400 train 7.303900 (lr=4.4110e-05) (hash(x)=139168939)
+ 500 val loss 7.3663
+ 500 val perplexity 1581.7880
+ 500 train 7.281931 (lr=5.5000e-05) (hash(x)=147134338)
+ 600 val loss 7.2562
+ 600 val perplexity 1416.9106
+ 600 train 7.163249 (lr=5.4982e-05) (hash(x)=159086459)
+ 700 val loss 7.1584
+ 700 val perplexity 1284.9026
+ 700 train 7.195020 (lr=5.4928e-05) (hash(x)=152399099)
+ 800 val loss 7.0560
+ 800 val perplexity 1159.8362
+ 800 train 7.059469 (lr=5.4839e-05) (hash(x)=156548843)
+ 900 val loss 6.9738
+ 900 val perplexity 1068.2446
+ 900 train 6.945956 (lr=5.4713e-05) (hash(x)=157889911)
+ 1000 val loss 6.8933
+ 1000 val perplexity 985.6544
+ 1000 train 6.688016 (lr=5.4553e-05) (hash(x)=142943707)
+ 1100 val loss 6.8225
+ 1100 val perplexity 918.3061
+ 1100 train 6.816679 (lr=5.4357e-05) (hash(x)=148938689)
+ 1200 val loss 6.7309
+ 1200 val perplexity 837.9177
+ 1200 train 6.869128 (lr=5.4126e-05) (hash(x)=161962903)
+ 1300 val loss 6.6476
+ 1300 val perplexity 770.9232
+ 1300 train 6.630394 (lr=5.3860e-05) (hash(x)=145979235)
+ 1400 val loss 6.5794
+ 1400 val perplexity 720.1292
+ 1400 train 6.523628 (lr=5.3561e-05) (hash(x)=159157507)
+ 1500 val loss 6.5286
+ 1500 val perplexity 684.4431
+ 1500 train 6.563924 (lr=5.3227e-05) (hash(x)=145943130)
+ 1600 val loss 6.4595
+ 1600 val perplexity 638.7619
+ 1600 train 6.259445 (lr=5.2860e-05) (hash(x)=143249173)
+ 1700 val loss 6.4041
+ 1700 val perplexity 604.3295
+ 1700 train 6.467296 (lr=5.2461e-05) (hash(x)=173907143)
+ 1800 val loss 6.3679
+ 1800 val perplexity 582.8306
+ 1800 train 6.292572 (lr=5.2029e-05) (hash(x)=138363755)
+ 1900 val loss 6.3335
+ 1900 val perplexity 563.1382
+ 1900 train 6.143126 (lr=5.1565e-05) (hash(x)=146341390)
+ 2000 val loss 6.2870
+ 2000 val perplexity 537.5458
+ 2000 train 6.311212 (lr=5.1071e-05) (hash(x)=154559671)
+ 2100 val loss 6.2628
+ 2100 val perplexity 524.6801
+ 2100 train 6.310268 (lr=5.0547e-05) (hash(x)=155607137)
+ 2200 val loss 6.2281
+ 2200 val perplexity 506.7824
+ 2200 train 6.084721 (lr=4.9993e-05) (hash(x)=153010221)
+ 2300 val loss 6.1992
+ 2300 val perplexity 492.3499
+ 2300 train 6.212214 (lr=4.9410e-05) (hash(x)=160097777)
+ 2400 val loss 6.1760
+ 2400 val perplexity 481.0809
+ 2400 train 6.155311 (lr=4.8800e-05) (hash(x)=135590094)
+ 2500 val loss 6.1477
+ 2500 val perplexity 467.6450
+ 2500 train 6.267031 (lr=4.8162e-05) (hash(x)=147042929)
+ 2600 val loss 6.1151
+ 2600 val perplexity 452.6300
+ 2600 train 6.078469 (lr=4.7499e-05) (hash(x)=163793881)
+ 2700 val loss 6.0944
+ 2700 val perplexity 443.3677
+ 2700 train 6.127896 (lr=4.6811e-05) (hash(x)=153598698)
+ 2800 val loss 6.0819
+ 2800 val perplexity 437.8585
+ 2800 train 5.872927 (lr=4.6099e-05) (hash(x)=133439803)
+ 2900 val loss 6.0548
+ 2900 val perplexity 426.1370
+ 2900 train 5.988275 (lr=4.5364e-05) (hash(x)=148239158)
+ 3000 val loss 6.0374
+ 3000 val perplexity 418.8012
+ 3000 train 6.044248 (lr=4.4606e-05) (hash(x)=150991971)
+ 3100 val loss 6.0165
+ 3100 val perplexity 410.1316
+ 3100 train 6.021707 (lr=4.3828e-05) (hash(x)=157791832)
+ 3200 val loss 5.9870
+ 3200 val perplexity 398.2261
+ 3200 train 6.056021 (lr=4.3031e-05) (hash(x)=163746305)
+ 3300 val loss 5.9798
+ 3300 val perplexity 395.3527
+ 3300 train 5.901730 (lr=4.2215e-05) (hash(x)=155120269)
+ 3400 val loss 5.9487
+ 3400 val perplexity 383.2661
+ 3400 train 5.896263 (lr=4.1381e-05) (hash(x)=152748451)
+ 3500 val loss 5.9354
+ 3500 val perplexity 378.2086
+ 3500 train 5.818613 (lr=4.0532e-05) (hash(x)=147395004)
+ 3600 val loss 5.9183
+ 3600 val perplexity 371.7606
+ 3600 train 5.936450 (lr=3.9667e-05) (hash(x)=157554250)
+ 3700 val loss 5.8993
+ 3700 val perplexity 364.7847
+ 3700 train 5.752002 (lr=3.8789e-05) (hash(x)=147953321)
+ 3800 val loss 5.8818
+ 3800 val perplexity 358.4565
+ 3800 train 5.737122 (lr=3.7898e-05) (hash(x)=146898609)
+ 3900 val loss 5.8784
+ 3900 val perplexity 357.2249
+ 3900 train 5.921919 (lr=3.6996e-05) (hash(x)=150115282)
+ 4000 val loss 5.8495
+ 4000 val perplexity 347.0485
+ 4000 train 5.720140 (lr=3.6085e-05) (hash(x)=142448619)
+ 4100 val loss 5.8489
+ 4100 val perplexity 346.8653
+ 4100 train 5.590994 (lr=3.5165e-05) (hash(x)=141244123)
+ 4200 val loss 5.8404
+ 4200 val perplexity 343.9145
+ 4200 train 5.850804 (lr=3.4238e-05) (hash(x)=165913661)
+ 4300 val loss 5.8065
+ 4300 val perplexity 332.4451
+ 4300 train 5.839415 (lr=3.3305e-05) (hash(x)=155744823)
+ 4400 val loss 5.7989
+ 4400 val perplexity 329.9486
+ 4400 train 5.796937 (lr=3.2368e-05) (hash(x)=160461243)
+ 4500 val loss 5.7862
+ 4500 val perplexity 325.7645
+ 4500 train 5.693707 (lr=3.1428e-05) (hash(x)=156073129)
+ 4600 val loss 5.7709
+ 4600 val perplexity 320.8156
+ 4600 train 5.820288 (lr=3.0486e-05) (hash(x)=152294132)
+ 4700 val loss 5.7620
+ 4700 val perplexity 317.9967
+ 4700 train 5.701264 (lr=2.9543e-05) (hash(x)=140904569)
+ 4800 val loss 5.7532
+ 4800 val perplexity 315.2115
+ 4800 train 5.662505 (lr=2.8602e-05) (hash(x)=156191635)
+ 4900 val loss 5.7442
+ 4900 val perplexity 312.3885
+ 4900 train 5.856668 (lr=2.7663e-05) (hash(x)=145063976)
+ 5000 val loss 5.7323
+ 5000 val perplexity 308.6840
+ 5000 train 5.685281 (lr=2.6728e-05) (hash(x)=160110619)
+ 5100 val loss 5.7166
+ 5100 val perplexity 303.8726
+ 5100 train 5.745248 (lr=2.5798e-05) (hash(x)=156270070)
+ 5200 val loss 5.7070
+ 5200 val perplexity 300.9782
+ 5200 train 5.602131 (lr=2.4874e-05) (hash(x)=138954242)
+ 5300 val loss 5.6998
+ 5300 val perplexity 298.8035
+ 5300 train 5.549146 (lr=2.3958e-05) (hash(x)=146472367)
+ 5400 val loss 5.6868
+ 5400 val perplexity 294.9346
+ 5400 train 5.617336 (lr=2.3051e-05) (hash(x)=146753405)
+ 5500 val loss 5.6801
+ 5500 val perplexity 292.9851
+ 5500 train 5.628195 (lr=2.2155e-05) (hash(x)=147757398)
+ 5600 val loss 5.6737
+ 5600 val perplexity 291.1140
+ 5600 train 5.540354 (lr=2.1271e-05) (hash(x)=151291757)
+ 5700 val loss 5.6622
+ 5700 val perplexity 287.7889
+ 5700 train 5.716890 (lr=2.0399e-05) (hash(x)=156274392)
+ 5800 val loss 5.6544
+ 5800 val perplexity 285.5433
+ 5800 train 5.625447 (lr=1.9542e-05) (hash(x)=152612266)
+ 5900 val loss 5.6490
+ 5900 val perplexity 284.0074
+ 5900 train 5.817714 (lr=1.8700e-05) (hash(x)=148973201)
+ 6000 val loss 5.6398
+ 6000 val perplexity 281.4183
+ 6000 train 5.600852 (lr=1.7875e-05) (hash(x)=158311490)
+ 6100 val loss 5.6321
+ 6100 val perplexity 279.2485
+ 6100 train 5.568927 (lr=1.7068e-05) (hash(x)=147541319)
+ 6200 val loss 5.6330
+ 6200 val perplexity 279.5024
+ 6200 train 5.703368 (lr=1.6280e-05) (hash(x)=154753980)
+ 6300 val loss 5.6232
+ 6300 val perplexity 276.7728
+ 6300 train 5.516932 (lr=1.5512e-05) (hash(x)=140798998)
+ 6400 val loss 5.6174
+ 6400 val perplexity 275.1835
+ 6400 train 5.505464 (lr=1.4766e-05) (hash(x)=149299516)
+ 6500 val loss 5.6123
+ 6500 val perplexity 273.7734
+ 6500 train 5.594793 (lr=1.4042e-05) (hash(x)=148495806)
+ 6600 val loss 5.6055
+ 6600 val perplexity 271.9237
+ 6600 train 5.585682 (lr=1.3342e-05) (hash(x)=153664387)
+ 6700 val loss 5.6028
+ 6700 val perplexity 271.1919
+ 6700 train 5.585989 (lr=1.2666e-05) (hash(x)=153333491)
+ 6800 val loss 5.6044
+ 6800 val perplexity 271.6152
+ 6800 train 5.347058 (lr=1.2016e-05) (hash(x)=150149382)
+ 6900 val loss 5.5961
+ 6900 val perplexity 269.3717
+ 6900 train 5.685107 (lr=1.1392e-05) (hash(x)=152379767)
+ 7000 val loss 5.5877
+ 7000 val perplexity 267.1243
+ 7000 train 5.639709 (lr=1.0795e-05) (hash(x)=150432537)
+ 7100 val loss 5.5885
+ 7100 val perplexity 267.3259
+ 7100 train 5.467541 (lr=1.0227e-05) (hash(x)=148277836)
+ 7200 val loss 5.5859
+ 7200 val perplexity 266.6448
+ 7200 train 5.576658 (lr=9.6875e-06) (hash(x)=168591659)
+ 7300 val loss 5.5788
+ 7300 val perplexity 264.7546
+ 7300 train 5.557188 (lr=9.1780e-06) (hash(x)=152764851)
+ 7400 val loss 5.5746
+ 7400 val perplexity 263.6494
+ 7400 train 5.541438 (lr=8.6990e-06) (hash(x)=142920131)
+ 7500 val loss 5.5725
+ 7500 val perplexity 263.0795
+ 7500 train 5.570638 (lr=8.2513e-06) (hash(x)=146954649)
+ 7600 val loss 5.5725
+ 7600 val perplexity 263.0981
+ 7600 train 5.347485 (lr=7.8355e-06) (hash(x)=144529566)
+ 7700 val loss 5.5669
+ 7700 val perplexity 261.6309
+ 7700 train 5.527046 (lr=7.4522e-06) (hash(x)=142669945)
+ 7800 val loss 5.5628
+ 7800 val perplexity 260.5567
+ 7800 train 5.617598 (lr=7.1019e-06) (hash(x)=161563764)
+ 7900 val loss 5.5607
+ 7900 val perplexity 260.0027
+ 7900 train 5.355335 (lr=6.7852e-06) (hash(x)=141516532)
+ 8000 val loss 5.5610
+ 8000 val perplexity 260.0852
+ 8000 train 5.424737 (lr=6.5025e-06) (hash(x)=152418457)
+ 8100 val loss 5.5556
+ 8100 val perplexity 258.6718
+ 8100 train 5.545348 (lr=6.2543e-06) (hash(x)=161109470)
+ 8200 val loss 5.5520
+ 8200 val perplexity 257.7588
+ 8200 train 5.681771 (lr=6.0408e-06) (hash(x)=154408822)
+ 8300 val loss 5.5499
+ 8300 val perplexity 257.2069
+ 8300 train 5.438511 (lr=5.8625e-06) (hash(x)=146431691)
+ 8400 val loss 5.5507
+ 8400 val perplexity 257.4096
+ 8400 train 5.328045 (lr=5.7195e-06) (hash(x)=150182527)
+ 8500 val loss 5.5443
+ 8500 val perplexity 255.7633
+ 8500 train 5.612017 (lr=5.6121e-06) (hash(x)=154828709)
+ 8600 val loss 5.5440
+ 8600 val perplexity 255.7007
+ 8600 train 5.459042 (lr=5.5404e-06) (hash(x)=154995428)
+ 8700 val loss 5.5439
+ 8700 val perplexity 255.6694
+ 8700 train 5.444529 (lr=5.5045e-06) (hash(x)=142111043)
+ 8749 val loss 5.5424
+ 8749 val perplexity 255.2784
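Each eval step logs three lines: val loss, val perplexity (which is just exp(val loss), e.g. exp(5.5424) ≈ 255.28), and a train line carrying the current lr and a batch hash; the hash(x) values are identical on both sides, which indicates the two runs saw the same data order. The logged lr trace is consistent with linear warmup over warmup_steps followed by cosine decay to max_lr/10. The sketch below reproduces the '+' side's values under that assumption — it is inferred from the logged numbers, not taken from the training script, which is not part of this diff:

```python
import math

def lr_at(step, max_lr=5.5e-5, warmup_steps=500, max_steps=8750, min_ratio=0.1):
    """Schedule inferred from the logged lr values: linear warmup, then
    cosine decay to min_ratio * max_lr. A sketch, not the actual training code."""
    min_lr = min_ratio * max_lr
    if step < warmup_steps:
        return max_lr * (step + 1) / warmup_steps
    progress = (step - warmup_steps) / (max_steps - warmup_steps)
    coeff = 0.5 * (1.0 + math.cos(math.pi * progress))
    return min_lr + coeff * (max_lr - min_lr)

# Matches the '+' side of the log: step 0 -> 1.1000e-07, step 500 -> 5.5000e-05,
# step 600 -> 5.4982e-05, step 8700 -> 5.5045e-06.
for step in (0, 500, 600, 8700):
    print(step, f"{lr_at(step):.4e}")

# Perplexity is exp(val loss): exp(5.5424) ~= 255.28, the final '+' value.
print(math.exp(5.5424))
```

The same function with max_lr=6e-5 reproduces the '-' side (e.g. step 600 -> 5.9980e-05), so the two logs differ only through the lower peak learning rate.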
attention_kindselective_n_heads4_seed1342/model_08749.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:ed254524176e23c51719c63044e63e56c50e88d2fa6b66c49078d9e232a82ccd
  size 92843394

  version https://git-lfs.github.com/spec/v1
+ oid sha256:957e725a82dcd9ed1bd037691b360b3021a01371484023e99935b6d8a8215591
  size 92843394
attention_kindselective_n_heads4_seed1342/optimizer_08749.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:4ec491cf0d3e027e7f68059046708aaa88b97538de7373c8531f904bf4345641
  size 179406214

  version https://git-lfs.github.com/spec/v1
+ oid sha256:72bda2c05a567a2bc11a95bfe22019cdc84340ef8797165189edcf66c1a6746e
  size 179406214
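The .pt entries are Git LFS pointer files (version / oid / size), so the diff only swaps each sha256 oid; the payload sizes are unchanged. A minimal sketch for checking a downloaded checkpoint against its pointer — the local path is hypothetical:

```python
import hashlib

def sha256_of(path, chunk_size=1 << 20):
    """Stream a file and return its sha256 hex digest, the same quantity
    recorded in the 'oid sha256:...' field of a Git LFS pointer."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while chunk := f.read(chunk_size):
            h.update(chunk)
    return h.hexdigest()

# Compare against the oid in the new model pointer above.
expected = "957e725a82dcd9ed1bd037691b360b3021a01371484023e99935b6d8a8215591"
assert sha256_of("attention_kindselective_n_heads4_seed1342/model_08749.pt") == expected
```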