andrew-healey committed on
Commit 6e15265 · verified · 1 Parent(s): 8dc6b51

Upload folder using huggingface_hub

attention_kindselective_n_heads4_seed1345/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_9/attention_kindselective_n_heads4_seed1345", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_9", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1345, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 6e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "6e-5_61440_4_1345", "n_embd": 256}
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_9/attention_kindselective_n_heads4_seed1345", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_9", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1345, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 5.5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "5.5e-5_61440_4_1345", "n_embd": 256}
attention_kindselective_n_heads4_seed1345/log2.txt CHANGED
@@ -1,267 +1,267 @@
  max_steps: 8750
  0 val loss 11.2669
- 0 val perplexity 78187.7734
- 0 train 11.258430 (lr=1.2000e-07) (hash(x)=134751525)
- 100 val loss 9.9839
- 100 val perplexity 21675.4258
- 100 train 10.008630 (lr=1.2120e-05) (hash(x)=150358957)
- 200 val loss 9.1733
- 200 val perplexity 9635.9756
- 200 train 9.176708 (lr=2.4120e-05) (hash(x)=126319983)
- 300 val loss 7.9611
- 300 val perplexity 2867.2952
- 300 train 7.902992 (lr=3.6120e-05) (hash(x)=159305874)
- 400 val loss 7.5449
- 400 val perplexity 1891.1628
- 400 train 7.521968 (lr=4.8120e-05) (hash(x)=149855631)
- 500 val loss 7.3894
- 500 val perplexity 1618.7214
- 500 train 7.420051 (lr=6.0000e-05) (hash(x)=150706760)
- 600 val loss 7.2891
- 600 val perplexity 1464.2700
- 600 train 7.219808 (lr=5.9980e-05) (hash(x)=146858163)
- 700 val loss 7.2209
- 700 val perplexity 1367.7762
- 700 train 7.094349 (lr=5.9922e-05) (hash(x)=144262072)
- 800 val loss 7.1403
- 800 val perplexity 1261.8079
- 800 train 7.109925 (lr=5.9824e-05) (hash(x)=135443880)
- 900 val loss 7.0661
- 900 val perplexity 1171.6042
- 900 train 7.008464 (lr=5.9687e-05) (hash(x)=153147388)
- 1000 val loss 7.0039
- 1000 val perplexity 1100.8654
- 1000 train 6.929332 (lr=5.9512e-05) (hash(x)=151448445)
- 1100 val loss 6.9303
- 1100 val perplexity 1022.8132
- 1100 train 6.935490 (lr=5.9298e-05) (hash(x)=153161010)
- 1200 val loss 6.8744
- 1200 val perplexity 967.1544
- 1200 train 6.700087 (lr=5.9046e-05) (hash(x)=143227423)
- 1300 val loss 6.8295
- 1300 val perplexity 924.6844
- 1300 train 7.299239 (lr=5.8757e-05) (hash(x)=176373796)
- 1400 val loss 6.7762
- 1400 val perplexity 876.6993
- 1400 train 6.702026 (lr=5.8430e-05) (hash(x)=155989503)
- 1500 val loss 6.7162
- 1500 val perplexity 825.6920
- 1500 train 6.698337 (lr=5.8066e-05) (hash(x)=156507542)
- 1600 val loss 6.6620
- 1600 val perplexity 782.1313
- 1600 train 6.663013 (lr=5.7666e-05) (hash(x)=156078901)
- 1700 val loss 6.6072
- 1700 val perplexity 740.3943
- 1700 train 6.513906 (lr=5.7230e-05) (hash(x)=156889457)
- 1800 val loss 6.5688
- 1800 val perplexity 712.5136
- 1800 train 6.808304 (lr=5.6759e-05) (hash(x)=162477906)
- 1900 val loss 6.5196
- 1900 val perplexity 678.3251
- 1900 train 6.565875 (lr=5.6253e-05) (hash(x)=158648033)
- 2000 val loss 6.4788
- 2000 val perplexity 651.1573
- 2000 train 6.401828 (lr=5.5714e-05) (hash(x)=156732586)
- 2100 val loss 6.4433
- 2100 val perplexity 628.4625
- 2100 train 6.465071 (lr=5.5142e-05) (hash(x)=159721084)
- 2200 val loss 6.3954
- 2200 val perplexity 599.0789
- 2200 train 6.372261 (lr=5.4537e-05) (hash(x)=144907001)
- 2300 val loss 6.3756
- 2300 val perplexity 587.3514
- 2300 train 6.315606 (lr=5.3902e-05) (hash(x)=154541837)
- 2400 val loss 6.3586
- 2400 val perplexity 577.4647
- 2400 train 6.445983 (lr=5.3236e-05) (hash(x)=161706790)
- 2500 val loss 6.3219
- 2500 val perplexity 556.6255
- 2500 train 6.244786 (lr=5.2541e-05) (hash(x)=143556162)
- 2600 val loss 6.2845
- 2600 val perplexity 536.1711
- 2600 train 6.136559 (lr=5.1817e-05) (hash(x)=142425078)
- 2700 val loss 6.2612
- 2700 val perplexity 523.8307
- 2700 train 6.278818 (lr=5.1067e-05) (hash(x)=163195606)
- 2800 val loss 6.2290
- 2800 val perplexity 507.2432
- 2800 train 6.131935 (lr=5.0290e-05) (hash(x)=154078337)
- 2900 val loss 6.2198
- 2900 val perplexity 502.5972
- 2900 train 6.084310 (lr=4.9487e-05) (hash(x)=145186687)
- 3000 val loss 6.1924
- 3000 val perplexity 489.0109
- 3000 train 6.149713 (lr=4.8662e-05) (hash(x)=149726716)
- 3100 val loss 6.1746
- 3100 val perplexity 480.3961
- 3100 train 6.102753 (lr=4.7813e-05) (hash(x)=151122509)
- 3200 val loss 6.1655
- 3200 val perplexity 476.0258
- 3200 train 6.135470 (lr=4.6943e-05) (hash(x)=152172187)
- 3300 val loss 6.1460
- 3300 val perplexity 466.8445
- 3300 train 6.080579 (lr=4.6052e-05) (hash(x)=150581974)
- 3400 val loss 6.1241
- 3400 val perplexity 456.7542
- 3400 train 6.051082 (lr=4.5143e-05) (hash(x)=160061666)
- 3500 val loss 6.1072
- 3500 val perplexity 449.0678
- 3500 train 6.054682 (lr=4.4216e-05) (hash(x)=150556913)
- 3600 val loss 6.0908
- 3600 val perplexity 441.7588
- 3600 train 5.995741 (lr=4.3273e-05) (hash(x)=148311961)
- 3700 val loss 6.0760
- 3700 val perplexity 435.2701
- 3700 train 5.968667 (lr=4.2315e-05) (hash(x)=150305284)
- 3800 val loss 6.0523
- 3800 val perplexity 425.0813
- 3800 train 6.064778 (lr=4.1343e-05) (hash(x)=148428531)
- 3900 val loss 6.0355
- 3900 val perplexity 418.0211
- 3900 train 5.900431 (lr=4.0360e-05) (hash(x)=142448374)
- 4000 val loss 6.0270
- 4000 val perplexity 414.4525
- 4000 train 5.948363 (lr=3.9365e-05) (hash(x)=158002288)
- 4100 val loss 6.0058
- 4100 val perplexity 405.7936
- 4100 train 5.948852 (lr=3.8362e-05) (hash(x)=151388626)
- 4200 val loss 5.9921
- 4200 val perplexity 400.2397
- 4200 train 5.912879 (lr=3.7351e-05) (hash(x)=145123294)
- 4300 val loss 5.9760
- 4300 val perplexity 393.8557
- 4300 train 5.848454 (lr=3.6333e-05) (hash(x)=145869150)
- 4400 val loss 5.9646
- 4400 val perplexity 389.3833
- 4400 train 5.987700 (lr=3.5311e-05) (hash(x)=146923320)
- 4500 val loss 5.9459
- 4500 val perplexity 382.1999
- 4500 train 5.768764 (lr=3.4285e-05) (hash(x)=146720819)
- 4600 val loss 5.9323
- 4600 val perplexity 377.0094
- 4600 train 5.676044 (lr=3.3257e-05) (hash(x)=140342909)
- 4700 val loss 5.9234
- 4700 val perplexity 373.6711
- 4700 train 6.046507 (lr=3.2229e-05) (hash(x)=163866463)
- 4800 val loss 5.9108
- 4800 val perplexity 369.0188
- 4800 train 5.595890 (lr=3.1202e-05) (hash(x)=133459145)
- 4900 val loss 5.8975
- 4900 val perplexity 364.1420
- 4900 train 5.693576 (lr=3.0178e-05) (hash(x)=143144356)
- 5000 val loss 5.9056
- 5000 val perplexity 367.0866
- 5000 train 5.802285 (lr=2.9157e-05) (hash(x)=132636494)
- 5100 val loss 5.8668
- 5100 val perplexity 353.1207
- 5100 train 5.786910 (lr=2.8143e-05) (hash(x)=157278728)
- 5200 val loss 5.8582
- 5200 val perplexity 350.0958
- 5200 train 5.717509 (lr=2.7135e-05) (hash(x)=148198434)
- 5300 val loss 5.8443
- 5300 val perplexity 345.2503
- 5300 train 5.772146 (lr=2.6136e-05) (hash(x)=138380906)
- 5400 val loss 5.8313
- 5400 val perplexity 340.8168
- 5400 train 5.920127 (lr=2.5147e-05) (hash(x)=163441464)
- 5500 val loss 5.8272
- 5500 val perplexity 339.4170
- 5500 train 5.771481 (lr=2.4169e-05) (hash(x)=154347714)
- 5600 val loss 5.8127
- 5600 val perplexity 334.5254
- 5600 train 5.842416 (lr=2.3204e-05) (hash(x)=148449981)
- 5700 val loss 5.8027
- 5700 val perplexity 331.1774
- 5700 train 5.693919 (lr=2.2253e-05) (hash(x)=139300274)
- 5800 val loss 5.7951
- 5800 val perplexity 328.6917
- 5800 train 5.783244 (lr=2.1318e-05) (hash(x)=154857144)
- 5900 val loss 5.7837
- 5900 val perplexity 324.9723
- 5900 train 5.845992 (lr=2.0400e-05) (hash(x)=151756013)
- 6000 val loss 5.7775
- 6000 val perplexity 322.9481
- 6000 train 5.678282 (lr=1.9500e-05) (hash(x)=145414657)
- 6100 val loss 5.7698
- 6100 val perplexity 320.4596
- 6100 train 5.592708 (lr=1.8620e-05) (hash(x)=139501217)
- 6200 val loss 5.7579
- 6200 val perplexity 316.6911
- 6200 train 5.798489 (lr=1.7760e-05) (hash(x)=155783358)
- 6300 val loss 5.7509
- 6300 val perplexity 314.4768
- 6300 train 5.809797 (lr=1.6923e-05) (hash(x)=157033091)
- 6400 val loss 5.7465
- 6400 val perplexity 313.1020
- 6400 train 5.552113 (lr=1.6108e-05) (hash(x)=144475330)
- 6500 val loss 5.7366
- 6500 val perplexity 310.0059
- 6500 train 5.773449 (lr=1.5319e-05) (hash(x)=124041822)
- 6600 val loss 5.7329
- 6600 val perplexity 308.8680
- 6600 train 5.667017 (lr=1.4555e-05) (hash(x)=142131981)
- 6700 val loss 5.7213
- 6700 val perplexity 305.3057
- 6700 train 5.823481 (lr=1.3817e-05) (hash(x)=154916248)
- 6800 val loss 5.7211
- 6800 val perplexity 305.2263
- 6800 train 5.719387 (lr=1.3108e-05) (hash(x)=147996387)
- 6900 val loss 5.7133
- 6900 val perplexity 302.8786
- 6900 train 5.747306 (lr=1.2427e-05) (hash(x)=153422428)
- 7000 val loss 5.7054
- 7000 val perplexity 300.4824
- 7000 train 5.728880 (lr=1.1777e-05) (hash(x)=174135078)
- 7100 val loss 5.7028
- 7100 val perplexity 299.7173
- 7100 train 5.778639 (lr=1.1157e-05) (hash(x)=160382475)
- 7200 val loss 5.6978
- 7200 val perplexity 298.2030
- 7200 train 5.788217 (lr=1.0568e-05) (hash(x)=155310085)
- 7300 val loss 5.6913
- 7300 val perplexity 296.2772
- 7300 train 5.824478 (lr=1.0012e-05) (hash(x)=159329031)
- 7400 val loss 5.6893
- 7400 val perplexity 295.6850
- 7400 train 5.633309 (lr=9.4899e-06) (hash(x)=151577269)
- 7500 val loss 5.6847
- 7500 val perplexity 294.3328
- 7500 train 5.734163 (lr=9.0014e-06) (hash(x)=160620287)
- 7600 val loss 5.6801
- 7600 val perplexity 292.9817
- 7600 train 5.569936 (lr=8.5478e-06) (hash(x)=143711597)
- 7700 val loss 5.6764
- 7700 val perplexity 291.8844
- 7700 train 5.676998 (lr=8.1297e-06) (hash(x)=149243577)
- 7800 val loss 5.6739
- 7800 val perplexity 291.1786
- 7800 train 5.664146 (lr=7.7476e-06) (hash(x)=147677247)
- 7900 val loss 5.6663
- 7900 val perplexity 288.9625
- 7900 train 5.614513 (lr=7.4021e-06) (hash(x)=145801236)
- 8000 val loss 5.6648
- 8000 val perplexity 288.5190
- 8000 train 5.780615 (lr=7.0937e-06) (hash(x)=158007655)
- 8100 val loss 5.6641
- 8100 val perplexity 288.3269
- 8100 train 5.724568 (lr=6.8229e-06) (hash(x)=158030048)
- 8200 val loss 5.6592
- 8200 val perplexity 286.9110
- 8200 train 5.510582 (lr=6.5900e-06) (hash(x)=150674725)
- 8300 val loss 5.6571
- 8300 val perplexity 286.3263
- 8300 train 5.562442 (lr=6.3954e-06) (hash(x)=150805711)
- 8400 val loss 5.6521
- 8400 val perplexity 284.8779
- 8400 train 5.526485 (lr=6.2395e-06) (hash(x)=138396210)
- 8500 val loss 5.6513
- 8500 val perplexity 284.6625
- 8500 train 5.528487 (lr=6.1223e-06) (hash(x)=156172740)
- 8600 val loss 5.6482
- 8600 val perplexity 283.7753
- 8600 train 5.684916 (lr=6.0440e-06) (hash(x)=154911352)
- 8700 val loss 5.6456
- 8700 val perplexity 283.0405
- 8700 train 5.704347 (lr=6.0049e-06) (hash(x)=153446789)
- 8749 val loss 5.6443
- 8749 val perplexity 282.6697
+ 0 val perplexity 78187.8516
+ 0 train 11.258394 (lr=1.1000e-07) (hash(x)=134751525)
+ 100 val loss 9.9976
+ 100 val perplexity 21974.2227
+ 100 train 10.021976 (lr=1.1110e-05) (hash(x)=150358957)
+ 200 val loss 9.2695
+ 200 val perplexity 10609.2920
+ 200 train 9.278807 (lr=2.2110e-05) (hash(x)=126319983)
+ 300 val loss 8.0105
+ 300 val perplexity 3012.2820
+ 300 train 7.949356 (lr=3.3110e-05) (hash(x)=159305874)
+ 400 val loss 7.5227
+ 400 val perplexity 1849.6167
+ 400 train 7.498507 (lr=4.4110e-05) (hash(x)=149855631)
+ 500 val loss 7.3664
+ 500 val perplexity 1581.9810
+ 500 train 7.399071 (lr=5.5000e-05) (hash(x)=150706760)
+ 600 val loss 7.2326
+ 600 val perplexity 1383.7897
+ 600 train 7.160625 (lr=5.4982e-05) (hash(x)=146858163)
+ 700 val loss 7.1190
+ 700 val perplexity 1235.2375
+ 700 train 6.990807 (lr=5.4928e-05) (hash(x)=144262072)
+ 800 val loss 6.9961
+ 800 val perplexity 1092.3348
+ 800 train 6.965110 (lr=5.4839e-05) (hash(x)=135443880)
+ 900 val loss 6.8929
+ 900 val perplexity 985.2578
+ 900 train 6.839639 (lr=5.4713e-05) (hash(x)=153147388)
+ 1000 val loss 6.8104
+ 1000 val perplexity 907.2111
+ 1000 train 6.732587 (lr=5.4553e-05) (hash(x)=151448445)
+ 1100 val loss 6.7108
+ 1100 val perplexity 821.2675
+ 1100 train 6.725699 (lr=5.4357e-05) (hash(x)=153161010)
+ 1200 val loss 6.6303
+ 1200 val perplexity 757.6766
+ 1200 train 6.457338 (lr=5.4126e-05) (hash(x)=143227423)
+ 1300 val loss 6.5620
+ 1300 val perplexity 707.6649
+ 1300 train 7.021236 (lr=5.3860e-05) (hash(x)=176373796)
+ 1400 val loss 6.5022
+ 1400 val perplexity 666.6238
+ 1400 train 6.430148 (lr=5.3561e-05) (hash(x)=155989503)
+ 1500 val loss 6.4467
+ 1500 val perplexity 630.6347
+ 1500 train 6.422615 (lr=5.3227e-05) (hash(x)=156507542)
+ 1600 val loss 6.4011
+ 1600 val perplexity 602.5052
+ 1600 train 6.387425 (lr=5.2860e-05) (hash(x)=156078901)
+ 1700 val loss 6.3629
+ 1700 val perplexity 579.9485
+ 1700 train 6.269570 (lr=5.2461e-05) (hash(x)=156889457)
+ 1800 val loss 6.3314
+ 1800 val perplexity 561.9170
+ 1800 train 6.587677 (lr=5.2029e-05) (hash(x)=162477906)
+ 1900 val loss 6.2904
+ 1900 val perplexity 539.3497
+ 1900 train 6.326413 (lr=5.1565e-05) (hash(x)=158648033)
+ 2000 val loss 6.2677
+ 2000 val perplexity 527.2448
+ 2000 train 6.195924 (lr=5.1071e-05) (hash(x)=156732586)
+ 2100 val loss 6.2429
+ 2100 val perplexity 514.3626
+ 2100 train 6.257056 (lr=5.0547e-05) (hash(x)=159721084)
+ 2200 val loss 6.2077
+ 2200 val perplexity 496.5814
+ 2200 train 6.200092 (lr=4.9993e-05) (hash(x)=144907001)
+ 2300 val loss 6.2001
+ 2300 val perplexity 492.7748
+ 2300 train 6.144407 (lr=4.9410e-05) (hash(x)=154541837)
+ 2400 val loss 6.1663
+ 2400 val perplexity 476.4173
+ 2400 train 6.260852 (lr=4.8800e-05) (hash(x)=161706790)
+ 2500 val loss 6.1369
+ 2500 val perplexity 462.6117
+ 2500 train 6.061900 (lr=4.8162e-05) (hash(x)=143556162)
+ 2600 val loss 6.1219
+ 2600 val perplexity 455.7222
+ 2600 train 5.966206 (lr=4.7499e-05) (hash(x)=142425078)
+ 2700 val loss 6.0938
+ 2700 val perplexity 443.0906
+ 2700 train 6.117737 (lr=4.6811e-05) (hash(x)=163195606)
+ 2800 val loss 6.0680
+ 2800 val perplexity 431.8334
+ 2800 train 5.982263 (lr=4.6099e-05) (hash(x)=154078337)
+ 2900 val loss 6.0549
+ 2900 val perplexity 426.1941
+ 2900 train 5.913872 (lr=4.5364e-05) (hash(x)=145186687)
+ 3000 val loss 6.0264
+ 3000 val perplexity 414.2376
+ 3000 train 5.994296 (lr=4.4606e-05) (hash(x)=149726716)
+ 3100 val loss 6.0056
+ 3100 val perplexity 405.6932
+ 3100 train 5.927385 (lr=4.3828e-05) (hash(x)=151122509)
+ 3200 val loss 5.9966
+ 3200 val perplexity 402.0695
+ 3200 train 5.966908 (lr=4.3031e-05) (hash(x)=152172187)
+ 3300 val loss 5.9800
+ 3300 val perplexity 395.4515
+ 3300 train 5.916352 (lr=4.2215e-05) (hash(x)=150581974)
+ 3400 val loss 5.9623
+ 3400 val perplexity 388.5039
+ 3400 train 5.884938 (lr=4.1381e-05) (hash(x)=160061666)
+ 3500 val loss 5.9419
+ 3500 val perplexity 380.6590
+ 3500 train 5.894887 (lr=4.0532e-05) (hash(x)=150556913)
+ 3600 val loss 5.9305
+ 3600 val perplexity 376.3459
+ 3600 train 5.835590 (lr=3.9667e-05) (hash(x)=148311961)
+ 3700 val loss 5.9138
+ 3700 val perplexity 370.1077
+ 3700 train 5.802270 (lr=3.8789e-05) (hash(x)=150305284)
+ 3800 val loss 5.8978
+ 3800 val perplexity 364.2344
+ 3800 train 5.900220 (lr=3.7898e-05) (hash(x)=148428531)
+ 3900 val loss 5.8802
+ 3900 val perplexity 357.8729
+ 3900 train 5.757717 (lr=3.6996e-05) (hash(x)=142448374)
+ 4000 val loss 5.8676
+ 4000 val perplexity 353.4018
+ 4000 train 5.796079 (lr=3.6085e-05) (hash(x)=158002288)
+ 4100 val loss 5.8497
+ 4100 val perplexity 347.1451
+ 4100 train 5.784432 (lr=3.5165e-05) (hash(x)=151388626)
+ 4200 val loss 5.8321
+ 4200 val perplexity 341.0719
+ 4200 train 5.756764 (lr=3.4238e-05) (hash(x)=145123294)
+ 4300 val loss 5.8195
+ 4300 val perplexity 336.8081
+ 4300 train 5.703814 (lr=3.3305e-05) (hash(x)=145869150)
+ 4400 val loss 5.8130
+ 4400 val perplexity 334.6342
+ 4400 train 5.843296 (lr=3.2368e-05) (hash(x)=146923320)
+ 4500 val loss 5.7975
+ 4500 val perplexity 329.4873
+ 4500 train 5.628051 (lr=3.1428e-05) (hash(x)=146720819)
+ 4600 val loss 5.7863
+ 4600 val perplexity 325.7993
+ 4600 train 5.543585 (lr=3.0486e-05) (hash(x)=140342909)
+ 4700 val loss 5.7761
+ 4700 val perplexity 322.4923
+ 4700 train 5.914660 (lr=2.9543e-05) (hash(x)=163866463)
+ 4800 val loss 5.7574
+ 4800 val perplexity 316.5242
+ 4800 train 5.449148 (lr=2.8602e-05) (hash(x)=133459145)
+ 4900 val loss 5.7496
+ 4900 val perplexity 314.0524
+ 4900 train 5.549150 (lr=2.7663e-05) (hash(x)=143144356)
+ 5000 val loss 5.7556
+ 5000 val perplexity 315.9481
+ 5000 train 5.664907 (lr=2.6728e-05) (hash(x)=132636494)
+ 5100 val loss 5.7271
+ 5100 val perplexity 307.0887
+ 5100 train 5.646737 (lr=2.5798e-05) (hash(x)=157278728)
+ 5200 val loss 5.7195
+ 5200 val perplexity 304.7547
+ 5200 train 5.568845 (lr=2.4874e-05) (hash(x)=148198434)
+ 5300 val loss 5.7074
+ 5300 val perplexity 301.0816
+ 5300 train 5.628862 (lr=2.3958e-05) (hash(x)=138380906)
+ 5400 val loss 5.6969
+ 5400 val perplexity 297.9376
+ 5400 train 5.790673 (lr=2.3051e-05) (hash(x)=163441464)
+ 5500 val loss 5.6898
+ 5500 val perplexity 295.8444
+ 5500 train 5.624358 (lr=2.2155e-05) (hash(x)=154347714)
+ 5600 val loss 5.6831
+ 5600 val perplexity 293.8553
+ 5600 train 5.732700 (lr=2.1271e-05) (hash(x)=148449981)
+ 5700 val loss 5.6700
+ 5700 val perplexity 290.0321
+ 5700 train 5.560979 (lr=2.0399e-05) (hash(x)=139300274)
+ 5800 val loss 5.6666
+ 5800 val perplexity 289.0583
+ 5800 train 5.662941 (lr=1.9542e-05) (hash(x)=154857144)
+ 5900 val loss 5.6567
+ 5900 val perplexity 286.1906
+ 5900 train 5.713377 (lr=1.8700e-05) (hash(x)=151756013)
+ 6000 val loss 5.6536
+ 6000 val perplexity 285.3265
+ 6000 train 5.558116 (lr=1.7875e-05) (hash(x)=145414657)
+ 6100 val loss 5.6455
+ 6100 val perplexity 283.0208
+ 6100 train 5.462373 (lr=1.7068e-05) (hash(x)=139501217)
+ 6200 val loss 5.6348
+ 6200 val perplexity 279.9936
+ 6200 train 5.671405 (lr=1.6280e-05) (hash(x)=155783358)
+ 6300 val loss 5.6287
+ 6300 val perplexity 278.2988
+ 6300 train 5.696047 (lr=1.5512e-05) (hash(x)=157033091)
+ 6400 val loss 5.6260
+ 6400 val perplexity 277.5588
+ 6400 train 5.424657 (lr=1.4766e-05) (hash(x)=144475330)
+ 6500 val loss 5.6181
+ 6500 val perplexity 275.3672
+ 6500 train 5.671980 (lr=1.4042e-05) (hash(x)=124041822)
+ 6600 val loss 5.6151
+ 6600 val perplexity 274.5500
+ 6600 train 5.548001 (lr=1.3342e-05) (hash(x)=142131981)
+ 6700 val loss 5.6043
+ 6700 val perplexity 271.5820
+ 6700 train 5.703641 (lr=1.2666e-05) (hash(x)=154916248)
+ 6800 val loss 5.6080
+ 6800 val perplexity 272.5901
+ 6800 train 5.602819 (lr=1.2016e-05) (hash(x)=147996387)
+ 6900 val loss 5.5998
+ 6900 val perplexity 270.3646
+ 6900 train 5.625703 (lr=1.1392e-05) (hash(x)=153422428)
+ 7000 val loss 5.5910
+ 7000 val perplexity 268.0145
+ 7000 train 5.609641 (lr=1.0795e-05) (hash(x)=174135078)
+ 7100 val loss 5.5899
+ 7100 val perplexity 267.7013
+ 7100 train 5.663852 (lr=1.0227e-05) (hash(x)=160382475)
+ 7200 val loss 5.5857
+ 7200 val perplexity 266.5980
+ 7200 train 5.676028 (lr=9.6875e-06) (hash(x)=155310085)
+ 7300 val loss 5.5798
+ 7300 val perplexity 265.0145
+ 7300 train 5.710726 (lr=9.1780e-06) (hash(x)=159329031)
+ 7400 val loss 5.5787
+ 7400 val perplexity 264.7280
+ 7400 train 5.517731 (lr=8.6990e-06) (hash(x)=151577269)
+ 7500 val loss 5.5763
+ 7500 val perplexity 264.0891
+ 7500 train 5.629300 (lr=8.2513e-06) (hash(x)=160620287)
+ 7600 val loss 5.5709
+ 7600 val perplexity 262.6826
+ 7600 train 5.452636 (lr=7.8355e-06) (hash(x)=143711597)
+ 7700 val loss 5.5694
+ 7700 val perplexity 262.2819
+ 7700 train 5.560287 (lr=7.4522e-06) (hash(x)=149243577)
+ 7800 val loss 5.5679
+ 7800 val perplexity 261.8705
+ 7800 train 5.552493 (lr=7.1019e-06) (hash(x)=147677247)
+ 7900 val loss 5.5609
+ 7900 val perplexity 260.0510
+ 7900 train 5.508999 (lr=6.7852e-06) (hash(x)=145801236)
+ 8000 val loss 5.5601
+ 8000 val perplexity 259.8535
+ 8000 train 5.676331 (lr=6.5025e-06) (hash(x)=158007655)
+ 8100 val loss 5.5592
+ 8100 val perplexity 259.6133
+ 8100 train 5.619223 (lr=6.2543e-06) (hash(x)=158030048)
+ 8200 val loss 5.5549
+ 8200 val perplexity 258.5128
+ 8200 train 5.401045 (lr=6.0408e-06) (hash(x)=150674725)
+ 8300 val loss 5.5543
+ 8300 val perplexity 258.3459
+ 8300 train 5.462399 (lr=5.8625e-06) (hash(x)=150805711)
+ 8400 val loss 5.5496
+ 8400 val perplexity 257.1231
+ 8400 train 5.412580 (lr=5.7195e-06) (hash(x)=138396210)
+ 8500 val loss 5.5492
+ 8500 val perplexity 257.0238
+ 8500 train 5.425281 (lr=5.6121e-06) (hash(x)=156172740)
+ 8600 val loss 5.5468
+ 8600 val perplexity 256.4105
+ 8600 train 5.577955 (lr=5.5404e-06) (hash(x)=154911352)
+ 8700 val loss 5.5442
+ 8700 val perplexity 255.7428
+ 8700 train 5.603814 (lr=5.5045e-06) (hash(x)=153446789)
+ 8749 val loss 5.5424
+ 8749 val perplexity 255.2849
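In both logs, val perplexity is simply exp(val loss); e.g. exp(5.5424) ≈ 255.28, the final value above. (At step 0 both runs print the same rounded loss, 11.2669, but slightly different perplexities, 78187.7734 vs 78187.8516, which suggests perplexity is computed from the unrounded loss.) A quick sanity check against the logged values:

```python
import math

# val perplexity == exp(val loss), up to the 4-decimal rounding of the
# printed loss. Pairs taken from the new log above.
for loss, ppl in [(11.2669, 78187.8516), (5.6443, 282.6697), (5.5424, 255.2849)]:
    assert math.isclose(math.exp(loss), ppl, rel_tol=1e-4)
```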
attention_kindselective_n_heads4_seed1345/model_08749.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:cbe65aa68ce8a1b132bdcc93dd306f10bd3d23a968a6861cbaed251ab68445e7
+ oid sha256:a1de0ab893d2a408a9264a4952538b0c26197f2b53386e7e3c4c157047ba65d4
  size 92843394
attention_kindselective_n_heads4_seed1345/optimizer_08749.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0c491b43071fa7701cbaa61eb611374932299ce3707e387499ccfc8d42d33b12
+ oid sha256:85575ed77760a9c620403c462e5192c0729b3c617206a46c275ae26232b8f68b
  size 179406214
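The two .pt changes above are Git LFS pointer updates: only the sha256 oid changes, while the payload size stays identical. After downloading the actual files, the pointers can be verified locally; a minimal sketch, with the path given relative to a hypothetical local clone of this repo:

```python
import hashlib

def matches_lfs_pointer(path: str, oid: str, size: int) -> bool:
    """Check a downloaded file against its Git LFS pointer (sha256 oid + byte size)."""
    h = hashlib.sha256()
    n = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
            h.update(chunk)
            n += len(chunk)
    return h.hexdigest() == oid and n == size

# oid/size taken from the new pointer for model_08749.pt in this commit:
print(matches_lfs_pointer(
    "attention_kindselective_n_heads4_seed1345/model_08749.pt",
    "a1de0ab893d2a408a9264a4952538b0c26197f2b53386e7e3c4c157047ba65d4",
    92843394,
))
```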