andrew-healey committed (verified)
Commit: a2aba29
Parent(s): 31410e6

Upload folder using huggingface_hub

attention_kindselective_n_heads4_seed1343/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_9/attention_kindselective_n_heads4_seed1343", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_9", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1343, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 6e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "6e-5_61440_4_1343", "n_embd": 256}
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_9/attention_kindselective_n_heads4_seed1343", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_9", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1343, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 4e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "4e-5_61440_4_1343", "n_embd": 256}
attention_kindselective_n_heads4_seed1343/log2.txt CHANGED
@@ -1,267 +1,353 @@
1
  max_steps: 8750
2
  0 val loss 11.3014
3
- 0 val perplexity 80933.0312
4
- 0 train 11.305981 (lr=1.2000e-07) (hash(x)=153418542)
5
- 100 val loss 9.9101
6
- 100 val perplexity 20132.2070
7
- 100 train 9.915108 (lr=1.2120e-05) (hash(x)=143722001)
8
- 200 val loss 8.9741
9
- 200 val perplexity 7895.8794
10
- 200 train 8.961443 (lr=2.4120e-05) (hash(x)=152989689)
11
- 300 val loss 7.8379
12
- 300 val perplexity 2534.8982
13
- 300 train 7.849168 (lr=3.6120e-05) (hash(x)=150071018)
14
- 400 val loss 7.4978
15
- 400 val perplexity 1804.0533
16
- 400 train 7.537123 (lr=4.8120e-05) (hash(x)=153358238)
17
- 500 val loss 7.3770
18
- 500 val perplexity 1598.7753
19
- 500 train 7.246671 (lr=6.0000e-05) (hash(x)=148983354)
20
- 600 val loss 7.2622
21
- 600 val perplexity 1425.3378
22
- 600 train 7.142297 (lr=5.9980e-05) (hash(x)=150770333)
23
- 700 val loss 7.1333
24
- 700 val perplexity 1253.0239
25
- 700 train 7.044869 (lr=5.9922e-05) (hash(x)=145246201)
26
- 800 val loss 7.0512
27
- 800 val perplexity 1154.1879
28
- 800 train 6.876640 (lr=5.9824e-05) (hash(x)=142635842)
29
- 900 val loss 6.9450
30
- 900 val perplexity 1037.9427
31
- 900 train 6.968613 (lr=5.9687e-05) (hash(x)=148299832)
32
- 1000 val loss 6.8522
33
- 1000 val perplexity 945.9458
34
- 1000 train 6.811312 (lr=5.9512e-05) (hash(x)=152887721)
35
- 1100 val loss 6.7728
36
- 1100 val perplexity 873.7812
37
- 1100 train 6.665625 (lr=5.9298e-05) (hash(x)=153721194)
38
- 1200 val loss 6.6789
39
- 1200 val perplexity 795.4495
40
- 1200 train 6.671553 (lr=5.9046e-05) (hash(x)=153047184)
41
- 1300 val loss 6.6015
42
- 1300 val perplexity 736.1836
43
- 1300 train 6.502261 (lr=5.8757e-05) (hash(x)=152466045)
44
- 1400 val loss 6.5517
45
- 1400 val perplexity 700.4123
46
- 1400 train 6.346848 (lr=5.8430e-05) (hash(x)=151991552)
47
- 1500 val loss 6.4958
48
- 1500 val perplexity 662.3223
49
- 1500 train 6.439677 (lr=5.8066e-05) (hash(x)=147892594)
50
- 1600 val loss 6.4459
51
- 1600 val perplexity 630.1219
52
- 1600 train 6.254763 (lr=5.7666e-05) (hash(x)=135678663)
53
- 1700 val loss 6.4038
54
- 1700 val perplexity 604.1632
55
- 1700 train 6.425565 (lr=5.7230e-05) (hash(x)=143909276)
56
- 1800 val loss 6.3652
57
- 1800 val perplexity 581.2531
58
- 1800 train 6.514690 (lr=5.6759e-05) (hash(x)=155393468)
59
- 1900 val loss 6.3237
60
- 1900 val perplexity 557.6194
61
- 1900 train 6.362476 (lr=5.6253e-05) (hash(x)=150957388)
62
- 2000 val loss 6.2864
63
- 2000 val perplexity 537.2301
64
- 2000 train 6.555749 (lr=5.5714e-05) (hash(x)=144030938)
65
- 2100 val loss 6.2681
66
- 2100 val perplexity 527.4645
67
- 2100 train 6.144263 (lr=5.5142e-05) (hash(x)=146254754)
68
- 2200 val loss 6.2279
69
- 2200 val perplexity 506.7155
70
- 2200 train 6.260343 (lr=5.4537e-05) (hash(x)=150630461)
71
- 2300 val loss 6.2063
72
- 2300 val perplexity 495.8635
73
- 2300 train 6.343796 (lr=5.3902e-05) (hash(x)=165208942)
74
- 2400 val loss 6.1864
75
- 2400 val perplexity 486.0691
76
- 2400 train 6.104856 (lr=5.3236e-05) (hash(x)=153448706)
77
- 2500 val loss 6.1644
78
- 2500 val perplexity 475.5321
79
- 2500 train 6.181049 (lr=5.2541e-05) (hash(x)=148505056)
80
- 2600 val loss 6.1333
81
- 2600 val perplexity 460.9561
82
- 2600 train 6.090554 (lr=5.1817e-05) (hash(x)=142911960)
83
- 2700 val loss 6.1207
84
- 2700 val perplexity 455.1940
85
- 2700 train 6.125944 (lr=5.1067e-05) (hash(x)=149951660)
86
- 2800 val loss 6.0935
87
- 2800 val perplexity 442.9666
88
- 2800 train 6.076572 (lr=5.0290e-05) (hash(x)=152956713)
89
- 2900 val loss 6.0703
90
- 2900 val perplexity 432.7994
91
- 2900 train 5.983719 (lr=4.9487e-05) (hash(x)=147247056)
92
- 3000 val loss 6.0504
93
- 3000 val perplexity 424.2618
94
- 3000 train 5.831758 (lr=4.8662e-05) (hash(x)=146911716)
95
- 3100 val loss 6.0237
96
- 3100 val perplexity 413.1187
97
- 3100 train 6.048563 (lr=4.7813e-05) (hash(x)=153282809)
98
- 3200 val loss 6.0134
99
- 3200 val perplexity 408.8848
100
- 3200 train 5.916239 (lr=4.6943e-05) (hash(x)=152009984)
101
- 3300 val loss 5.9970
102
- 3300 val perplexity 402.2332
103
- 3300 train 5.908694 (lr=4.6052e-05) (hash(x)=150012952)
104
- 3400 val loss 5.9796
105
- 3400 val perplexity 395.2637
106
- 3400 train 5.812621 (lr=4.5143e-05) (hash(x)=146217477)
107
- 3500 val loss 5.9550
108
- 3500 val perplexity 385.6905
109
- 3500 train 6.121484 (lr=4.4216e-05) (hash(x)=180156144)
110
- 3600 val loss 5.9456
111
- 3600 val perplexity 382.0807
112
- 3600 train 5.974609 (lr=4.3273e-05) (hash(x)=154243319)
113
- 3700 val loss 5.9268
114
- 3700 val perplexity 374.9348
115
- 3700 train 5.830237 (lr=4.2315e-05) (hash(x)=154330476)
116
- 3800 val loss 5.9061
117
- 3800 val perplexity 367.2632
118
- 3800 train 5.844427 (lr=4.1343e-05) (hash(x)=145988858)
119
- 3900 val loss 5.8908
120
- 3900 val perplexity 361.6909
121
- 3900 train 5.896978 (lr=4.0360e-05) (hash(x)=149648609)
122
- 4000 val loss 5.8791
123
- 4000 val perplexity 357.4788
124
- 4000 train 5.851080 (lr=3.9365e-05) (hash(x)=154149272)
125
- 4100 val loss 5.8564
126
- 4100 val perplexity 349.4548
127
- 4100 train 5.763773 (lr=3.8362e-05) (hash(x)=132608538)
128
- 4200 val loss 5.8378
129
- 4200 val perplexity 343.0103
130
- 4200 train 5.789722 (lr=3.7351e-05) (hash(x)=140443636)
131
- 4300 val loss 5.8315
132
- 4300 val perplexity 340.8677
133
- 4300 train 5.767340 (lr=3.6333e-05) (hash(x)=138919540)
134
- 4400 val loss 5.8177
135
- 4400 val perplexity 336.1942
136
- 4400 train 5.808780 (lr=3.5311e-05) (hash(x)=153594684)
137
- 4500 val loss 5.7994
138
- 4500 val perplexity 330.0956
139
- 4500 train 5.749821 (lr=3.4285e-05) (hash(x)=144084750)
140
- 4600 val loss 5.7861
141
- 4600 val perplexity 325.7362
142
- 4600 train 5.742219 (lr=3.3257e-05) (hash(x)=147423675)
143
- 4700 val loss 5.7725
144
- 4700 val perplexity 321.3294
145
- 4700 train 5.839430 (lr=3.2229e-05) (hash(x)=162157039)
146
- 4800 val loss 5.7589
147
- 4800 val perplexity 317.0058
148
- 4800 train 5.664342 (lr=3.1202e-05) (hash(x)=134970942)
149
- 4900 val loss 5.7494
150
- 4900 val perplexity 313.9998
151
- 4900 train 5.656826 (lr=3.0178e-05) (hash(x)=160093370)
152
- 5000 val loss 5.7386
153
- 5000 val perplexity 310.6271
154
- 5000 train 5.623245 (lr=2.9157e-05) (hash(x)=154986299)
155
- 5100 val loss 5.7228
156
- 5100 val perplexity 305.7704
157
- 5100 train 5.571209 (lr=2.8143e-05) (hash(x)=145406582)
158
- 5200 val loss 5.7165
159
- 5200 val perplexity 303.8425
160
- 5200 train 5.779271 (lr=2.7135e-05) (hash(x)=148029261)
161
- 5300 val loss 5.7034
162
- 5300 val perplexity 299.8776
163
- 5300 train 5.703271 (lr=2.6136e-05) (hash(x)=157909233)
164
- 5400 val loss 5.6925
165
- 5400 val perplexity 296.6449
166
- 5400 train 5.554035 (lr=2.5147e-05) (hash(x)=142250860)
167
- 5500 val loss 5.6859
168
- 5500 val perplexity 294.6699
169
- 5500 train 5.566572 (lr=2.4169e-05) (hash(x)=145694703)
170
- 5600 val loss 5.6744
171
- 5600 val perplexity 291.3115
172
- 5600 train 5.617038 (lr=2.3204e-05) (hash(x)=141788252)
173
- 5700 val loss 5.6694
174
- 5700 val perplexity 289.8573
175
- 5700 train 5.570825 (lr=2.2253e-05) (hash(x)=150886692)
176
- 5800 val loss 5.6587
177
- 5800 val perplexity 286.7671
178
- 5800 train 5.573063 (lr=2.1318e-05) (hash(x)=151814419)
179
- 5900 val loss 5.6524
180
- 5900 val perplexity 284.9771
181
- 5900 train 5.664570 (lr=2.0400e-05) (hash(x)=146972539)
182
- 6000 val loss 5.6406
183
- 6000 val perplexity 281.6454
184
- 6000 train 5.531297 (lr=1.9500e-05) (hash(x)=150219954)
185
- 6100 val loss 5.6350
186
- 6100 val perplexity 280.0710
187
- 6100 train 5.625792 (lr=1.8620e-05) (hash(x)=161131435)
188
- 6200 val loss 5.6274
189
- 6200 val perplexity 277.9515
190
- 6200 train 5.640399 (lr=1.7760e-05) (hash(x)=150226893)
191
- 6300 val loss 5.6196
192
- 6300 val perplexity 275.7768
193
- 6300 train 5.457455 (lr=1.6923e-05) (hash(x)=144389552)
194
- 6400 val loss 5.6179
195
- 6400 val perplexity 275.3232
196
- 6400 train 5.391958 (lr=1.6108e-05) (hash(x)=147757943)
197
- 6500 val loss 5.6075
198
- 6500 val perplexity 272.4593
199
- 6500 train 5.596219 (lr=1.5319e-05) (hash(x)=144616373)
200
- 6600 val loss 5.6045
201
- 6600 val perplexity 271.6555
202
- 6600 train 5.424347 (lr=1.4555e-05) (hash(x)=145266452)
203
- 6700 val loss 5.5996
204
- 6700 val perplexity 270.3231
205
- 6700 train 5.604519 (lr=1.3817e-05) (hash(x)=149952261)
206
- 6800 val loss 5.5948
207
- 6800 val perplexity 269.0241
208
- 6800 train 5.484454 (lr=1.3108e-05) (hash(x)=143351199)
209
- 6900 val loss 5.5908
210
- 6900 val perplexity 267.9626
211
- 6900 train 5.557255 (lr=1.2427e-05) (hash(x)=156055618)
212
- 7000 val loss 5.5847
213
- 7000 val perplexity 266.3182
214
- 7000 train 5.654865 (lr=1.1777e-05) (hash(x)=164290908)
215
- 7100 val loss 5.5816
216
- 7100 val perplexity 265.5033
217
- 7100 train 5.678857 (lr=1.1157e-05) (hash(x)=150263832)
218
- 7200 val loss 5.5769
219
- 7200 val perplexity 264.2620
220
- 7200 train 5.263807 (lr=1.0568e-05) (hash(x)=139219680)
221
- 7300 val loss 5.5723
222
- 7300 val perplexity 263.0417
223
- 7300 train 5.514708 (lr=1.0012e-05) (hash(x)=150662994)
224
- 7400 val loss 5.5682
225
- 7400 val perplexity 261.9719
226
- 7400 train 5.406182 (lr=9.4899e-06) (hash(x)=139007967)
227
- 7500 val loss 5.5652
228
- 7500 val perplexity 261.1846
229
- 7500 train 5.395241 (lr=9.0014e-06) (hash(x)=138142461)
230
- 7600 val loss 5.5618
231
- 7600 val perplexity 260.2889
232
- 7600 train 5.477636 (lr=8.5478e-06) (hash(x)=150023998)
233
- 7700 val loss 5.5564
234
- 7700 val perplexity 258.8764
235
- 7700 train 5.451140 (lr=8.1297e-06) (hash(x)=143393355)
236
- 7800 val loss 5.5564
237
- 7800 val perplexity 258.8807
238
- 7800 train 5.420924 (lr=7.7476e-06) (hash(x)=144916472)
239
- 7900 val loss 5.5514
240
- 7900 val perplexity 257.5973
241
- 7900 train 5.509583 (lr=7.4021e-06) (hash(x)=150236934)
242
- 8000 val loss 5.5484
243
- 8000 val perplexity 256.8155
244
- 8000 train 5.562771 (lr=7.0937e-06) (hash(x)=146536422)
245
- 8100 val loss 5.5483
246
- 8100 val perplexity 256.7911
247
- 8100 train 5.384810 (lr=6.8229e-06) (hash(x)=151300857)
248
- 8200 val loss 5.5454
249
- 8200 val perplexity 256.0693
250
- 8200 train 5.573177 (lr=6.5900e-06) (hash(x)=160351956)
251
- 8300 val loss 5.5422
252
- 8300 val perplexity 255.2382
253
- 8300 train 5.604126 (lr=6.3954e-06) (hash(x)=150514540)
254
- 8400 val loss 5.5397
255
- 8400 val perplexity 254.6083
256
- 8400 train 5.485884 (lr=6.2395e-06) (hash(x)=155904762)
257
- 8500 val loss 5.5399
258
- 8500 val perplexity 254.6590
259
- 8500 train 5.350772 (lr=6.1223e-06) (hash(x)=146923196)
260
- 8600 val loss 5.5368
261
- 8600 val perplexity 253.8695
262
- 8600 train 5.517137 (lr=6.0440e-06) (hash(x)=138977080)
263
- 8700 val loss 5.5346
264
- 8700 val perplexity 253.2973
265
- 8700 train 5.343773 (lr=6.0049e-06) (hash(x)=146442792)
266
- 8749 val loss 5.5336
267
- 8749 val perplexity 253.0503

1
  max_steps: 8750
2
  0 val loss 11.3014
3
+ 0 val perplexity 80932.9531
4
+ 6000 val loss 5.6529
5
+ 6000 val perplexity 285.1119
6
+ 6000 train 5.542874 (lr=1.6250e-05) (hash(x)=150219954)
7
+ 0 train 11.305982 (lr=8.0000e-08) (hash(x)=153418542)
8
+ 6100 val loss 5.6490
9
+ 6100 val perplexity 283.9971
10
+ 6100 train 5.636328 (lr=1.5516e-05) (hash(x)=161131435)
11
+ 6200 val loss 5.6382
12
+ 6200 val perplexity 280.9560
13
+ 6200 train 5.653806 (lr=1.4800e-05) (hash(x)=150226893)
14
+ 100 val loss 9.9962
15
+ 100 val perplexity 21942.0781
16
+ 100 train 10.002850 (lr=8.0800e-06) (hash(x)=143722001)
17
+ 6300 val loss 5.6331
18
+ 6300 val perplexity 279.5193
19
+ 6300 train 5.468653 (lr=1.4102e-05) (hash(x)=144389552)
20
+ 6400 val loss 5.6300
21
+ 6400 val perplexity 278.6560
22
+ 6400 train 5.409243 (lr=1.3424e-05) (hash(x)=147757943)
23
+ 200 val loss 9.4169
24
+ 200 val perplexity 12295.0049
25
+ 200 train 9.418180 (lr=1.6080e-05) (hash(x)=152989689)
26
+ 6500 val loss 5.6202
27
+ 6500 val perplexity 275.9365
28
+ 6500 train 5.610387 (lr=1.2766e-05) (hash(x)=144616373)
29
+ 300 val loss 8.3178
30
+ 300 val perplexity 4096.1445
31
+ 300 train 8.331709 (lr=2.4080e-05) (hash(x)=150071018)
32
+ 6600 val loss 5.6194
33
+ 6600 val perplexity 275.7311
34
+ 6600 train 5.440672 (lr=1.2129e-05) (hash(x)=145266452)
35
+ 6700 val loss 5.6127
36
+ 6700 val perplexity 273.8852
37
+ 6700 train 5.611516 (lr=1.1515e-05) (hash(x)=149952261)
38
+ 400 val loss 7.6307
39
+ 400 val perplexity 2060.5608
40
+ 400 train 7.661986 (lr=3.2080e-05) (hash(x)=153358238)
41
+ 6800 val loss 5.6070
42
+ 6800 val perplexity 272.3298
43
+ 6800 train 5.494902 (lr=1.0923e-05) (hash(x)=143351199)
44
+ 6900 val loss 5.6058
45
+ 6900 val perplexity 272.0022
46
+ 6900 train 5.571501 (lr=1.0356e-05) (hash(x)=156055618)
47
+ 500 val loss 7.4628
48
+ 500 val perplexity 1742.0289
49
+ 500 train 7.339062 (lr=4.0000e-05) (hash(x)=148983354)
50
+ 7000 val loss 5.5986
51
+ 7000 val perplexity 270.0468
52
+ 7000 train 5.672305 (lr=9.8138e-06) (hash(x)=164290908)
53
+ 600 val loss 7.3476
54
+ 600 val perplexity 1552.4720
55
+ 600 train 7.236389 (lr=3.9987e-05) (hash(x)=150770333)
56
+ 7100 val loss 5.5952
57
+ 7100 val perplexity 269.1347
58
+ 7100 train 5.695919 (lr=9.2971e-06) (hash(x)=150263832)
59
+ 7200 val loss 5.5914
60
+ 7200 val perplexity 268.1207
61
+ 7200 train 5.276465 (lr=8.8068e-06) (hash(x)=139219680)
62
+ 700 val loss 7.2495
63
+ 700 val perplexity 1407.3933
64
+ 700 train 7.167737 (lr=3.9948e-05) (hash(x)=145246201)
65
+ 7300 val loss 5.5855
66
+ 7300 val perplexity 266.5415
67
+ 7300 train 5.529749 (lr=8.3436e-06) (hash(x)=150662994)
68
+ 7400 val loss 5.5824
69
+ 7400 val perplexity 265.7208
70
+ 7400 train 5.425246 (lr=7.9082e-06) (hash(x)=139007967)
71
+ 800 val loss 7.1616
72
+ 800 val perplexity 1289.0026
73
+ 800 train 6.990556 (lr=3.9883e-05) (hash(x)=142635842)
74
+ 7500 val loss 5.5810
75
+ 7500 val perplexity 265.3427
76
+ 7500 train 5.413968 (lr=7.5012e-06) (hash(x)=138142461)
77
+ 900 val loss 7.0502
78
+ 900 val perplexity 1153.1173
79
+ 900 train 7.074839 (lr=3.9792e-05) (hash(x)=148299832)
80
+ 7600 val loss 5.5758
81
+ 7600 val perplexity 263.9535
82
+ 7600 train 5.490452 (lr=7.1232e-06) (hash(x)=150023998)
83
+ 7700 val loss 5.5721
84
+ 7700 val perplexity 262.9965
85
+ 7700 train 5.465677 (lr=6.7747e-06) (hash(x)=143393355)
86
+ 1000 val loss 6.9682
87
+ 1000 val perplexity 1062.2701
88
+ 1000 train 6.929266 (lr=3.9675e-05) (hash(x)=152887721)
89
+ 7800 val loss 5.5718
90
+ 7800 val perplexity 262.9009
91
+ 7800 train 5.440393 (lr=6.4563e-06) (hash(x)=144916472)
92
+ 7900 val loss 5.5671
93
+ 7900 val perplexity 261.6836
94
+ 7900 train 5.529100 (lr=6.1684e-06) (hash(x)=150236934)
95
+ 1100 val loss 6.8864
96
+ 1100 val perplexity 978.9152
97
+ 1100 train 6.777247 (lr=3.9532e-05) (hash(x)=153721194)
98
+ 8000 val loss 5.5639
99
+ 8000 val perplexity 260.8326
100
+ 8000 train 5.572845 (lr=5.9114e-06) (hash(x)=146536422)
101
+ 8100 val loss 5.5644
102
+ 8100 val perplexity 260.9579
103
+ 8100 train 5.402884 (lr=5.6857e-06) (hash(x)=151300857)
104
+ 1200 val loss 6.8108
105
+ 1200 val perplexity 907.5820
106
+ 1200 train 6.800976 (lr=3.9364e-05) (hash(x)=153047184)
107
+ 8200 val loss 5.5621
108
+ 8200 val perplexity 260.3580
109
+ 8200 train 5.586649 (lr=5.4917e-06) (hash(x)=160351956)
110
+ 1300 val loss 6.7215
111
+ 1300 val perplexity 830.0660
112
+ 1300 train 6.630025 (lr=3.9171e-05) (hash(x)=152466045)
113
+ 8300 val loss 5.5577
114
+ 8300 val perplexity 259.2144
115
+ 8300 train 5.623133 (lr=5.3295e-06) (hash(x)=150514540)
116
+ 8400 val loss 5.5558
117
+ 8400 val perplexity 258.7418
118
+ 8400 train 5.504392 (lr=5.1995e-06) (hash(x)=155904762)
119
+ 1400 val loss 6.6611
120
+ 1400 val perplexity 781.4297
121
+ 1400 train 6.460868 (lr=3.8953e-05) (hash(x)=151991552)
122
+ 8500 val loss 5.5558
123
+ 8500 val perplexity 258.7216
124
+ 8500 train 5.361553 (lr=5.1019e-06) (hash(x)=146923196)
125
+ 8600 val loss 5.5521
126
+ 8600 val perplexity 257.7683
127
+ 8600 train 5.532177 (lr=5.0367e-06) (hash(x)=138977080)
128
+ 1500 val loss 6.5935
129
+ 1500 val perplexity 730.3278
130
+ 1500 train 6.540410 (lr=3.8711e-05) (hash(x)=147892594)
131
+ 8700 val loss 5.5503
132
+ 8700 val perplexity 257.3115
133
+ 8700 train 5.357503 (lr=5.0041e-06) (hash(x)=146442792)
134
+ 8749 val loss 5.5496
135
+ 8749 val perplexity 257.1397
136
+ 1600 val loss 6.5371
137
+ 1600 val perplexity 690.3153
138
+ 1600 train 6.339073 (lr=3.8444e-05) (hash(x)=135678663)
139
+ 1700 val loss 6.5037
140
+ 1700 val perplexity 667.5813
141
+ 1700 train 6.526124 (lr=3.8153e-05) (hash(x)=143909276)
142
+ 1800 val loss 6.4623
143
+ 1800 val perplexity 640.5242
144
+ 1800 train 6.610961 (lr=3.7839e-05) (hash(x)=155393468)
145
+ 1900 val loss 6.4146
146
+ 1900 val perplexity 610.7124
147
+ 1900 train 6.459574 (lr=3.7502e-05) (hash(x)=150957388)
148
+ 2000 val loss 6.3774
149
+ 2000 val perplexity 588.4093
150
+ 2000 train 6.625291 (lr=3.7143e-05) (hash(x)=144030938)
151
+ 2100 val loss 6.3564
152
+ 2100 val perplexity 576.1835
153
+ 2100 train 6.233661 (lr=3.6761e-05) (hash(x)=146254754)
154
+ 2200 val loss 6.3163
155
+ 2200 val perplexity 553.5404
156
+ 2200 train 6.347804 (lr=3.6358e-05) (hash(x)=150630461)
157
+ 2300 val loss 6.2920
158
+ 2300 val perplexity 540.2537
159
+ 2300 train 6.437610 (lr=3.5935e-05) (hash(x)=165208942)
160
+ 2400 val loss 6.2780
161
+ 2400 val perplexity 532.7427
162
+ 2400 train 6.199164 (lr=3.5491e-05) (hash(x)=153448706)
163
+ 2500 val loss 6.2434
164
+ 2500 val perplexity 514.6158
165
+ 2500 train 6.257586 (lr=3.5027e-05) (hash(x)=148505056)
166
+ 2600 val loss 6.2218
167
+ 2600 val perplexity 503.6213
168
+ 2600 train 6.170326 (lr=3.4545e-05) (hash(x)=142911960)
169
+ 2700 val loss 6.2032
170
+ 2700 val perplexity 494.3231
171
+ 2700 train 6.207232 (lr=3.4044e-05) (hash(x)=149951660)
172
+ 2800 val loss 6.1781
173
+ 2800 val perplexity 482.0936
174
+ 2800 train 6.171800 (lr=3.3526e-05) (hash(x)=152956713)
175
+ 2900 val loss 6.1582
176
+ 2900 val perplexity 472.5585
177
+ 2900 train 6.072066 (lr=3.2992e-05) (hash(x)=147247056)
178
+ 3000 val loss 6.1433
179
+ 3000 val perplexity 465.5686
180
+ 3000 train 5.947751 (lr=3.2441e-05) (hash(x)=146911716)
181
+ 3100 val loss 6.1201
182
+ 3100 val perplexity 454.9093
183
+ 3100 train 6.136861 (lr=3.1875e-05) (hash(x)=153282809)
184
+ 3200 val loss 6.1069
185
+ 3200 val perplexity 448.9299
186
+ 3200 train 6.006924 (lr=3.1295e-05) (hash(x)=152009984)
187
+ 3300 val loss 6.0921
188
+ 3300 val perplexity 442.3499
189
+ 3300 train 5.998622 (lr=3.0702e-05) (hash(x)=150012952)
190
+ 3400 val loss 6.0824
191
+ 3400 val perplexity 438.0655
192
+ 3400 train 5.915511 (lr=3.0095e-05) (hash(x)=146217477)
193
+ 3500 val loss 6.0570
194
+ 3500 val perplexity 427.0984
195
+ 3500 train 6.229527 (lr=2.9477e-05) (hash(x)=180156144)
196
+ 3600 val loss 6.0477
197
+ 3600 val perplexity 423.1463
198
+ 3600 train 6.082186 (lr=2.8849e-05) (hash(x)=154243319)
199
+ 3700 val loss 6.0347
200
+ 3700 val perplexity 417.6617
201
+ 3700 train 5.936630 (lr=2.8210e-05) (hash(x)=154330476)
202
+ 3800 val loss 6.0179
203
+ 3800 val perplexity 410.7197
204
+ 3800 train 5.950444 (lr=2.7562e-05) (hash(x)=145988858)
205
+ 3900 val loss 6.0028
206
+ 3900 val perplexity 404.5569
207
+ 3900 train 6.004784 (lr=2.6907e-05) (hash(x)=149648609)
208
+ 4000 val loss 5.9906
209
+ 4000 val perplexity 399.6580
210
+ 4000 train 5.962393 (lr=2.6244e-05) (hash(x)=154149272)
211
+ 4100 val loss 5.9751
212
+ 4100 val perplexity 393.4893
213
+ 4100 train 5.871179 (lr=2.5575e-05) (hash(x)=132608538)
214
+ 4200 val loss 5.9643
215
+ 4200 val perplexity 389.2924
216
+ 4200 train 5.910713 (lr=2.4900e-05) (hash(x)=140443636)
217
+ 4300 val loss 5.9543
218
+ 4300 val perplexity 385.3971
219
+ 4300 train 5.875401 (lr=2.4222e-05) (hash(x)=138919540)
220
+ 4400 val loss 5.9438
221
+ 4400 val perplexity 381.3652
222
+ 4400 train 5.925539 (lr=2.3540e-05) (hash(x)=153594684)
223
+ 4500 val loss 5.9275
224
+ 4500 val perplexity 375.2261
225
+ 4500 train 5.877662 (lr=2.2856e-05) (hash(x)=144084750)
226
+ 4600 val loss 5.9175
227
+ 4600 val perplexity 371.4693
228
+ 4600 train 5.874401 (lr=2.2171e-05) (hash(x)=147423675)
229
+ 4700 val loss 5.9032
230
+ 4700 val perplexity 366.2040
231
+ 4700 train 5.972774 (lr=2.1486e-05) (hash(x)=162157039)
232
+ 4800 val loss 5.8954
233
+ 4800 val perplexity 363.3736
234
+ 4800 train 5.795979 (lr=2.0801e-05) (hash(x)=134970942)
235
+ 4900 val loss 5.8887
236
+ 4900 val perplexity 360.9260
237
+ 4900 train 5.803387 (lr=2.0118e-05) (hash(x)=160093370)
238
+ 5000 val loss 5.8781
239
+ 5000 val perplexity 357.1143
240
+ 5000 train 5.771273 (lr=1.9438e-05) (hash(x)=154986299)
241
+ 5100 val loss 5.8641
242
+ 5100 val perplexity 352.1793
243
+ 5100 train 5.719149 (lr=1.8762e-05) (hash(x)=145406582)
244
+ 5200 val loss 5.8597
245
+ 5200 val perplexity 350.6254
246
+ 5200 train 5.913215 (lr=1.8090e-05) (hash(x)=148029261)
247
+ 5300 val loss 5.8437
248
+ 5300 val perplexity 345.0680
249
+ 5300 train 5.842875 (lr=1.7424e-05) (hash(x)=157909233)
250
+ 5400 val loss 5.8393
251
+ 5400 val perplexity 343.5403
252
+ 5400 train 5.714415 (lr=1.6765e-05) (hash(x)=142250860)
253
+ 5500 val loss 5.8289
254
+ 5500 val perplexity 339.9943
255
+ 5500 train 5.703356 (lr=1.6113e-05) (hash(x)=145694703)
256
+ 5600 val loss 5.8204
257
+ 5600 val perplexity 337.1230
258
+ 5600 train 5.756215 (lr=1.5469e-05) (hash(x)=141788252)
259
+ 5700 val loss 5.8137
260
+ 5700 val perplexity 334.8548
261
+ 5700 train 5.716033 (lr=1.4836e-05) (hash(x)=150886692)
262
+ 5800 val loss 5.8071
263
+ 5800 val perplexity 332.6516
264
+ 5800 train 5.713255 (lr=1.4212e-05) (hash(x)=151814419)
265
+ 5900 val loss 5.7999
266
+ 5900 val perplexity 330.2577
267
+ 5900 train 5.809623 (lr=1.3600e-05) (hash(x)=146972539)
268
+ 6000 val loss 5.7908
269
+ 6000 val perplexity 327.2866
270
+ 6000 train 5.675955 (lr=1.3000e-05) (hash(x)=150219954)
271
+ 6100 val loss 5.7875
272
+ 6100 val perplexity 326.2109
273
+ 6100 train 5.785079 (lr=1.2413e-05) (hash(x)=161131435)
274
+ 6200 val loss 5.7786
275
+ 6200 val perplexity 323.3111
276
+ 6200 train 5.787489 (lr=1.1840e-05) (hash(x)=150226893)
277
+ 6300 val loss 5.7730
278
+ 6300 val perplexity 321.5074
279
+ 6300 train 5.608102 (lr=1.1282e-05) (hash(x)=144389552)
280
+ 6400 val loss 5.7687
281
+ 6400 val perplexity 320.1277
282
+ 6400 train 5.549685 (lr=1.0739e-05) (hash(x)=147757943)
283
+ 6500 val loss 5.7602
284
+ 6500 val perplexity 317.4023
285
+ 6500 train 5.748319 (lr=1.0213e-05) (hash(x)=144616373)
286
+ 6600 val loss 5.7591
287
+ 6600 val perplexity 317.0594
288
+ 6600 train 5.587925 (lr=9.7032e-06) (hash(x)=145266452)
289
+ 6700 val loss 5.7530
290
+ 6700 val perplexity 315.1310
291
+ 6700 train 5.747368 (lr=9.2116e-06) (hash(x)=149952261)
292
+ 6800 val loss 5.7482
293
+ 6800 val perplexity 313.6341
294
+ 6800 train 5.633492 (lr=8.7387e-06) (hash(x)=143351199)
295
+ 6900 val loss 5.7475
296
+ 6900 val perplexity 313.4069
297
+ 6900 train 5.705395 (lr=8.2849e-06) (hash(x)=156055618)
298
+ 7000 val loss 5.7401
299
+ 7000 val perplexity 311.0891
300
+ 7000 train 5.817177 (lr=7.8510e-06) (hash(x)=164290908)
301
+ 7100 val loss 5.7362
302
+ 7100 val perplexity 309.8936
303
+ 7100 train 5.831888 (lr=7.4377e-06) (hash(x)=150263832)
304
+ 7200 val loss 5.7341
305
+ 7200 val perplexity 309.2273
306
+ 7200 train 5.421633 (lr=7.0455e-06) (hash(x)=139219680)
307
+ 7300 val loss 5.7285
308
+ 7300 val perplexity 307.4960
309
+ 7300 train 5.672938 (lr=6.6749e-06) (hash(x)=150662994)
310
+ 7400 val loss 5.7251
311
+ 7400 val perplexity 306.4661
312
+ 7400 train 5.564129 (lr=6.3266e-06) (hash(x)=139007967)
313
+ 7500 val loss 5.7235
314
+ 7500 val perplexity 305.9802
315
+ 7500 train 5.551204 (lr=6.0010e-06) (hash(x)=138142461)
316
+ 7600 val loss 5.7201
317
+ 7600 val perplexity 304.9451
318
+ 7600 train 5.636363 (lr=5.6986e-06) (hash(x)=150023998)
319
+ 7700 val loss 5.7159
320
+ 7700 val perplexity 303.6624
321
+ 7700 train 5.611104 (lr=5.4198e-06) (hash(x)=143393355)
322
+ 7800 val loss 5.7150
323
+ 7800 val perplexity 303.3935
324
+ 7800 train 5.585992 (lr=5.1650e-06) (hash(x)=144916472)
325
+ 7900 val loss 5.7112
326
+ 7900 val perplexity 302.2332
327
+ 7900 train 5.664940 (lr=4.9347e-06) (hash(x)=150236934)
328
+ 8000 val loss 5.7080
329
+ 8000 val perplexity 301.2645
330
+ 8000 train 5.698518 (lr=4.7291e-06) (hash(x)=146536422)
331
+ 8100 val loss 5.7083
332
+ 8100 val perplexity 301.3717
333
+ 8100 train 5.542465 (lr=4.5486e-06) (hash(x)=151300857)
334
+ 8200 val loss 5.7064
335
+ 8200 val perplexity 300.7751
336
+ 8200 train 5.749476 (lr=4.3933e-06) (hash(x)=160351956)
337
+ 8300 val loss 5.7028
338
+ 8300 val perplexity 299.6952
339
+ 8300 train 5.746905 (lr=4.2636e-06) (hash(x)=150514540)
340
+ 8400 val loss 5.7007
341
+ 8400 val perplexity 299.0894
342
+ 8400 train 5.652898 (lr=4.1596e-06) (hash(x)=155904762)
343
+ 8500 val loss 5.7014
344
+ 8500 val perplexity 299.2841
345
+ 8500 train 5.507021 (lr=4.0815e-06) (hash(x)=146923196)
346
+ 8600 val loss 5.6978
347
+ 8600 val perplexity 298.2102
348
+ 8600 train 5.675440 (lr=4.0294e-06) (hash(x)=138977080)
349
+ 8700 val loss 5.6952
350
+ 8700 val perplexity 297.4482
351
+ 8700 train 5.503180 (lr=4.0033e-06) (hash(x)=146442792)
352
+ 8749 val loss 5.6954
353
+ 8749 val perplexity 297.4845
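Two sanity checks on the numbers in this log, as a minimal sketch (the linear-warmup form is inferred from the logged values, not taken from the training code): the logged val perplexity is exp of the logged val loss, and the learning rates over the first 500 steps match a linear ramp to max_lr = 4e-05 across warmup_steps = 500 from args.json.

```python
import math

# Perplexity is exp(loss): the final entry reports loss 5.5496 and perplexity 257.1397.
print(math.exp(5.5496))  # ~257.13; the small gap comes from the loss being rounded

# Warmup LRs look like a linear ramp: lr(step) = max_lr * (step + 1) / warmup_steps.
max_lr, warmup_steps = 4e-05, 500  # values from args.json
for step, logged in [(0, 8.0000e-08), (100, 8.0800e-06), (400, 3.2080e-05)]:
    print(step, max_lr * (step + 1) / warmup_steps, logged)
```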
attention_kindselective_n_heads4_seed1343/model_08749.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5e2881551dc153a5a5f5a84e446f2c231cc746aedb22500ac0af55e7e197bd64
+ oid sha256:e1c338d0c928c41d00334bf6860b1f3def80cd607c2223f21881c24d3d752895
  size 92843394
attention_kindselective_n_heads4_seed1343/optimizer_08749.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:81142c99afd4e4a43846af07a607dbf6a77e7ff90d8be150963d09be6498448f
+ oid sha256:263f36a68651452dea4c83809f12ec85d5fc7ef3148f72a69eb6d173e4ca9650
  size 179406214
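The checkpoint files in this commit are Git LFS pointers; only the sha256 oid changes, while the file sizes stay the same. A minimal sketch, assuming a hypothetical repo_id (the actual repository this commit belongs to is not named here), for fetching the updated model checkpoint with huggingface_hub:

```python
from huggingface_hub import hf_hub_download

# repo_id is a placeholder; substitute the repository this commit belongs to.
path = hf_hub_download(
    repo_id="andrew-healey/wider_is_better_9",  # hypothetical
    filename="attention_kindselective_n_heads4_seed1343/model_08749.pt",
)
print(path)  # local path to the resolved LFS file
```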