andrew-healey committed
Commit fd53f19 (verified) · 1 parent: 50bcb04

Upload folder using huggingface_hub

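The commit message says the run folder was pushed with huggingface_hub. As a hypothetical sketch (the repo id below is a placeholder, not taken from this commit; only the folder path appears in args.json as log_dir), an upload like this one can be produced with HfApi.upload_folder:

    from huggingface_hub import HfApi

    api = HfApi()
    # folder_path matches the log_dir in args.json below; repo_id is a placeholder.
    api.upload_folder(
        folder_path="wider_is_better_9/attention_kindselective_n_heads4_seed1340",
        repo_id="andrew-healey/example-repo",  # hypothetical, not the real repo id
        repo_type="model",
        commit_message="Upload folder using huggingface_hub",
    )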
attention_kindselective_n_heads4_seed1340/args.json CHANGED
@@ -1 +1 @@
1
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_9/attention_kindselective_n_heads4_seed1340", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_9", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1340, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 5.5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "5.5e-5_61440_4_1340", "n_embd": 256}
 
1
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_9/attention_kindselective_n_heads4_seed1340", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_9", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1340, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 4.5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "4.5e-5_61440_4_1340", "n_embd": 256}
attention_kindselective_n_heads4_seed1340/log2.txt CHANGED
@@ -1,267 +1,267 @@
1
  max_steps: 8750
2
  0 val loss 11.2709
3
- 0 val perplexity 78502.8516
4
- 0 train 11.277068 (lr=1.1000e-07) (hash(x)=164406924)
5
- 100 val loss 10.0085
6
- 100 val perplexity 22213.9395
7
- 100 train 9.970803 (lr=1.1110e-05) (hash(x)=144903932)
8
- 200 val loss 9.2914
9
- 200 val perplexity 10844.2412
10
- 200 train 9.357403 (lr=2.2110e-05) (hash(x)=167734596)
11
- 300 val loss 8.0630
12
- 300 val perplexity 3174.7661
13
- 300 train 8.075590 (lr=3.3110e-05) (hash(x)=149619098)
14
- 400 val loss 7.5692
15
- 400 val perplexity 1937.4963
16
- 400 train 7.450818 (lr=4.4110e-05) (hash(x)=153710890)
17
- 500 val loss 7.4143
18
- 500 val perplexity 1659.5359
19
- 500 train 7.504872 (lr=5.5000e-05) (hash(x)=145450636)
20
- 600 val loss 7.2988
21
- 600 val perplexity 1478.4788
22
- 600 train 7.166701 (lr=5.4982e-05) (hash(x)=145249251)
23
- 700 val loss 7.1882
24
- 700 val perplexity 1323.7411
25
- 700 train 7.318045 (lr=5.4928e-05) (hash(x)=150475545)
26
- 800 val loss 7.0767
27
- 800 val perplexity 1184.1125
28
- 800 train 7.045276 (lr=5.4839e-05) (hash(x)=144483776)
29
- 900 val loss 6.9781
30
- 900 val perplexity 1072.9233
31
- 900 train 7.080307 (lr=5.4713e-05) (hash(x)=157916369)
32
- 1000 val loss 6.9082
33
- 1000 val perplexity 1000.4556
34
- 1000 train 6.925345 (lr=5.4553e-05) (hash(x)=154856891)
35
- 1100 val loss 6.8256
36
- 1100 val perplexity 921.1058
37
- 1100 train 6.522852 (lr=5.4357e-05) (hash(x)=136191502)
38
- 1200 val loss 6.7440
39
- 1200 val perplexity 848.9789
40
- 1200 train 6.700300 (lr=5.4126e-05) (hash(x)=148021541)
41
- 1300 val loss 6.6680
42
- 1300 val perplexity 786.7943
43
- 1300 train 6.599460 (lr=5.3860e-05) (hash(x)=146005217)
44
- 1400 val loss 6.6030
45
- 1400 val perplexity 737.3328
46
- 1400 train 6.514089 (lr=5.3561e-05) (hash(x)=146019502)
47
- 1500 val loss 6.5557
48
- 1500 val perplexity 703.2271
49
- 1500 train 6.492791 (lr=5.3227e-05) (hash(x)=150127281)
50
- 1600 val loss 6.5065
51
- 1600 val perplexity 669.4506
52
- 1600 train 6.472455 (lr=5.2860e-05) (hash(x)=154120875)
53
- 1700 val loss 6.4733
54
- 1700 val perplexity 647.6284
55
- 1700 train 6.692543 (lr=5.2461e-05) (hash(x)=155797680)
56
- 1800 val loss 6.4172
57
- 1800 val perplexity 612.2867
58
- 1800 train 6.403757 (lr=5.2029e-05) (hash(x)=156809396)
59
- 1900 val loss 6.3877
60
- 1900 val perplexity 594.5070
61
- 1900 train 6.274245 (lr=5.1565e-05) (hash(x)=144640294)
62
- 2000 val loss 6.3473
63
- 2000 val perplexity 570.9570
64
- 2000 train 6.208043 (lr=5.1071e-05) (hash(x)=162831106)
65
- 2100 val loss 6.3462
66
- 2100 val perplexity 570.3412
67
- 2100 train 6.336015 (lr=5.0547e-05) (hash(x)=158239484)
68
- 2200 val loss 6.2871
69
- 2200 val perplexity 537.5853
70
- 2200 train 6.161128 (lr=4.9993e-05) (hash(x)=140504180)
71
- 2300 val loss 6.2516
72
- 2300 val perplexity 518.8192
73
- 2300 train 6.238686 (lr=4.9410e-05) (hash(x)=142234024)
74
- 2400 val loss 6.2270
75
- 2400 val perplexity 506.2221
76
- 2400 train 6.225477 (lr=4.8800e-05) (hash(x)=143091562)
77
- 2500 val loss 6.2183
78
- 2500 val perplexity 501.8651
79
- 2500 train 6.118163 (lr=4.8162e-05) (hash(x)=149857456)
80
- 2600 val loss 6.1767
81
- 2600 val perplexity 481.3983
82
- 2600 train 6.211945 (lr=4.7499e-05) (hash(x)=146191551)
83
- 2700 val loss 6.1543
84
- 2700 val perplexity 470.7182
85
- 2700 train 6.071248 (lr=4.6811e-05) (hash(x)=145375752)
86
- 2800 val loss 6.1304
87
- 2800 val perplexity 459.5979
88
- 2800 train 6.014522 (lr=4.6099e-05) (hash(x)=151568014)
89
- 2900 val loss 6.1218
90
- 2900 val perplexity 455.7011
91
- 2900 train 5.945108 (lr=4.5364e-05) (hash(x)=149366597)
92
- 3000 val loss 6.0883
93
- 3000 val perplexity 440.6820
94
- 3000 train 6.013472 (lr=4.4606e-05) (hash(x)=150464442)
95
- 3100 val loss 6.0724
96
- 3100 val perplexity 433.7043
97
- 3100 train 6.195862 (lr=4.3828e-05) (hash(x)=182449036)
98
- 3200 val loss 6.0531
99
- 3200 val perplexity 425.4502
100
- 3200 train 6.008684 (lr=4.3031e-05) (hash(x)=140141286)
101
- 3300 val loss 6.0351
102
- 3300 val perplexity 417.8260
103
- 3300 train 6.008709 (lr=4.2215e-05) (hash(x)=148099414)
104
- 3400 val loss 6.0145
105
- 3400 val perplexity 409.3233
106
- 3400 train 5.879248 (lr=4.1381e-05) (hash(x)=142633951)
107
- 3500 val loss 6.0038
108
- 3500 val perplexity 404.9803
109
- 3500 train 5.866429 (lr=4.0532e-05) (hash(x)=148368965)
110
- 3600 val loss 5.9817
111
- 3600 val perplexity 396.1001
112
- 3600 train 6.005436 (lr=3.9667e-05) (hash(x)=152372067)
113
- 3700 val loss 5.9637
114
- 3700 val perplexity 389.0613
115
- 3700 train 6.042872 (lr=3.8789e-05) (hash(x)=168885609)
116
- 3800 val loss 5.9535
117
- 3800 val perplexity 385.1143
118
- 3800 train 5.766492 (lr=3.7898e-05) (hash(x)=141633734)
119
- 3900 val loss 5.9283
120
- 3900 val perplexity 375.5027
121
- 3900 train 5.815371 (lr=3.6996e-05) (hash(x)=153141007)
122
- 4000 val loss 5.9161
123
- 4000 val perplexity 370.9533
124
- 4000 train 5.847968 (lr=3.6085e-05) (hash(x)=160577202)
125
- 4100 val loss 5.9108
126
- 4100 val perplexity 368.9956
127
- 4100 train 5.976177 (lr=3.5165e-05) (hash(x)=153858169)
128
- 4200 val loss 5.8859
129
- 4200 val perplexity 359.9236
130
- 4200 train 5.900618 (lr=3.4238e-05) (hash(x)=155889149)
131
- 4300 val loss 5.8756
132
- 4300 val perplexity 356.2298
133
- 4300 train 5.686732 (lr=3.3305e-05) (hash(x)=152294662)
134
- 4400 val loss 5.8614
135
- 4400 val perplexity 351.2089
136
- 4400 train 5.759041 (lr=3.2368e-05) (hash(x)=141804386)
137
- 4500 val loss 5.8419
138
- 4500 val perplexity 344.4301
139
- 4500 train 5.738522 (lr=3.1428e-05) (hash(x)=151095242)
140
- 4600 val loss 5.8479
141
- 4600 val perplexity 346.4965
142
- 4600 train 5.792981 (lr=3.0486e-05) (hash(x)=156414699)
143
- 4700 val loss 5.8213
144
- 4700 val perplexity 337.4196
145
- 4700 train 5.835704 (lr=2.9543e-05) (hash(x)=161556686)
146
- 4800 val loss 5.8061
147
- 4800 val perplexity 332.3093
148
- 4800 train 5.715901 (lr=2.8602e-05) (hash(x)=149000293)
149
- 4900 val loss 5.7968
150
- 4900 val perplexity 329.2426
151
- 4900 train 5.584097 (lr=2.7663e-05) (hash(x)=154349989)
152
- 5000 val loss 5.7843
153
- 5000 val perplexity 325.1438
154
- 5000 train 5.614993 (lr=2.6728e-05) (hash(x)=131475967)
155
- 5100 val loss 5.7728
156
- 5100 val perplexity 321.4481
157
- 5100 train 5.674039 (lr=2.5798e-05) (hash(x)=149717902)
158
- 5200 val loss 5.7695
159
- 5200 val perplexity 320.3890
160
- 5200 train 5.634620 (lr=2.4874e-05) (hash(x)=151407999)
161
- 5300 val loss 5.7545
162
- 5300 val perplexity 315.6197
163
- 5300 train 5.843950 (lr=2.3958e-05) (hash(x)=168602728)
164
- 5400 val loss 5.7444
165
- 5400 val perplexity 312.4250
166
- 5400 train 5.700167 (lr=2.3051e-05) (hash(x)=158344511)
167
- 5500 val loss 5.7369
168
- 5500 val perplexity 310.0940
169
- 5500 train 5.696602 (lr=2.2155e-05) (hash(x)=148350057)
170
- 5600 val loss 5.7255
171
- 5600 val perplexity 306.5844
172
- 5600 train 5.792315 (lr=2.1271e-05) (hash(x)=153847323)
173
- 5700 val loss 5.7168
174
- 5700 val perplexity 303.9281
175
- 5700 train 5.660481 (lr=2.0399e-05) (hash(x)=156607405)
176
- 5800 val loss 5.7113
177
- 5800 val perplexity 302.2715
178
- 5800 train 5.476274 (lr=1.9542e-05) (hash(x)=145115031)
179
- 5900 val loss 5.7032
180
- 5900 val perplexity 299.8224
181
- 5900 train 5.585124 (lr=1.8700e-05) (hash(x)=141584622)
182
- 6000 val loss 5.6926
183
- 6000 val perplexity 296.6544
184
- 6000 train 5.623785 (lr=1.7875e-05) (hash(x)=146613857)
185
- 6100 val loss 5.6872
186
- 6100 val perplexity 295.0699
187
- 6100 train 5.629707 (lr=1.7068e-05) (hash(x)=144621768)
188
- 6200 val loss 5.6809
189
- 6200 val perplexity 293.2248
190
- 6200 train 5.612484 (lr=1.6280e-05) (hash(x)=146521760)
191
- 6300 val loss 5.6744
192
- 6300 val perplexity 291.3021
193
- 6300 train 5.505092 (lr=1.5512e-05) (hash(x)=161378136)
194
- 6400 val loss 5.6678
195
- 6400 val perplexity 289.3883
196
- 6400 train 5.434224 (lr=1.4766e-05) (hash(x)=141624235)
197
- 6500 val loss 5.6614
198
- 6500 val perplexity 287.5368
199
- 6500 train 5.581383 (lr=1.4042e-05) (hash(x)=151197095)
200
- 6600 val loss 5.6591
201
- 6600 val perplexity 286.9044
202
- 6600 train 5.677541 (lr=1.3342e-05) (hash(x)=153269571)
203
- 6700 val loss 5.6526
204
- 6700 val perplexity 285.0374
205
- 6700 train 5.618601 (lr=1.2666e-05) (hash(x)=146111181)
206
- 6800 val loss 5.6434
207
- 6800 val perplexity 282.4169
208
- 6800 train 5.672344 (lr=1.2016e-05) (hash(x)=147269760)
209
- 6900 val loss 5.6390
210
- 6900 val perplexity 281.1924
211
- 6900 train 5.721029 (lr=1.1392e-05) (hash(x)=152912762)
212
- 7000 val loss 5.6363
213
- 7000 val perplexity 280.4270
214
- 7000 train 5.724804 (lr=1.0795e-05) (hash(x)=165412343)
215
- 7100 val loss 5.6272
216
- 7100 val perplexity 277.8830
217
- 7100 train 5.702348 (lr=1.0227e-05) (hash(x)=162866028)
218
- 7200 val loss 5.6242
219
- 7200 val perplexity 277.0432
220
- 7200 train 5.594518 (lr=9.6875e-06) (hash(x)=142998115)
221
- 7300 val loss 5.6216
222
- 7300 val perplexity 276.3317
223
- 7300 train 5.392981 (lr=9.1780e-06) (hash(x)=145486999)
224
- 7400 val loss 5.6144
225
- 7400 val perplexity 274.3558
226
- 7400 train 5.591548 (lr=8.6990e-06) (hash(x)=155325873)
227
- 7500 val loss 5.6108
228
- 7500 val perplexity 273.3524
229
- 7500 train 5.574790 (lr=8.2513e-06) (hash(x)=145131256)
230
- 7600 val loss 5.6154
231
- 7600 val perplexity 274.6144
232
- 7600 train 5.539998 (lr=7.8355e-06) (hash(x)=144008365)
233
- 7700 val loss 5.6057
234
- 7700 val perplexity 271.9604
235
- 7700 train 5.708950 (lr=7.4522e-06) (hash(x)=148848532)
236
- 7800 val loss 5.6015
237
- 7800 val perplexity 270.8214
238
- 7800 train 5.587547 (lr=7.1019e-06) (hash(x)=150391642)
239
- 7900 val loss 5.6012
240
- 7900 val perplexity 270.7429
241
- 7900 train 5.359458 (lr=6.7852e-06) (hash(x)=152191414)
242
- 8000 val loss 5.5965
243
- 8000 val perplexity 269.4868
244
- 8000 train 5.674829 (lr=6.5025e-06) (hash(x)=159755587)
245
- 8100 val loss 5.5929
246
- 8100 val perplexity 268.5056
247
- 8100 train 5.618392 (lr=6.2543e-06) (hash(x)=156664468)
248
- 8200 val loss 5.5916
249
- 8200 val perplexity 268.1588
250
- 8200 train 5.440843 (lr=6.0408e-06) (hash(x)=139457379)
251
- 8300 val loss 5.5967
252
- 8300 val perplexity 269.5344
253
- 8300 train 5.369544 (lr=5.8625e-06) (hash(x)=145478564)
254
- 8400 val loss 5.5865
255
- 8400 val perplexity 266.8061
256
- 8400 train 5.525924 (lr=5.7195e-06) (hash(x)=154982769)
257
- 8500 val loss 5.5846
258
- 8500 val perplexity 266.2822
259
- 8500 train 5.552999 (lr=5.6121e-06) (hash(x)=145798118)
260
- 8600 val loss 5.5844
261
- 8600 val perplexity 266.2398
262
- 8600 train 5.494684 (lr=5.5404e-06) (hash(x)=143231551)
263
- 8700 val loss 5.5872
264
- 8700 val perplexity 266.9780
265
- 8700 train 5.486710 (lr=5.5045e-06) (hash(x)=154780112)
266
- 8749 val loss 5.5793
267
- 8749 val perplexity 264.8789
 
1
  max_steps: 8750
2
  0 val loss 11.2709
3
+ 0 val perplexity 78502.7734
4
+ 0 train 11.277069 (lr=9.0000e-08) (hash(x)=164406924)
5
+ 100 val loss 10.0557
6
+ 100 val perplexity 23287.3145
7
+ 100 train 10.019081 (lr=9.0900e-06) (hash(x)=144903932)
8
+ 200 val loss 9.4995
9
+ 200 val perplexity 13352.6445
10
+ 200 train 9.559769 (lr=1.8090e-05) (hash(x)=167734596)
11
+ 300 val loss 8.3418
12
+ 300 val perplexity 4195.8223
13
+ 300 train 8.368614 (lr=2.7090e-05) (hash(x)=149619098)
14
+ 400 val loss 7.7028
15
+ 400 val perplexity 2214.4700
16
+ 400 train 7.609691 (lr=3.6090e-05) (hash(x)=153710890)
17
+ 500 val loss 7.5150
18
+ 500 val perplexity 1835.3279
19
+ 500 train 7.606066 (lr=4.5000e-05) (hash(x)=145450636)
20
+ 600 val loss 7.3772
21
+ 600 val perplexity 1599.0978
22
+ 600 train 7.234566 (lr=4.4985e-05) (hash(x)=145249251)
23
+ 700 val loss 7.2820
24
+ 700 val perplexity 1453.9449
25
+ 700 train 7.401809 (lr=4.4941e-05) (hash(x)=150475545)
26
+ 800 val loss 7.1954
27
+ 800 val perplexity 1333.3405
28
+ 800 train 7.163744 (lr=4.4868e-05) (hash(x)=144483776)
29
+ 900 val loss 7.1004
30
+ 900 val perplexity 1212.4415
31
+ 900 train 7.208779 (lr=4.4766e-05) (hash(x)=157916369)
32
+ 1000 val loss 7.0209
33
+ 1000 val perplexity 1119.8268
34
+ 1000 train 7.043412 (lr=4.4634e-05) (hash(x)=154856891)
35
+ 1100 val loss 6.9413
36
+ 1100 val perplexity 1034.0712
37
+ 1100 train 6.648606 (lr=4.4474e-05) (hash(x)=136191502)
38
+ 1200 val loss 6.8589
39
+ 1200 val perplexity 952.3258
40
+ 1200 train 6.815243 (lr=4.4285e-05) (hash(x)=148021541)
41
+ 1300 val loss 6.7793
42
+ 1300 val perplexity 879.4661
43
+ 1300 train 6.710860 (lr=4.4068e-05) (hash(x)=146005217)
44
+ 1400 val loss 6.7046
45
+ 1400 val perplexity 816.1409
46
+ 1400 train 6.612201 (lr=4.3822e-05) (hash(x)=146019502)
47
+ 1500 val loss 6.6382
48
+ 1500 val perplexity 763.7422
49
+ 1500 train 6.578260 (lr=4.3549e-05) (hash(x)=150127281)
50
+ 1600 val loss 6.5751
51
+ 1600 val perplexity 716.9893
52
+ 1600 train 6.544516 (lr=4.3249e-05) (hash(x)=154120875)
53
+ 1700 val loss 6.5345
54
+ 1700 val perplexity 688.4567
55
+ 1700 train 6.748356 (lr=4.2922e-05) (hash(x)=155797680)
56
+ 1800 val loss 6.4693
57
+ 1800 val perplexity 645.0002
58
+ 1800 train 6.460943 (lr=4.2569e-05) (hash(x)=156809396)
59
+ 1900 val loss 6.4339
60
+ 1900 val perplexity 622.6234
61
+ 1900 train 6.314532 (lr=4.2190e-05) (hash(x)=144640294)
62
+ 2000 val loss 6.3959
63
+ 2000 val perplexity 599.3755
64
+ 2000 train 6.258166 (lr=4.1785e-05) (hash(x)=162831106)
65
+ 2100 val loss 6.3713
66
+ 2100 val perplexity 584.7957
67
+ 2100 train 6.357996 (lr=4.1356e-05) (hash(x)=158239484)
68
+ 2200 val loss 6.3303
69
+ 2200 val perplexity 561.3037
70
+ 2200 train 6.209711 (lr=4.0903e-05) (hash(x)=140504180)
71
+ 2300 val loss 6.2935
72
+ 2300 val perplexity 541.0540
73
+ 2300 train 6.283567 (lr=4.0426e-05) (hash(x)=142234024)
74
+ 2400 val loss 6.2662
75
+ 2400 val perplexity 526.4846
76
+ 2400 train 6.264317 (lr=3.9927e-05) (hash(x)=143091562)
77
+ 2500 val loss 6.2603
78
+ 2500 val perplexity 523.3643
79
+ 2500 train 6.165597 (lr=3.9406e-05) (hash(x)=149857456)
80
+ 2600 val loss 6.2165
81
+ 2600 val perplexity 500.9319
82
+ 2600 train 6.254084 (lr=3.8863e-05) (hash(x)=146191551)
83
+ 2700 val loss 6.1897
84
+ 2700 val perplexity 487.6854
85
+ 2700 train 6.109442 (lr=3.8300e-05) (hash(x)=145375752)
86
+ 2800 val loss 6.1660
87
+ 2800 val perplexity 476.2601
88
+ 2800 train 6.046856 (lr=3.7717e-05) (hash(x)=151568014)
89
+ 2900 val loss 6.1587
90
+ 2900 val perplexity 472.7968
91
+ 2900 train 5.982612 (lr=3.7116e-05) (hash(x)=149366597)
92
+ 3000 val loss 6.1218
93
+ 3000 val perplexity 455.6800
94
+ 3000 train 6.048753 (lr=3.6496e-05) (hash(x)=150464442)
95
+ 3100 val loss 6.1017
96
+ 3100 val perplexity 446.6161
97
+ 3100 train 6.231345 (lr=3.5860e-05) (hash(x)=182449036)
98
+ 3200 val loss 6.0852
99
+ 3200 val perplexity 439.3019
100
+ 3200 train 6.040099 (lr=3.5207e-05) (hash(x)=140141286)
101
+ 3300 val loss 6.0630
102
+ 3300 val perplexity 429.6579
103
+ 3300 train 6.028925 (lr=3.4539e-05) (hash(x)=148099414)
104
+ 3400 val loss 6.0475
105
+ 3400 val perplexity 423.0616
106
+ 3400 train 5.908727 (lr=3.3857e-05) (hash(x)=142633951)
107
+ 3500 val loss 6.0416
108
+ 3500 val perplexity 420.5567
109
+ 3500 train 5.899892 (lr=3.3162e-05) (hash(x)=148368965)
110
+ 3600 val loss 6.0151
111
+ 3600 val perplexity 409.5803
112
+ 3600 train 6.040414 (lr=3.2455e-05) (hash(x)=152372067)
113
+ 3700 val loss 6.0002
114
+ 3700 val perplexity 403.4992
115
+ 3700 train 6.086949 (lr=3.1736e-05) (hash(x)=168885609)
116
+ 3800 val loss 5.9895
117
+ 3800 val perplexity 399.2296
118
+ 3800 train 5.802959 (lr=3.1008e-05) (hash(x)=141633734)
119
+ 3900 val loss 5.9654
120
+ 3900 val perplexity 389.6895
121
+ 3900 train 5.856780 (lr=3.0270e-05) (hash(x)=153141007)
122
+ 4000 val loss 5.9524
123
+ 4000 val perplexity 384.6906
124
+ 4000 train 5.884473 (lr=2.9524e-05) (hash(x)=160577202)
125
+ 4100 val loss 5.9442
126
+ 4100 val perplexity 381.5477
127
+ 4100 train 6.007852 (lr=2.8771e-05) (hash(x)=153858169)
128
+ 4200 val loss 5.9247
129
+ 4200 val perplexity 374.1573
130
+ 4200 train 5.942840 (lr=2.8013e-05) (hash(x)=155889149)
131
+ 4300 val loss 5.9165
132
+ 4300 val perplexity 371.1189
133
+ 4300 train 5.729906 (lr=2.7250e-05) (hash(x)=152294662)
134
+ 4400 val loss 5.8993
135
+ 4400 val perplexity 364.7923
136
+ 4400 train 5.796800 (lr=2.6483e-05) (hash(x)=141804386)
137
+ 4500 val loss 5.8835
138
+ 4500 val perplexity 359.0594
139
+ 4500 train 5.779256 (lr=2.5714e-05) (hash(x)=151095242)
140
+ 4600 val loss 5.8884
141
+ 4600 val perplexity 360.8189
142
+ 4600 train 5.833579 (lr=2.4943e-05) (hash(x)=156414699)
143
+ 4700 val loss 5.8623
144
+ 4700 val perplexity 351.5250
145
+ 4700 train 5.885815 (lr=2.4172e-05) (hash(x)=161556686)
146
+ 4800 val loss 5.8483
147
+ 4800 val perplexity 346.6370
148
+ 4800 train 5.760074 (lr=2.3402e-05) (hash(x)=149000293)
149
+ 4900 val loss 5.8378
150
+ 4900 val perplexity 343.0105
151
+ 4900 train 5.630357 (lr=2.2633e-05) (hash(x)=154349989)
152
+ 5000 val loss 5.8264
153
+ 5000 val perplexity 339.1286
154
+ 5000 train 5.662440 (lr=2.1868e-05) (hash(x)=131475967)
155
+ 5100 val loss 5.8129
156
+ 5100 val perplexity 334.5789
157
+ 5100 train 5.719586 (lr=2.1107e-05) (hash(x)=149717902)
158
+ 5200 val loss 5.8060
159
+ 5200 val perplexity 332.2750
160
+ 5200 train 5.670409 (lr=2.0351e-05) (hash(x)=151407999)
161
+ 5300 val loss 5.7981
162
+ 5300 val perplexity 329.6660
163
+ 5300 train 5.900123 (lr=1.9602e-05) (hash(x)=168602728)
164
+ 5400 val loss 5.7835
165
+ 5400 val perplexity 324.8809
166
+ 5400 train 5.742907 (lr=1.8860e-05) (hash(x)=158344511)
167
+ 5500 val loss 5.7781
168
+ 5500 val perplexity 323.1493
169
+ 5500 train 5.736913 (lr=1.8127e-05) (hash(x)=148350057)
170
+ 5600 val loss 5.7689
171
+ 5600 val perplexity 320.1697
172
+ 5600 train 5.838607 (lr=1.7403e-05) (hash(x)=153847323)
173
+ 5700 val loss 5.7593
174
+ 5700 val perplexity 317.1218
175
+ 5700 train 5.706427 (lr=1.6690e-05) (hash(x)=156607405)
176
+ 5800 val loss 5.7527
177
+ 5800 val perplexity 315.0317
178
+ 5800 train 5.520199 (lr=1.5989e-05) (hash(x)=145115031)
179
+ 5900 val loss 5.7456
180
+ 5900 val perplexity 312.8043
181
+ 5900 train 5.628171 (lr=1.5300e-05) (hash(x)=141584622)
182
+ 6000 val loss 5.7354
183
+ 6000 val perplexity 309.6448
184
+ 6000 train 5.666881 (lr=1.4625e-05) (hash(x)=146613857)
185
+ 6100 val loss 5.7322
186
+ 6100 val perplexity 308.6477
187
+ 6100 train 5.669149 (lr=1.3965e-05) (hash(x)=144621768)
188
+ 6200 val loss 5.7294
189
+ 6200 val perplexity 307.7820
190
+ 6200 train 5.664270 (lr=1.3320e-05) (hash(x)=146521760)
191
+ 6300 val loss 5.7176
192
+ 6300 val perplexity 304.1604
193
+ 6300 train 5.547387 (lr=1.2692e-05) (hash(x)=161378136)
194
+ 6400 val loss 5.7128
195
+ 6400 val perplexity 302.7064
196
+ 6400 train 5.478628 (lr=1.2081e-05) (hash(x)=141624235)
197
+ 6500 val loss 5.7064
198
+ 6500 val perplexity 300.7927
199
+ 6500 train 5.628902 (lr=1.1489e-05) (hash(x)=151197095)
200
+ 6600 val loss 5.7043
201
+ 6600 val perplexity 300.1533
202
+ 6600 train 5.721037 (lr=1.0916e-05) (hash(x)=153269571)
203
+ 6700 val loss 5.6970
204
+ 6700 val perplexity 297.9862
205
+ 6700 train 5.661511 (lr=1.0363e-05) (hash(x)=146111181)
206
+ 6800 val loss 5.6897
207
+ 6800 val perplexity 295.8090
208
+ 6800 train 5.722671 (lr=9.8310e-06) (hash(x)=147269760)
209
+ 6900 val loss 5.6870
210
+ 6900 val perplexity 295.0048
211
+ 6900 train 5.759125 (lr=9.3205e-06) (hash(x)=152912762)
212
+ 7000 val loss 5.6839
213
+ 7000 val perplexity 294.0924
214
+ 7000 train 5.766317 (lr=8.8324e-06) (hash(x)=165412343)
215
+ 7100 val loss 5.6741
216
+ 7100 val perplexity 291.2299
217
+ 7100 train 5.744351 (lr=8.3674e-06) (hash(x)=162866028)
218
+ 7200 val loss 5.6704
219
+ 7200 val perplexity 290.1535
220
+ 7200 train 5.638232 (lr=7.9261e-06) (hash(x)=142998115)
221
+ 7300 val loss 5.6689
222
+ 7300 val perplexity 289.7291
223
+ 7300 train 5.438991 (lr=7.5093e-06) (hash(x)=145486999)
224
+ 7400 val loss 5.6625
225
+ 7400 val perplexity 287.8556
226
+ 7400 train 5.632413 (lr=7.1174e-06) (hash(x)=155325873)
227
+ 7500 val loss 5.6589
228
+ 7500 val perplexity 286.8464
229
+ 7500 train 5.621664 (lr=6.7511e-06) (hash(x)=145131256)
230
+ 7600 val loss 5.6627
231
+ 7600 val perplexity 287.9161
232
+ 7600 train 5.585990 (lr=6.4109e-06) (hash(x)=144008365)
233
+ 7700 val loss 5.6527
234
+ 7700 val perplexity 285.0522
235
+ 7700 train 5.748820 (lr=6.0972e-06) (hash(x)=148848532)
236
+ 7800 val loss 5.6501
237
+ 7800 val perplexity 284.3308
238
+ 7800 train 5.638553 (lr=5.8107e-06) (hash(x)=150391642)
239
+ 7900 val loss 5.6506
240
+ 7900 val perplexity 284.4709
241
+ 7900 train 5.408098 (lr=5.5515e-06) (hash(x)=152191414)
242
+ 8000 val loss 5.6456
243
+ 8000 val perplexity 283.0550
244
+ 8000 train 5.726780 (lr=5.3203e-06) (hash(x)=159755587)
245
+ 8100 val loss 5.6430
246
+ 8100 val perplexity 282.2966
247
+ 8100 train 5.665362 (lr=5.1172e-06) (hash(x)=156664468)
248
+ 8200 val loss 5.6412
249
+ 8200 val perplexity 281.7932
250
+ 8200 train 5.488082 (lr=4.9425e-06) (hash(x)=139457379)
251
+ 8300 val loss 5.6457
252
+ 8300 val perplexity 283.0753
253
+ 8300 train 5.412615 (lr=4.7966e-06) (hash(x)=145478564)
254
+ 8400 val loss 5.6364
255
+ 8400 val perplexity 280.4597
256
+ 8400 train 5.574693 (lr=4.6796e-06) (hash(x)=154982769)
257
+ 8500 val loss 5.6342
258
+ 8500 val perplexity 279.8371
259
+ 8500 train 5.599327 (lr=4.5917e-06) (hash(x)=145798118)
260
+ 8600 val loss 5.6341
261
+ 8600 val perplexity 279.8044
262
+ 8600 train 5.540295 (lr=4.5330e-06) (hash(x)=143231551)
263
+ 8700 val loss 5.6379
264
+ 8700 val perplexity 280.8729
265
+ 8700 train 5.537607 (lr=4.5037e-06) (hash(x)=154780112)
266
+ 8749 val loss 5.6298
267
+ 8749 val perplexity 278.5986
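Side by side, the 5.5e-05 run ends at val loss 5.5793 and the 4.5e-05 run at 5.6298; the "val perplexity" column is just exp of that loss, which gives an easy sanity check on the log:

    import math

    # Final validation losses copied from the log above.
    print(math.exp(5.5793))  # ~264.88, matches the old run's 264.8789
    print(math.exp(5.6298))  # ~278.60, matches the new run's 278.5986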
attention_kindselective_n_heads4_seed1340/model_08749.pt CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:a2b41aec4b5aa02ea394f501290423bcd9f8ce3f46e72eca126939a6364eda72
3
  size 92843394
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d8e2b0468ad004e4a3dca37b6af504c947a49de6a4cfea81d9bb18d2376aedc4
3
  size 92843394
attention_kindselective_n_heads4_seed1340/optimizer_08749.pt CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:77641ea9c6c521b32f7a6a6ed7c39d1f0621a3ef114f834ef2d5b923b8687f0b
3
  size 179406214
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f1d9d9ebb3d1513f4cd781465ebb779d8b4ced4d2890de951f3517c540179754
3
  size 179406214
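Both checkpoints are stored as git-lfs pointers: only the oid changed, while the byte size is identical between the two runs. If you pull the new files, they can be checked against the pointer fields with a small helper like the one below (an illustrative sketch, not part of this repository; a pointer's oid is the SHA-256 of the file contents and size is its length in bytes):

    import hashlib

    def matches_lfs_pointer(path, expected_sha256, expected_size):
        # Stream the file, hash it, and compare against the pointer's oid/size.
        h = hashlib.sha256()
        n = 0
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(1 << 20), b""):
                h.update(chunk)
                n += len(chunk)
        return h.hexdigest() == expected_sha256 and n == expected_size

    # e.g. for the updated model checkpoint in this commit:
    # matches_lfs_pointer("model_08749.pt",
    #                     "d8e2b0468ad004e4a3dca37b6af504c947a49de6a4cfea81d9bb18d2376aedc4",
    #                     92843394)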