andrew-healey committed
Commit 4af7a38 · verified · 1 Parent(s): 8183a90

Upload folder using huggingface_hub

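The commit message points at huggingface_hub's `upload_folder` API. For context, a minimal sketch of how a commit like this is typically produced is below; the local folder path and `repo_id` are illustrative assumptions, not values taken from this commit.

```python
from huggingface_hub import HfApi

api = HfApi()  # authenticates via HF_TOKEN or a cached `huggingface-cli login`

# Upload an entire run directory as a single commit; large binaries such as
# the .pt checkpoints below are stored via Git LFS automatically.
api.upload_folder(
    folder_path="wider_is_better_9/attention_kindselective_n_heads4_seed1339",  # assumed local run dir
    repo_id="andrew-healey/wider_is_better_9",  # hypothetical repo id
    commit_message="Upload folder using huggingface_hub",
)
```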
attention_kindselective_n_heads4_seed1339/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_9/attention_kindselective_n_heads4_seed1339", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_9", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1339, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 3e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "3e-5_61440_4_1339", "n_embd": 256}

+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_9/attention_kindselective_n_heads4_seed1339", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_9", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1339, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "5e-5_61440_4_1339", "n_embd": 256}
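The two JSON blobs differ only in `max_lr` (3e-05 vs 5e-05) and the derived `key`. One quick way to confirm this, sketched here with hypothetical filenames for the two versions, is to diff the parsed dicts:

```python
import json

# Hypothetical paths to the old and new versions of args.json.
with open("args_old.json") as f_old, open("args_new.json") as f_new:
    old, new = json.load(f_old), json.load(f_new)

# Report every key whose value changed between the two configs.
for k in sorted(old.keys() | new.keys()):
    if old.get(k) != new.get(k):
        print(f"{k}: {old.get(k)!r} -> {new.get(k)!r}")
# Expected output for this commit:
# key: '3e-5_61440_4_1339' -> '5e-5_61440_4_1339'
# max_lr: 3e-05 -> 5e-05
```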
attention_kindselective_n_heads4_seed1339/log2.txt CHANGED
@@ -1,502 +1,49 @@
- max_steps: 8750
  max_steps: 8750
  0 val loss 11.2808
  0 val perplexity 79287.6797
- 0 val loss 11.2808
- 0 val perplexity 79287.5234
- 0 train 11.289339 (lr=6.0000e-08) (hash(x)=150724848)
- 0 train 11.289339 (lr=7.0000e-08) (hash(x)=150724848)
- 100 val loss 10.1249
- 100 val perplexity 24955.8281
- 100 train 10.136632 (lr=7.0700e-06) (hash(x)=149910534)
- 100 val loss 10.1868
- 100 val perplexity 26549.2832
- 100 train 10.199195 (lr=6.0600e-06) (hash(x)=149910534)
- 200 val loss 9.5679
- 200 val perplexity 14297.8906
- 200 train 9.597668 (lr=1.4070e-05) (hash(x)=148123706)
- 200 val loss 9.6580
- 200 val perplexity 15646.8330
- 200 train 9.688334 (lr=1.2060e-05) (hash(x)=148123706)
- 300 val loss 8.4851
- 300 val perplexity 4841.9927
- 300 train 8.403298 (lr=2.1070e-05) (hash(x)=146678221)
- 300 val loss 8.7199
- 300 val perplexity 6123.2988
- 300 train 8.652300 (lr=1.8060e-05) (hash(x)=146678221)
- 400 val loss 7.7157
- 400 val perplexity 2243.2341
- 400 train 7.622499 (lr=2.8070e-05) (hash(x)=151700982)
- 400 val loss 7.8706
- 400 val perplexity 2619.0610
- 400 train 7.789970 (lr=2.4060e-05) (hash(x)=151700982)
- 500 val loss 7.4807
- 500 val perplexity 1773.4902
- 500 train 7.566009 (lr=3.5000e-05) (hash(x)=156182087)
- 500 val loss 7.5420
- 500 val perplexity 1885.5305
- 500 train 7.629263 (lr=3.0000e-05) (hash(x)=156182087)
- 600 val loss 7.3659
- 600 val perplexity 1581.0912
- 600 train 7.345613 (lr=3.4989e-05) (hash(x)=149318660)
- 600 val loss 7.4137
- 600 val perplexity 1658.5233
- 600 train 7.403398 (lr=2.9990e-05) (hash(x)=149318660)
- 700 val loss 7.2842
- 700 val perplexity 1457.1127
- 700 train 7.324867 (lr=3.4954e-05) (hash(x)=150482428)
- 800 val loss 7.1606
- 800 val perplexity 1287.6595
- 800 train 7.034153 (lr=3.4897e-05) (hash(x)=143268605)
- 700 val loss 7.3358
- 700 val perplexity 1534.3062
- 700 train 7.378386 (lr=2.9961e-05) (hash(x)=150482428)
- 900 val loss 7.0731
- 900 val perplexity 1179.7480
- 900 train 7.036097 (lr=3.4818e-05) (hash(x)=152322423)
- 800 val loss 7.2311
- 800 val perplexity 1381.7266
- 800 train 7.106897 (lr=2.9912e-05) (hash(x)=143268605)
- 1000 val loss 7.0052
- 1000 val perplexity 1102.3225
- 1000 train 6.872244 (lr=3.4715e-05) (hash(x)=147904298)
- 900 val loss 7.1499
- 900 val perplexity 1273.9365
- 900 train 7.112676 (lr=2.9844e-05) (hash(x)=152322423)
- 1100 val loss 6.9231
- 1100 val perplexity 1015.4374
- 1100 train 7.109494 (lr=3.4591e-05) (hash(x)=154343147)
- 1000 val loss 7.0856
- 1000 val perplexity 1194.6191
- 1000 train 6.954570 (lr=2.9756e-05) (hash(x)=147904298)
- 1200 val loss 6.8335
- 1200 val perplexity 928.4275
- 1200 train 6.822800 (lr=3.4444e-05) (hash(x)=141843115)
- 1100 val loss 7.0176
- 1100 val perplexity 1116.0782
- 1100 train 7.204097 (lr=2.9649e-05) (hash(x)=154343147)
- 1300 val loss 6.7614
- 1300 val perplexity 863.8369
- 1300 train 6.647630 (lr=3.4275e-05) (hash(x)=145279030)
- 1200 val loss 6.9380
- 1200 val perplexity 1030.7321
- 1200 train 6.928824 (lr=2.9523e-05) (hash(x)=141843115)
- 1400 val loss 6.7027
- 1400 val perplexity 814.5911
- 1400 train 6.650844 (lr=3.4084e-05) (hash(x)=152507639)
- 1300 val loss 6.8701
- 1300 val perplexity 963.0128
- 1300 train 6.756791 (lr=2.9378e-05) (hash(x)=145279030)
- 1500 val loss 6.6486
- 1500 val perplexity 771.6702
- 1500 train 6.681236 (lr=3.3872e-05) (hash(x)=148473774)
- 1600 val loss 6.5658
- 1600 val perplexity 710.3974
- 1600 train 6.604722 (lr=3.3638e-05) (hash(x)=151117002)
- 1400 val loss 6.8108
- 1400 val perplexity 907.5634
- 1400 train 6.767691 (lr=2.9215e-05) (hash(x)=152507639)
- 1700 val loss 6.5219
- 1700 val perplexity 679.8840
- 1700 train 6.456680 (lr=3.3384e-05) (hash(x)=138011335)
- 1500 val loss 6.7521
- 1500 val perplexity 855.8281
- 1500 train 6.789307 (lr=2.9033e-05) (hash(x)=148473774)
- 1800 val loss 6.4774
- 1800 val perplexity 650.2515
- 1800 train 6.617409 (lr=3.3109e-05) (hash(x)=171180926)
- 1600 val loss 6.6831
- 1600 val perplexity 798.8001
- 1600 train 6.716653 (lr=2.8833e-05) (hash(x)=151117002)
- 1900 val loss 6.4318
- 1900 val perplexity 621.3072
- 1900 train 6.355664 (lr=3.2814e-05) (hash(x)=141769419)
- 1700 val loss 6.6285
- 1700 val perplexity 756.3746
- 1700 train 6.554468 (lr=2.8615e-05) (hash(x)=138011335)
- 2000 val loss 6.3922
- 2000 val perplexity 597.1834
- 2000 train 6.329804 (lr=3.2500e-05) (hash(x)=151963443)
- 1800 val loss 6.5782
- 1800 val perplexity 719.2757
- 1800 train 6.725643 (lr=2.8379e-05) (hash(x)=171180926)
- 2100 val loss 6.3737
- 2100 val perplexity 586.1983
- 2100 train 6.427935 (lr=3.2166e-05) (hash(x)=162947470)
- 1900 val loss 6.5278
- 1900 val perplexity 683.8735
- 1900 train 6.439614 (lr=2.8127e-05) (hash(x)=141769419)
- 2200 val loss 6.3265
- 2200 val perplexity 559.2034
- 2200 train 6.572281 (lr=3.1813e-05) (hash(x)=154954810)
- 2000 val loss 6.4865
- 2000 val perplexity 656.2449
- 2000 train 6.420126 (lr=2.7857e-05) (hash(x)=151963443)
- 2300 val loss 6.3022
- 2300 val perplexity 545.7751
- 2300 train 6.293201 (lr=3.1443e-05) (hash(x)=151878111)
- 2400 val loss 6.2787
- 2400 val perplexity 533.1205
- 2400 train 6.169630 (lr=3.1054e-05) (hash(x)=158661057)
- 2100 val loss 6.4588
- 2100 val perplexity 638.2729
- 2100 train 6.514755 (lr=2.7571e-05) (hash(x)=162947470)
- 2500 val loss 6.2441
- 2500 val perplexity 514.9452
- 2500 train 6.266413 (lr=3.0649e-05) (hash(x)=150925584)
- 2200 val loss 6.4110
- 2200 val perplexity 608.5148
- 2200 train 6.664954 (lr=2.7269e-05) (hash(x)=154954810)
- 2600 val loss 6.2241
- 2600 val perplexity 504.7674
- 2600 train 6.132840 (lr=3.0227e-05) (hash(x)=144515755)
- 2300 val loss 6.3852
- 2300 val perplexity 593.0313
- 2300 train 6.376594 (lr=2.6951e-05) (hash(x)=151878111)
- 2700 val loss 6.2047
- 2700 val perplexity 495.0883
- 2700 train 6.193225 (lr=2.9789e-05) (hash(x)=153109144)
- 2400 val loss 6.3573
- 2400 val perplexity 576.6909
- 2400 train 6.246971 (lr=2.6618e-05) (hash(x)=158661057)
- 2800 val loss 6.1741
- 2800 val perplexity 480.1690
- 2800 train 6.078516 (lr=2.9336e-05) (hash(x)=151152897)
- 2500 val loss 6.3226
- 2500 val perplexity 557.0036
- 2500 train 6.344519 (lr=2.6270e-05) (hash(x)=150925584)
- 2900 val loss 6.1540
- 2900 val perplexity 470.5836
- 2900 train 6.096991 (lr=2.8868e-05) (hash(x)=145800210)
- 2600 val loss 6.3012
- 2600 val perplexity 545.2101
- 2600 train 6.207327 (lr=2.5909e-05) (hash(x)=144515755)
- 3000 val loss 6.1405
- 3000 val perplexity 464.3056
- 3000 train 5.938540 (lr=2.8386e-05) (hash(x)=141997485)
- 2700 val loss 6.2843
- 2700 val perplexity 536.0944
- 2700 train 6.276767 (lr=2.5533e-05) (hash(x)=153109144)
- 3100 val loss 6.1141
- 3100 val perplexity 452.2033
- 3100 train 6.023589 (lr=2.7891e-05) (hash(x)=154049740)
- 3200 val loss 6.0990
- 3200 val perplexity 445.3922
- 3200 train 6.033494 (lr=2.7383e-05) (hash(x)=150471842)
- 2800 val loss 6.2540
- 2800 val perplexity 520.0851
- 2800 train 6.155468 (lr=2.5145e-05) (hash(x)=151152897)
- 3300 val loss 6.0854
- 3300 val perplexity 439.4073
- 3300 train 6.027749 (lr=2.6864e-05) (hash(x)=149048126)
- 2900 val loss 6.2297
- 2900 val perplexity 507.5872
- 2900 train 6.172598 (lr=2.4744e-05) (hash(x)=145800210)
- 3400 val loss 6.0636
- 3400 val perplexity 429.9261
- 3400 train 6.185919 (lr=2.6333e-05) (hash(x)=161261339)
- 3000 val loss 6.2173
- 3000 val perplexity 501.3599
- 3000 train 6.008087 (lr=2.4331e-05) (hash(x)=141997485)
- 3500 val loss 6.0527
- 3500 val perplexity 425.2686
- 3500 train 6.020202 (lr=2.5793e-05) (hash(x)=157495564)
- 3100 val loss 6.1885
- 3100 val perplexity 487.1272
- 3100 train 6.090694 (lr=2.3906e-05) (hash(x)=154049740)
- 3600 val loss 6.0389
- 3600 val perplexity 419.4500
- 3600 train 5.936598 (lr=2.5243e-05) (hash(x)=144352932)
- 3200 val loss 6.1727
- 3200 val perplexity 479.4807
- 3200 train 6.118541 (lr=2.3471e-05) (hash(x)=150471842)
- 3700 val loss 6.0198
- 3700 val perplexity 411.4905
- 3700 train 6.000595 (lr=2.4684e-05) (hash(x)=149389012)
- 3300 val loss 6.1624
- 3300 val perplexity 474.5893
- 3300 train 6.099824 (lr=2.3026e-05) (hash(x)=149048126)
- 3800 val loss 6.0058
- 3800 val perplexity 405.7742
- 3800 train 5.947519 (lr=2.4117e-05) (hash(x)=146607620)
- 3400 val loss 6.1404
- 3400 val perplexity 464.2498
- 3400 train 6.267547 (lr=2.2572e-05) (hash(x)=161261339)
- 3900 val loss 5.9940
- 3900 val perplexity 401.0047
- 3900 train 5.909438 (lr=2.3543e-05) (hash(x)=143754617)
- 3500 val loss 6.1301
- 3500 val perplexity 459.4873
- 3500 train 6.102514 (lr=2.2108e-05) (hash(x)=157495564)
- 4000 val loss 5.9769
- 4000 val perplexity 394.2093
- 4000 train 5.956694 (lr=2.2963e-05) (hash(x)=156930722)
- 4100 val loss 5.9657
- 4100 val perplexity 389.8218
- 4100 train 5.784899 (lr=2.2378e-05) (hash(x)=147500519)
- 3600 val loss 6.1144
- 3600 val perplexity 452.3314
- 3600 train 6.008862 (lr=2.1637e-05) (hash(x)=144352932)
- 4200 val loss 5.9562
- 4200 val perplexity 386.1222
- 4200 train 5.796459 (lr=2.1788e-05) (hash(x)=143232237)
- 3700 val loss 6.0986
- 3700 val perplexity 445.2134
- 3700 train 6.078876 (lr=2.1158e-05) (hash(x)=149389012)
- 4300 val loss 5.9433
- 4300 val perplexity 381.2018
- 4300 train 5.855104 (lr=2.1194e-05) (hash(x)=146811670)
- 3800 val loss 6.0828
- 3800 val perplexity 438.2579
- 3800 train 6.024745 (lr=2.0672e-05) (hash(x)=146607620)
- 4400 val loss 5.9278
- 4400 val perplexity 375.3206
- 4400 train 5.963883 (lr=2.0598e-05) (hash(x)=158418746)
- 3900 val loss 6.0744
- 3900 val perplexity 434.6102
- 3900 train 5.987369 (lr=2.0180e-05) (hash(x)=143754617)
- 4500 val loss 5.9224
- 4500 val perplexity 373.2968
- 4500 train 5.888974 (lr=1.9999e-05) (hash(x)=156695778)
- 4000 val loss 6.0584
- 4000 val perplexity 427.6898
- 4000 train 6.038101 (lr=1.9683e-05) (hash(x)=156930722)
- 4600 val loss 5.9099
- 4600 val perplexity 368.6750
- 4600 train 5.934970 (lr=1.9400e-05) (hash(x)=147791497)
- 4100 val loss 6.0466
- 4100 val perplexity 422.6849
- 4100 train 5.858241 (lr=1.9181e-05) (hash(x)=147500519)
- 4700 val loss 5.8958
- 4700 val perplexity 363.5157
- 4700 train 5.775061 (lr=1.8800e-05) (hash(x)=155533088)
- 4200 val loss 6.0418
- 4200 val perplexity 420.6553
- 4200 train 5.880073 (lr=1.8675e-05) (hash(x)=143232237)
- 4800 val loss 5.8900
- 4800 val perplexity 361.4218
- 4800 train 5.741070 (lr=1.8201e-05) (hash(x)=138350044)
- 4300 val loss 6.0258
- 4300 val perplexity 413.9633
- 4300 train 5.939585 (lr=1.8166e-05) (hash(x)=146811670)
- 4900 val loss 5.8761
- 4900 val perplexity 356.4004
- 4900 train 5.877145 (lr=1.7604e-05) (hash(x)=143735284)
- 5000 val loss 5.8655
- 5000 val perplexity 352.6599
- 5000 train 5.829748 (lr=1.7009e-05) (hash(x)=154976463)
- 4400 val loss 6.0129
- 4400 val perplexity 408.6630
- 4400 train 6.047963 (lr=1.7655e-05) (hash(x)=158418746)
- 5100 val loss 5.8599
- 5100 val perplexity 350.6802
- 5100 train 5.741970 (lr=1.6417e-05) (hash(x)=149894982)
- 4500 val loss 6.0092
- 4500 val perplexity 407.1400
- 4500 train 5.970369 (lr=1.7142e-05) (hash(x)=156695778)
- 5200 val loss 5.8555
- 5200 val perplexity 349.1558
- 5200 train 5.647627 (lr=1.5829e-05) (hash(x)=159326689)
- 4600 val loss 5.9965
- 4600 val perplexity 402.0312
- 4600 train 6.023885 (lr=1.6629e-05) (hash(x)=147791497)
- 5300 val loss 5.8430
- 5300 val perplexity 344.8275
- 5300 train 5.846123 (lr=1.5246e-05) (hash(x)=159484800)
- 4700 val loss 5.9832
- 4700 val perplexity 396.7080
- 4700 train 5.854864 (lr=1.6114e-05) (hash(x)=155533088)
- 5400 val loss 5.8314
- 5400 val perplexity 340.8238
- 5400 train 5.731322 (lr=1.4669e-05) (hash(x)=140385615)
- 4800 val loss 5.9774
- 4800 val perplexity 394.3975
- 4800 train 5.825758 (lr=1.5601e-05) (hash(x)=138350044)
- 5500 val loss 5.8240
- 5500 val perplexity 338.3344
- 5500 train 5.657810 (lr=1.4099e-05) (hash(x)=148498335)
- 4900 val loss 5.9659
- 4900 val perplexity 389.9207
- 4900 train 5.970723 (lr=1.5089e-05) (hash(x)=143735284)
- 5600 val loss 5.8222
- 5600 val perplexity 337.7233
- 5600 train 5.556942 (lr=1.3536e-05) (hash(x)=151907614)
- 5000 val loss 5.9558
- 5000 val perplexity 385.9966
- 5000 train 5.919913 (lr=1.4579e-05) (hash(x)=154976463)
- 5700 val loss 5.8141
- 5700 val perplexity 334.9905
- 5700 train 5.779860 (lr=1.2981e-05) (hash(x)=155192267)
- 5800 val loss 5.8026
- 5800 val perplexity 331.1440
- 5800 train 5.849533 (lr=1.2436e-05) (hash(x)=153132158)
- 5100 val loss 5.9527
- 5100 val perplexity 384.7841
- 5100 train 5.830037 (lr=1.4071e-05) (hash(x)=149894982)
- 5900 val loss 5.7982
- 5900 val perplexity 329.7009
- 5900 train 5.772695 (lr=1.1900e-05) (hash(x)=161446764)
- 5200 val loss 5.9438
- 5200 val perplexity 381.3849
- 5200 train 5.748489 (lr=1.3568e-05) (hash(x)=159326689)
- 6000 val loss 5.7969
- 6000 val perplexity 329.2916
- 6000 train 5.613126 (lr=1.1375e-05) (hash(x)=151512446)
- 5300 val loss 5.9373
- 5300 val perplexity 378.8938
- 5300 train 5.933059 (lr=1.3068e-05) (hash(x)=159484800)
- 6100 val loss 5.7852
- 6100 val perplexity 325.4444
- 6100 train 5.915392 (lr=1.0861e-05) (hash(x)=188094053)
- 5400 val loss 5.9255
- 5400 val perplexity 374.4638
- 5400 train 5.812152 (lr=1.2573e-05) (hash(x)=140385615)
- 6200 val loss 5.7787
- 6200 val perplexity 323.3293
- 6200 train 5.666321 (lr=1.0360e-05) (hash(x)=149389789)
- 5500 val loss 5.9199
- 5500 val perplexity 372.3792
- 5500 train 5.751431 (lr=1.2085e-05) (hash(x)=148498335)
- 6300 val loss 5.7735
- 6300 val perplexity 321.6672
- 6300 train 5.593270 (lr=9.8715e-06) (hash(x)=138212820)
- 5600 val loss 5.9173
- 5600 val perplexity 371.4073
- 5600 train 5.648520 (lr=1.1602e-05) (hash(x)=151907614)
- 6400 val loss 5.7708
- 6400 val perplexity 320.7969
- 6400 train 5.569121 (lr=9.3966e-06) (hash(x)=146535423)
- 5700 val loss 5.9118
- 5700 val perplexity 369.3690
- 5700 train 5.872494 (lr=1.1127e-05) (hash(x)=155192267)
- 6500 val loss 5.7620
- 6500 val perplexity 317.9776
- 6500 train 5.634068 (lr=8.9359e-06) (hash(x)=145950843)
- 6600 val loss 5.7580
- 6600 val perplexity 316.7046
- 6600 train 5.652801 (lr=8.4903e-06) (hash(x)=141162902)
- 5800 val loss 5.9006
- 5800 val perplexity 365.2532
- 5800 train 5.951333 (lr=1.0659e-05) (hash(x)=153132158)
- 6700 val loss 5.7561
- 6700 val perplexity 316.1148
- 6700 train 5.670326 (lr=8.0602e-06) (hash(x)=153018737)
- 5900 val loss 5.8966
- 5900 val perplexity 363.7935
- 5900 train 5.868530 (lr=1.0200e-05) (hash(x)=161446764)
- 6800 val loss 5.7493
- 6800 val perplexity 313.9696
- 6800 train 5.676184 (lr=7.6463e-06) (hash(x)=155640155)
- 6000 val loss 5.8915
- 6000 val perplexity 361.9300
- 6000 train 5.715796 (lr=9.7500e-06) (hash(x)=151512446)
- 6900 val loss 5.7452
- 6900 val perplexity 312.6940
- 6900 train 5.712743 (lr=7.2493e-06) (hash(x)=153722115)
- 6100 val loss 5.8828
- 6100 val perplexity 358.8126
- 6100 train 6.019364 (lr=9.3098e-06) (hash(x)=188094053)
- 7000 val loss 5.7401
- 7000 val perplexity 311.0814
- 7000 train 5.646478 (lr=6.8697e-06) (hash(x)=146953450)
- 6200 val loss 5.8757
- 6200 val perplexity 356.2753
- 6200 train 5.769423 (lr=8.8800e-06) (hash(x)=149389789)
- 7100 val loss 5.7406
- 7100 val perplexity 311.2574
- 7100 train 5.574067 (lr=6.5080e-06) (hash(x)=137663885)
- 6300 val loss 5.8715
- 6300 val perplexity 354.7726
- 6300 train 5.685650 (lr=8.4613e-06) (hash(x)=138212820)
- 7200 val loss 5.7313
- 7200 val perplexity 308.3727
- 7200 train 5.794419 (lr=6.1648e-06) (hash(x)=146172950)
- 6400 val loss 5.8699
- 6400 val perplexity 354.1973
- 6400 train 5.674243 (lr=8.0542e-06) (hash(x)=146535423)
- 7300 val loss 5.7278
- 7300 val perplexity 307.2869
- 7300 train 5.623385 (lr=5.8405e-06) (hash(x)=150018163)
- 6500 val loss 5.8620
- 6500 val perplexity 351.4325
- 6500 train 5.728920 (lr=7.6594e-06) (hash(x)=145950843)
- 7400 val loss 5.7251
- 7400 val perplexity 306.4714
- 7400 train 5.720573 (lr=5.5357e-06) (hash(x)=145351166)
- 7500 val loss 5.7261
- 7500 val perplexity 306.7808
- 7500 train 5.509435 (lr=5.2508e-06) (hash(x)=145292116)
- 6600 val loss 5.8585
- 6600 val perplexity 350.2053
- 6600 train 5.750317 (lr=7.2774e-06) (hash(x)=141162902)
- 7600 val loss 5.7189
- 7600 val perplexity 304.5720
- 7600 train 5.639998 (lr=4.9862e-06) (hash(x)=150235132)
- 6700 val loss 5.8564
- 6700 val perplexity 349.4663
- 6700 train 5.760009 (lr=6.9087e-06) (hash(x)=153018737)
- 7700 val loss 5.7165
- 7700 val perplexity 303.8320
- 7700 train 5.625147 (lr=4.7423e-06) (hash(x)=154543455)
- 6800 val loss 5.8492
- 6800 val perplexity 346.9674
- 6800 train 5.780306 (lr=6.5540e-06) (hash(x)=155640155)
- 7800 val loss 5.7141
- 7800 val perplexity 303.1216
- 7800 train 5.606357 (lr=4.5194e-06) (hash(x)=142456852)
- 6900 val loss 5.8453
- 6900 val perplexity 345.5936
- 6900 train 5.812240 (lr=6.2137e-06) (hash(x)=153722115)
- 7900 val loss 5.7148
- 7900 val perplexity 303.3319
- 7900 train 5.495040 (lr=4.3179e-06) (hash(x)=147363479)
- 7000 val loss 5.8411
- 7000 val perplexity 344.1611
- 7000 train 5.742386 (lr=5.8883e-06) (hash(x)=146953450)
- 8000 val loss 5.7078
- 8000 val perplexity 301.2154
- 8000 train 5.742621 (lr=4.1380e-06) (hash(x)=156122973)
- 7100 val loss 5.8403
- 7100 val perplexity 343.8915
- 7100 train 5.669028 (lr=5.5783e-06) (hash(x)=137663885)
- 8100 val loss 5.7063
- 8100 val perplexity 300.7574
- 8100 train 5.747419 (lr=3.9800e-06) (hash(x)=156153179)
- 7200 val loss 5.8320
- 7200 val perplexity 341.0402
- 7200 train 5.886730 (lr=5.2841e-06) (hash(x)=146172950)
- 8200 val loss 5.7053
- 8200 val perplexity 300.4537
- 8200 train 5.748656 (lr=3.8442e-06) (hash(x)=146430698)
- 8300 val loss 5.7037
- 8300 val perplexity 299.9784
- 8300 train 5.569663 (lr=3.7307e-06) (hash(x)=143507257)
- 7300 val loss 5.8294
- 7300 val perplexity 340.1391
- 7300 train 5.728073 (lr=5.0062e-06) (hash(x)=150018163)
- 8400 val loss 5.6993
- 8400 val perplexity 298.6586
- 8400 train 5.759250 (lr=3.6397e-06) (hash(x)=166272643)
- 7400 val loss 5.8271
- 7400 val perplexity 339.3649
- 7400 train 5.802515 (lr=4.7449e-06) (hash(x)=145351166)
- 8500 val loss 5.6977
- 8500 val perplexity 298.1721
- 8500 train 5.598984 (lr=3.5713e-06) (hash(x)=143887848)
- 7500 val loss 5.8267
- 7500 val perplexity 339.2248
- 7500 train 5.605633 (lr=4.5007e-06) (hash(x)=145292116)
- 8600 val loss 5.6975
- 8600 val perplexity 298.1066
- 8600 train 5.740758 (lr=3.5257e-06) (hash(x)=156900341)
- 7600 val loss 5.8205
- 7600 val perplexity 337.1429
- 7600 train 5.732724 (lr=4.2739e-06) (hash(x)=150235132)
- 8700 val loss 5.6928
- 8700 val perplexity 296.7115
- 8700 train 5.918503 (lr=3.5029e-06) (hash(x)=146417632)
- 8749 val loss 5.6925
- 8749 val perplexity 296.6473
- 7700 val loss 5.8191
- 7700 val perplexity 336.6639
- 7700 train 5.747572 (lr=4.0648e-06) (hash(x)=154543455)
 
+ 8000 train 5.839575 (lr=3.5468e-06) (hash(x)=156122973)
  max_steps: 8750
  0 val loss 11.2808
  0 val perplexity 79287.6797
+ 8100 val loss 5.8084
+ 8100 val perplexity 333.0969
+ 8100 train 5.844770 (lr=3.4114e-06) (hash(x)=156153179)
+ 0 train 11.289339 (lr=1.0000e-07) (hash(x)=150724848)
+ 100 val loss 10.0102
+ 100 val perplexity 22253.1465
+ 100 train 10.025309 (lr=1.0100e-05) (hash(x)=149910534)
+ 8200 val loss 5.8078
+ 8200 val perplexity 332.8854
+ 8200 train 5.844475 (lr=3.2950e-06) (hash(x)=146430698)
+ 200 val loss 9.2383
+ 200 val perplexity 10283.3877
+ 200 train 9.265762 (lr=2.0100e-05) (hash(x)=148123706)
+ 8300 val loss 5.8067
+ 8300 val perplexity 332.5358
+ 8300 train 5.676077 (lr=3.1977e-06) (hash(x)=143507257)
+ 300 val loss 8.0321
+ 300 val perplexity 3078.1367
+ 300 train 7.902669 (lr=3.0100e-05) (hash(x)=146678221)
+ 8400 val loss 5.8027
+ 8400 val perplexity 331.1894
+ 8400 train 5.896213 (lr=3.1197e-06) (hash(x)=166272643)
+ 400 val loss 7.5622
+ 400 val perplexity 1924.0914
+ 400 train 7.451694 (lr=4.0100e-05) (hash(x)=151700982)
+ 8500 val loss 5.8010
+ 8500 val perplexity 330.6212
+ 8500 train 5.702298 (lr=3.0611e-06) (hash(x)=143887848)
+ 500 val loss 7.4032
+ 500 val perplexity 1641.3018
+ 500 train 7.482969 (lr=5.0000e-05) (hash(x)=156182087)
+ 8600 val loss 5.8008
+ 8600 val perplexity 330.5657
+ 8600 train 5.850898 (lr=3.0220e-06) (hash(x)=156900341)
+ 600 val loss 7.2893
+ 600 val perplexity 1464.6086
+ 600 train 7.262670 (lr=4.9984e-05) (hash(x)=149318660)
+ 8700 val loss 5.7967
+ 8700 val perplexity 329.2034
+ 8700 train 6.024566 (lr=3.0024e-06) (hash(x)=146417632)
+ 700 val loss 7.1942
+ 700 val perplexity 1331.6840
+ 700 train 7.235372 (lr=4.9935e-05) (hash(x)=150482428)
+ 8749 val loss 5.7958
+ 8749 val perplexity 328.9303
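The log format above is plain text with `<step> val loss <value>`, `<step> val perplexity <value>`, and `<step> train <loss> (lr=...) (hash(x)=...)` lines; note that steps appear out of order because two runs wrote to the same file. A minimal sketch for extracting the validation-loss curve from such a log (the path is this repo's, but the parser itself is an illustrative assumption, not tooling from this commit):

```python
import re

# Matches lines of the form "<step> val loss <value>".
VAL_LOSS = re.compile(r"^(\d+) val loss ([\d.]+)$")

def val_losses(path: str) -> list[tuple[int, float]]:
    """Return (step, val_loss) pairs in file order; steps may repeat
    when more than one run appends to the same log."""
    pairs = []
    with open(path) as f:
        for line in f:
            m = VAL_LOSS.match(line.strip())
            if m:
                pairs.append((int(m.group(1)), float(m.group(2))))
    return pairs

# The last entry of the new log above should be (8749, 5.7958).
print(val_losses("attention_kindselective_n_heads4_seed1339/log2.txt")[-1])
```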
attention_kindselective_n_heads4_seed1339/model_08749.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5f9f086e5bc9016de004cf97e0e1fe7e0a7ef5565d49ea076873db395a988fe1
  size 92843394

  version https://git-lfs.github.com/spec/v1
+ oid sha256:e5e246a3164bacf50817c15a67bd6b599ff7acc11ae461b713148076d24a4299
  size 92843394
attention_kindselective_n_heads4_seed1339/optimizer_08749.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:22c2cc84339b65454afc89bf4f0daaa707e2a4500376081f0b13ded666f36934
  size 179406214

  version https://git-lfs.github.com/spec/v1
+ oid sha256:4ee398ac385f1f29570d770fc19076827ace193d5ebf660ef5f349bc78d93979
  size 179406214
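The two `.pt` diffs above are Git LFS pointer files, not the checkpoints themselves: only the sha256 oid changes, while the payload size is unchanged. To fetch the resolved binary rather than the pointer, one option is huggingface_hub's `hf_hub_download`; the `repo_id` below is a hypothetical assumption, since the commit page does not state it.

```python
from huggingface_hub import hf_hub_download

# Resolves the LFS pointer and downloads the actual 92843394-byte checkpoint.
ckpt_path = hf_hub_download(
    repo_id="andrew-healey/wider_is_better_9",  # hypothetical repo id
    filename="attention_kindselective_n_heads4_seed1339/model_08749.pt",
)
print(ckpt_path)
```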