andrew-healey committed
Commit 4023451 · verified · 1 Parent(s): 2e7a9ec

Upload folder using huggingface_hub

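The commit message above is the default one emitted by huggingface_hub when a folder is pushed in a single commit. A minimal sketch of the kind of call that produces it, assuming the log_dir from args.json is the local folder; the repo_id below is a placeholder, not taken from this page:

# Hypothetical reproduction of the upload; repo_id is a placeholder.
from huggingface_hub import HfApi

api = HfApi()  # picks up a cached token or the HF_TOKEN environment variable
api.upload_folder(
    folder_path="wider_is_better_4/attention_kindselective_n_heads4_seed1338",
    repo_id="andrew-healey/wider_is_better_4",  # placeholder repo id
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)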
attention_kindselective_n_heads4_seed1338/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_4/attention_kindselective_n_heads4_seed1338", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_4", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1338, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "0.5e-4_30720_4_1338", "n_embd": 256}
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_4/attention_kindselective_n_heads4_seed1338", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_4", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1338, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 3e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "3e-5_30720_4_1338", "n_embd": 256}
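The only substantive change in this config diff is max_lr dropping from 5e-05 to 3e-05, with the run key renamed to match. The learning rates in log2.txt below are consistent with linear warmup over warmup_steps followed by cosine decay; a sketch of that schedule, assuming a floor of max_lr/10, which is inferred from the final logged values rather than stated in args.json:

import math

# Values from args.json; the min_lr ratio is an inference, not in the config.
max_lr, warmup_steps, max_steps = 3e-05, 200, 10000
min_lr = max_lr / 10  # the logged lr bottoms out near 3.0e-06

def lr_at(step: int) -> float:
    if step < warmup_steps:  # linear warmup
        return max_lr * (step + 1) / warmup_steps
    ratio = (step - warmup_steps) / (max_steps - warmup_steps)
    coeff = 0.5 * (1.0 + math.cos(math.pi * ratio))  # cosine decay to min_lr
    return min_lr + coeff * (max_lr - min_lr)

# Reproduces the logged rates: 1.5000e-07 at step 0, 1.5150e-05 at step 100,
# 2.9993e-05 at step 300, 3.0069e-06 at step 9900.
for step in (0, 100, 300, 9900):
    print(step, f"{lr_at(step):.4e}")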
attention_kindselective_n_heads4_seed1338/log2.txt CHANGED
@@ -1,303 +1,303 @@
  max_steps: 10000
  0 val loss 11.2663
  0 val perplexity 78147.0000
- 0 train 11.263341 (lr=2.5000e-07) (hash(x)=150327452)
- 100 val loss 9.7680
- 100 val perplexity 17465.8770
- 100 train 9.776854 (lr=2.5250e-05) (hash(x)=166441190)
- 200 val loss 8.2656
- 200 val perplexity 3887.9541
- 200 train 8.404812 (lr=5.0000e-05) (hash(x)=166780046)
- 300 val loss 7.6075
- 300 val perplexity 2013.3032
- 300 train 7.561357 (lr=4.9988e-05) (hash(x)=159835303)
- 400 val loss 7.5006
- 400 val perplexity 1809.1592
- 400 train 7.274863 (lr=4.9954e-05) (hash(x)=155040610)
- 500 val loss 7.4506
- 500 val perplexity 1720.8308
- 500 train 7.180302 (lr=4.9896e-05) (hash(x)=130190460)
- 600 val loss 7.3842
- 600 val perplexity 1610.4052
- 600 train 7.403039 (lr=4.9815e-05) (hash(x)=155504036)
- 700 val loss 7.3113
- 700 val perplexity 1497.1669
- 700 train 7.167824 (lr=4.9712e-05) (hash(x)=137347213)
- 800 val loss 7.2409
- 800 val perplexity 1395.3639
- 800 train 7.086001 (lr=4.9585e-05) (hash(x)=143823248)
- 900 val loss 7.1756
- 900 val perplexity 1307.1094
- 900 train 7.283320 (lr=4.9436e-05) (hash(x)=156260416)
- 1000 val loss 7.1145
- 1000 val perplexity 1229.6963
- 1000 train 7.071252 (lr=4.9264e-05) (hash(x)=143734685)
- 1100 val loss 7.0581
- 1100 val perplexity 1162.2678
- 1100 train 6.978105 (lr=4.9070e-05) (hash(x)=160013925)
- 1200 val loss 7.0159
- 1200 val perplexity 1114.1937
- 1200 train 6.744773 (lr=4.8854e-05) (hash(x)=150678249)
- 1300 val loss 6.9693
- 1300 val perplexity 1063.5088
- 1300 train 6.734881 (lr=4.8616e-05) (hash(x)=149073315)
- 1400 val loss 6.9083
- 1400 val perplexity 1000.5272
- 1400 train 7.205764 (lr=4.8356e-05) (hash(x)=175802021)
- 1500 val loss 6.8426
- 1500 val perplexity 936.9531
- 1500 train 7.109816 (lr=4.8074e-05) (hash(x)=171034639)
- 1600 val loss 6.7799
- 1600 val perplexity 880.0223
- 1600 train 6.994730 (lr=4.7772e-05) (hash(x)=158681215)
- 1700 val loss 6.7242
- 1700 val perplexity 832.2708
- 1700 train 6.673022 (lr=4.7448e-05) (hash(x)=152116061)
- 1800 val loss 6.6782
- 1800 val perplexity 794.9266
- 1800 train 6.616482 (lr=4.7105e-05) (hash(x)=146108145)
- 1900 val loss 6.6421
- 1900 val perplexity 766.7340
- 1900 train 6.473023 (lr=4.6741e-05) (hash(x)=147598108)
- 2000 val loss 6.6130
- 2000 val perplexity 744.6790
- 2000 train 6.446514 (lr=4.6357e-05) (hash(x)=154996086)
- 2100 val loss 6.5858
- 2100 val perplexity 724.7159
- 2100 train 6.348539 (lr=4.5954e-05) (hash(x)=153396183)
- 2200 val loss 6.5170
- 2200 val perplexity 676.5494
- 2200 train 6.519897 (lr=4.5532e-05) (hash(x)=153885445)
- 2300 val loss 6.4963
- 2300 val perplexity 662.7046
- 2300 train 6.508783 (lr=4.5091e-05) (hash(x)=159666385)
- 2400 val loss 6.4541
- 2400 val perplexity 635.2918
- 2400 train 6.450172 (lr=4.4633e-05) (hash(x)=142353087)
- 2500 val loss 6.4255
- 2500 val perplexity 617.4177
- 2500 train 6.436049 (lr=4.4156e-05) (hash(x)=146491718)
- 2600 val loss 6.3965
- 2600 val perplexity 599.7672
- 2600 train 6.324273 (lr=4.3663e-05) (hash(x)=150750353)
- 2700 val loss 6.3785
- 2700 val perplexity 589.0278
- 2700 train 6.106255 (lr=4.3153e-05) (hash(x)=129849193)
- 2800 val loss 6.3688
- 2800 val perplexity 583.3700
- 2800 train 6.194311 (lr=4.2627e-05) (hash(x)=152767913)
- 2900 val loss 6.3537
- 2900 val perplexity 574.6100
- 2900 train 6.117503 (lr=4.2085e-05) (hash(x)=146531140)
- 3000 val loss 6.3201
- 3000 val perplexity 555.6428
- 3000 train 6.412408 (lr=4.1529e-05) (hash(x)=151562048)
- 3100 val loss 6.2970
- 3100 val perplexity 542.9517
- 3100 train 6.253851 (lr=4.0957e-05) (hash(x)=146001424)
- 3200 val loss 6.2832
- 3200 val perplexity 535.4943
- 3200 train 6.354994 (lr=4.0373e-05) (hash(x)=166486165)
- 3300 val loss 6.2554
- 3300 val perplexity 520.8416
- 3300 train 6.189920 (lr=3.9775e-05) (hash(x)=150866680)
- 3400 val loss 6.2483
- 3400 val perplexity 517.1379
- 3400 train 6.144484 (lr=3.9164e-05) (hash(x)=143900419)
- 3500 val loss 6.2245
- 3500 val perplexity 504.9602
- 3500 train 6.042019 (lr=3.8541e-05) (hash(x)=148845794)
- 3600 val loss 6.2137
- 3600 val perplexity 499.5549
- 3600 train 6.040150 (lr=3.7907e-05) (hash(x)=145667796)
- 3700 val loss 6.2002
- 3700 val perplexity 492.8700
- 3700 train 6.218114 (lr=3.7262e-05) (hash(x)=163563851)
- 3800 val loss 6.1895
- 3800 val perplexity 487.6252
- 3800 train 6.143688 (lr=3.6608e-05) (hash(x)=147488689)
- 3900 val loss 6.1622
- 3900 val perplexity 474.4812
- 3900 train 6.172974 (lr=3.5944e-05) (hash(x)=148186608)
- 4000 val loss 6.1476
- 4000 val perplexity 467.6026
- 4000 train 6.033812 (lr=3.5271e-05) (hash(x)=142970187)
- 4100 val loss 6.1300
- 4100 val perplexity 459.4391
- 4100 train 6.274983 (lr=3.4590e-05) (hash(x)=141584883)
- 4200 val loss 6.1217
- 4200 val perplexity 455.6507
- 4200 train 5.920924 (lr=3.3902e-05) (hash(x)=145664585)
- 4300 val loss 6.1084
- 4300 val perplexity 449.6054
- 4300 train 5.988526 (lr=3.3207e-05) (hash(x)=143736499)
- 4400 val loss 6.1028
- 4400 val perplexity 447.1300
- 4400 train 5.933807 (lr=3.2507e-05) (hash(x)=151883322)
- 4500 val loss 6.0935
- 4500 val perplexity 442.9734
- 4500 train 5.971932 (lr=3.1801e-05) (hash(x)=153904871)
- 4600 val loss 6.0849
- 4600 val perplexity 439.1603
- 4600 train 6.182971 (lr=3.1091e-05) (hash(x)=154893521)
- 4700 val loss 6.0605
- 4700 val perplexity 428.5926
- 4700 train 6.105700 (lr=3.0377e-05) (hash(x)=152323949)
- 4800 val loss 6.0517
- 4800 val perplexity 424.8422
- 4800 train 6.006094 (lr=2.9661e-05) (hash(x)=154104619)
- 4900 val loss 6.0391
- 4900 val perplexity 419.5204
- 4900 train 6.183090 (lr=2.8942e-05) (hash(x)=146311426)
- 5000 val loss 6.0307
- 5000 val perplexity 415.9913
- 5000 train 6.007878 (lr=2.8221e-05) (hash(x)=156741847)
- 5100 val loss 6.0244
- 5100 val perplexity 413.4094
- 5100 train 5.806924 (lr=2.7500e-05) (hash(x)=142086346)
- 5200 val loss 6.0194
- 5200 val perplexity 411.3459
- 5200 train 5.789549 (lr=2.6779e-05) (hash(x)=150265428)
- 5300 val loss 6.0178
- 5300 val perplexity 410.6676
- 5300 train 5.877178 (lr=2.6058e-05) (hash(x)=151339108)
- 5400 val loss 6.0044
- 5400 val perplexity 405.2266
- 5400 train 5.971837 (lr=2.5339e-05) (hash(x)=154654372)
- 5500 val loss 5.9877
- 5500 val perplexity 398.4772
- 5500 train 6.004115 (lr=2.4623e-05) (hash(x)=150575051)
- 5600 val loss 5.9764
- 5600 val perplexity 394.0124
- 5600 train 5.889816 (lr=2.3909e-05) (hash(x)=140396423)
- 5700 val loss 5.9682
- 5700 val perplexity 390.8166
- 5700 train 5.814392 (lr=2.3199e-05) (hash(x)=144678758)
- 5800 val loss 5.9581
- 5800 val perplexity 386.8771
- 5800 train 6.018045 (lr=2.2493e-05) (hash(x)=151992743)
- 5900 val loss 5.9556
- 5900 val perplexity 385.8925
- 5900 train 5.750163 (lr=2.1793e-05) (hash(x)=144396927)
- 6000 val loss 5.9481
- 6000 val perplexity 383.0098
- 6000 train 5.898988 (lr=2.1098e-05) (hash(x)=165478625)
- 6100 val loss 5.9473
- 6100 val perplexity 382.7315
- 6100 train 5.683533 (lr=2.0410e-05) (hash(x)=147088621)
- 6200 val loss 5.9423
- 6200 val perplexity 380.8150
- 6200 train 5.709328 (lr=1.9729e-05) (hash(x)=140794994)
- 6300 val loss 5.9301
- 6300 val perplexity 376.2013
- 6300 train 5.876333 (lr=1.9056e-05) (hash(x)=134780906)
- 6400 val loss 5.9171
- 6400 val perplexity 371.3382
- 6400 train 5.900079 (lr=1.8392e-05) (hash(x)=149023655)
- 6500 val loss 5.9116
- 6500 val perplexity 369.3044
- 6500 train 5.767069 (lr=1.7738e-05) (hash(x)=147497796)
- 6600 val loss 5.9113
- 6600 val perplexity 369.1790
- 6600 train 5.760261 (lr=1.7093e-05) (hash(x)=152902689)
- 6700 val loss 5.9061
- 6700 val perplexity 367.2793
- 6700 train 5.944985 (lr=1.6459e-05) (hash(x)=153846046)
- 6800 val loss 5.8929
- 6800 val perplexity 362.4490
- 6800 train 6.026623 (lr=1.5836e-05) (hash(x)=158512738)
- 6900 val loss 5.8884
- 6900 val perplexity 360.8391
- 6900 train 6.427971 (lr=1.5225e-05) (hash(x)=156849968)
- 7000 val loss 5.8888
- 7000 val perplexity 360.9831
- 7000 train 5.743077 (lr=1.4627e-05) (hash(x)=142395855)
- 7100 val loss 5.8795
- 7100 val perplexity 357.6370
- 7100 train 5.722335 (lr=1.4043e-05) (hash(x)=147114884)
- 7200 val loss 5.8715
- 7200 val perplexity 354.7682
- 7200 train 5.895081 (lr=1.3471e-05) (hash(x)=156979839)
- 7300 val loss 5.8697
- 7300 val perplexity 354.1501
- 7300 train 5.670859 (lr=1.2915e-05) (hash(x)=145584373)
- 7400 val loss 5.8667
- 7400 val perplexity 353.0789
- 7400 train 5.616560 (lr=1.2373e-05) (hash(x)=141508204)
- 7500 val loss 5.8605
- 7500 val perplexity 350.8938
- 7500 train 5.897530 (lr=1.1847e-05) (hash(x)=148803965)
- 7600 val loss 5.8527
- 7600 val perplexity 348.1857
- 7600 train 5.919757 (lr=1.1337e-05) (hash(x)=151019676)
- 7700 val loss 5.8463
- 7700 val perplexity 345.9438
- 7700 train 5.939406 (lr=1.0844e-05) (hash(x)=143155750)
- 7800 val loss 5.8467
- 7800 val perplexity 346.0931
- 7800 train 5.885753 (lr=1.0367e-05) (hash(x)=152569653)
- 7900 val loss 5.8394
- 7900 val perplexity 343.5809
- 7900 train 5.719753 (lr=9.9088e-06) (hash(x)=143519455)
- 8000 val loss 5.8384
- 8000 val perplexity 343.2177
- 8000 train 5.945567 (lr=9.4682e-06) (hash(x)=161180944)
- 8100 val loss 5.8374
- 8100 val perplexity 342.8810
- 8100 train 5.860254 (lr=9.0461e-06) (hash(x)=154107345)
- 8200 val loss 5.8361
- 8200 val perplexity 342.4522
- 8200 train 5.871372 (lr=8.6430e-06) (hash(x)=152486517)
- 8300 val loss 5.8277
- 8300 val perplexity 339.5885
- 8300 train 5.832849 (lr=8.2593e-06) (hash(x)=156167749)
- 8400 val loss 5.8224
- 8400 val perplexity 337.7906
- 8400 train 5.911955 (lr=7.8953e-06) (hash(x)=149155006)
- 8500 val loss 5.8189
- 8500 val perplexity 336.6066
- 8500 train 5.774584 (lr=7.5515e-06) (hash(x)=147844390)
- 8600 val loss 5.8161
- 8600 val perplexity 335.6637
- 8600 train 6.070689 (lr=7.2282e-06) (hash(x)=165753320)
- 8700 val loss 5.8162
- 8700 val perplexity 335.7093
- 8700 train 5.592045 (lr=6.9257e-06) (hash(x)=146079979)
- 8800 val loss 5.8135
- 8800 val perplexity 334.8026
- 8800 train 6.002615 (lr=6.6444e-06) (hash(x)=172259509)
- 8900 val loss 5.8130
- 8900 val perplexity 334.6322
- 8900 train 5.497927 (lr=6.3845e-06) (hash(x)=145148314)
- 9000 val loss 5.8122
- 9000 val perplexity 334.3413
- 9000 train 5.735715 (lr=6.1462e-06) (hash(x)=144250633)
- 9100 val loss 5.8077
- 9100 val perplexity 332.8481
- 9100 train 5.904010 (lr=5.9300e-06) (hash(x)=157219797)
- 9200 val loss 5.8026
- 9200 val perplexity 331.1507
- 9200 train 5.841455 (lr=5.7359e-06) (hash(x)=142743778)
- 9300 val loss 5.8006
- 9300 val perplexity 330.4972
- 9300 train 5.726264 (lr=5.5641e-06) (hash(x)=139669771)
- 9400 val loss 5.7973
- 9400 val perplexity 329.4224
- 9400 train 5.843239 (lr=5.4149e-06) (hash(x)=145916843)
- 9500 val loss 5.7967
- 9500 val perplexity 329.2191
- 9500 train 5.718668 (lr=5.2884e-06) (hash(x)=150196125)
- 9600 val loss 5.7954
- 9600 val perplexity 328.7936
- 9600 train 6.106845 (lr=5.1847e-06) (hash(x)=160041419)
- 9700 val loss 5.7950
- 9700 val perplexity 328.6423
- 9700 train 5.582028 (lr=5.1040e-06) (hash(x)=139931627)
- 9800 val loss 5.7942
- 9800 val perplexity 328.3821
- 9800 train 5.775959 (lr=5.0462e-06) (hash(x)=150370792)
- 9900 val loss 5.7914
- 9900 val perplexity 327.4780
- 9900 train 5.631240 (lr=5.0116e-06) (hash(x)=153014886)
- 9999 val loss 5.7907
- 9999 val perplexity 327.2414
+ 0 train 11.263341 (lr=1.5000e-07) (hash(x)=150327452)
+ 100 val loss 9.9038
+ 100 val perplexity 20006.5938
+ 100 train 9.914461 (lr=1.5150e-05) (hash(x)=166441190)
+ 200 val loss 8.9665
+ 200 val perplexity 7836.3984
+ 200 train 9.079846 (lr=3.0000e-05) (hash(x)=166780046)
+ 300 val loss 8.0400
+ 300 val perplexity 3102.7432
+ 300 train 7.989579 (lr=2.9993e-05) (hash(x)=159835303)
+ 400 val loss 7.7330
+ 400 val perplexity 2282.3311
+ 400 train 7.532744 (lr=2.9972e-05) (hash(x)=155040610)
+ 500 val loss 7.6145
+ 500 val perplexity 2027.4534
+ 500 train 7.370183 (lr=2.9938e-05) (hash(x)=130190460)
+ 600 val loss 7.5275
+ 600 val perplexity 1858.4928
+ 600 train 7.548462 (lr=2.9889e-05) (hash(x)=155504036)
+ 700 val loss 7.4642
+ 700 val perplexity 1744.3755
+ 700 train 7.321239 (lr=2.9827e-05) (hash(x)=137347213)
+ 800 val loss 7.4136
+ 800 val perplexity 1658.3201
+ 800 train 7.269616 (lr=2.9751e-05) (hash(x)=143823248)
+ 900 val loss 7.3688
+ 900 val perplexity 1585.6975
+ 900 train 7.486793 (lr=2.9662e-05) (hash(x)=156260416)
+ 1000 val loss 7.3444
+ 1000 val perplexity 1547.5503
+ 1000 train 7.314602 (lr=2.9558e-05) (hash(x)=143734685)
+ 1100 val loss 7.3103
+ 1100 val perplexity 1495.6113
+ 1100 train 7.249287 (lr=2.9442e-05) (hash(x)=160013925)
+ 1200 val loss 7.2809
+ 1200 val perplexity 1452.2426
+ 1200 train 7.030381 (lr=2.9312e-05) (hash(x)=150678249)
+ 1300 val loss 7.2499
+ 1300 val perplexity 1407.9894
+ 1300 train 7.035954 (lr=2.9169e-05) (hash(x)=149073315)
+ 1400 val loss 7.1976
+ 1400 val perplexity 1336.1791
+ 1400 train 7.560995 (lr=2.9013e-05) (hash(x)=175802021)
+ 1500 val loss 7.1596
+ 1500 val perplexity 1286.3782
+ 1500 train 7.414792 (lr=2.8845e-05) (hash(x)=171034639)
+ 1600 val loss 7.1100
+ 1600 val perplexity 1224.1091
+ 1600 train 7.318711 (lr=2.8663e-05) (hash(x)=158681215)
+ 1700 val loss 7.0716
+ 1700 val perplexity 1178.0487
+ 1700 train 7.011314 (lr=2.8469e-05) (hash(x)=152116061)
+ 1800 val loss 7.0428
+ 1800 val perplexity 1144.6343
+ 1800 train 6.981174 (lr=2.8263e-05) (hash(x)=146108145)
+ 1900 val loss 7.0107
+ 1900 val perplexity 1108.4087
+ 1900 train 6.845355 (lr=2.8044e-05) (hash(x)=147598108)
+ 2000 val loss 6.9936
+ 2000 val perplexity 1089.6827
+ 2000 train 6.852312 (lr=2.7814e-05) (hash(x)=154996086)
+ 2100 val loss 6.9603
+ 2100 val perplexity 1053.9778
+ 2100 train 6.747998 (lr=2.7572e-05) (hash(x)=153396183)
+ 2200 val loss 6.9095
+ 2200 val perplexity 1001.7344
+ 2200 train 6.897384 (lr=2.7319e-05) (hash(x)=153885445)
+ 2300 val loss 6.8842
+ 2300 val perplexity 976.7466
+ 2300 train 6.883799 (lr=2.7055e-05) (hash(x)=159666385)
+ 2400 val loss 6.8381
+ 2400 val perplexity 932.6805
+ 2400 train 6.814809 (lr=2.6780e-05) (hash(x)=142353087)
+ 2500 val loss 6.8053
+ 2500 val perplexity 902.6398
+ 2500 train 6.818474 (lr=2.6494e-05) (hash(x)=146491718)
+ 2600 val loss 6.7792
+ 2600 val perplexity 879.3629
+ 2600 train 6.706415 (lr=2.6198e-05) (hash(x)=150750353)
+ 2700 val loss 6.7435
+ 2700 val perplexity 848.5612
+ 2700 train 6.450558 (lr=2.5892e-05) (hash(x)=129849193)
+ 2800 val loss 6.7261
+ 2800 val perplexity 833.9042
+ 2800 train 6.579156 (lr=2.5576e-05) (hash(x)=152767913)
+ 2900 val loss 6.6977
+ 2900 val perplexity 810.5140
+ 2900 train 6.479834 (lr=2.5251e-05) (hash(x)=146531140)
+ 3000 val loss 6.6654
+ 3000 val perplexity 784.7470
+ 3000 train 6.737319 (lr=2.4917e-05) (hash(x)=151562048)
+ 3100 val loss 6.6276
+ 3100 val perplexity 755.6806
+ 3100 train 6.601431 (lr=2.4574e-05) (hash(x)=146001424)
+ 3200 val loss 6.6050
+ 3200 val perplexity 738.7577
+ 3200 train 6.661371 (lr=2.4224e-05) (hash(x)=166486165)
+ 3300 val loss 6.5734
+ 3300 val perplexity 715.8162
+ 3300 train 6.504717 (lr=2.3865e-05) (hash(x)=150866680)
+ 3400 val loss 6.5584
+ 3400 val perplexity 705.1649
+ 3400 train 6.459944 (lr=2.3498e-05) (hash(x)=143900419)
+ 3500 val loss 6.5325
+ 3500 val perplexity 687.0813
+ 3500 train 6.343013 (lr=2.3125e-05) (hash(x)=148845794)
+ 3600 val loss 6.5133
+ 3600 val perplexity 674.0522
+ 3600 train 6.354141 (lr=2.2744e-05) (hash(x)=145667796)
+ 3700 val loss 6.4988
+ 3700 val perplexity 664.3641
+ 3700 train 6.519285 (lr=2.2357e-05) (hash(x)=163563851)
+ 3800 val loss 6.4812
+ 3800 val perplexity 652.7576
+ 3800 train 6.425587 (lr=2.1965e-05) (hash(x)=147488689)
+ 3900 val loss 6.4513
+ 3900 val perplexity 633.5565
+ 3900 train 6.449866 (lr=2.1566e-05) (hash(x)=148186608)
+ 4000 val loss 6.4286
+ 4000 val perplexity 619.2852
+ 4000 train 6.293235 (lr=2.1162e-05) (hash(x)=142970187)
+ 4100 val loss 6.4098
+ 4100 val perplexity 607.7936
+ 4100 train 6.482577 (lr=2.0754e-05) (hash(x)=141584883)
+ 4200 val loss 6.3939
+ 4200 val perplexity 598.1957
+ 4200 train 6.192812 (lr=2.0341e-05) (hash(x)=145664585)
+ 4300 val loss 6.3816
+ 4300 val perplexity 590.8762
+ 4300 train 6.243621 (lr=1.9924e-05) (hash(x)=143736499)
+ 4400 val loss 6.3786
+ 4400 val perplexity 589.0906
+ 4400 train 6.205530 (lr=1.9504e-05) (hash(x)=151883322)
+ 4500 val loss 6.3650
+ 4500 val perplexity 581.1219
+ 4500 train 6.248657 (lr=1.9081e-05) (hash(x)=153904871)
+ 4600 val loss 6.3499
+ 4600 val perplexity 572.4113
+ 4600 train 6.429447 (lr=1.8655e-05) (hash(x)=154893521)
+ 4700 val loss 6.3303
+ 4700 val perplexity 561.3388
+ 4700 train 6.389878 (lr=1.8226e-05) (hash(x)=152323949)
+ 4800 val loss 6.3137
+ 4800 val perplexity 552.0685
+ 4800 train 6.247524 (lr=1.7796e-05) (hash(x)=154104619)
+ 4900 val loss 6.3041
+ 4900 val perplexity 546.8019
+ 4900 train 6.426771 (lr=1.7365e-05) (hash(x)=146311426)
+ 5000 val loss 6.2913
+ 5000 val perplexity 539.8301
+ 5000 train 6.296994 (lr=1.6933e-05) (hash(x)=156741847)
+ 5100 val loss 6.2820
+ 5100 val perplexity 534.8316
+ 5100 train 6.057541 (lr=1.6500e-05) (hash(x)=142086346)
+ 5200 val loss 6.2774
+ 5200 val perplexity 532.4290
+ 5200 train 6.050554 (lr=1.6067e-05) (hash(x)=150265428)
+ 5300 val loss 6.2747
+ 5300 val perplexity 530.9745
+ 5300 train 6.163742 (lr=1.5635e-05) (hash(x)=151339108)
+ 5400 val loss 6.2586
+ 5400 val perplexity 522.4918
+ 5400 train 6.242078 (lr=1.5204e-05) (hash(x)=154654372)
+ 5500 val loss 6.2442
+ 5500 val perplexity 515.0331
+ 5500 train 6.253026 (lr=1.4774e-05) (hash(x)=150575051)
+ 5600 val loss 6.2343
+ 5600 val perplexity 509.9215
+ 5600 train 6.134394 (lr=1.4345e-05) (hash(x)=140396423)
+ 5700 val loss 6.2225
+ 5700 val perplexity 503.9442
+ 5700 train 6.086608 (lr=1.3919e-05) (hash(x)=144678758)
+ 5800 val loss 6.2157
+ 5800 val perplexity 500.5370
+ 5800 train 6.277523 (lr=1.3496e-05) (hash(x)=151992743)
+ 5900 val loss 6.2089
+ 5900 val perplexity 497.1764
+ 5900 train 5.998001 (lr=1.3076e-05) (hash(x)=144396927)
+ 6000 val loss 6.2031
+ 6000 val perplexity 494.2646
+ 6000 train 6.175484 (lr=1.2659e-05) (hash(x)=165478625)
+ 6100 val loss 6.2044
+ 6100 val perplexity 494.9267
+ 6100 train 5.943242 (lr=1.2246e-05) (hash(x)=147088621)
+ 6200 val loss 6.1952
+ 6200 val perplexity 490.3867
+ 6200 train 5.959939 (lr=1.1838e-05) (hash(x)=140794994)
+ 6300 val loss 6.1857
+ 6300 val perplexity 485.7651
+ 6300 train 6.112713 (lr=1.1434e-05) (hash(x)=134780906)
+ 6400 val loss 6.1726
+ 6400 val perplexity 479.4460
+ 6400 train 6.158383 (lr=1.1035e-05) (hash(x)=149023655)
+ 6500 val loss 6.1676
+ 6500 val perplexity 477.0429
+ 6500 train 6.019427 (lr=1.0643e-05) (hash(x)=147497796)
+ 6600 val loss 6.1663
+ 6600 val perplexity 476.4064
+ 6600 train 6.022311 (lr=1.0256e-05) (hash(x)=152902689)
+ 6700 val loss 6.1598
+ 6700 val perplexity 473.3375
+ 6700 train 6.199059 (lr=9.8753e-06) (hash(x)=153846046)
+ 6800 val loss 6.1487
+ 6800 val perplexity 468.0990
+ 6800 train 6.287767 (lr=9.5017e-06) (hash(x)=158512738)
+ 6900 val loss 6.1438
+ 6900 val perplexity 465.8420
+ 6900 train 6.667380 (lr=9.1353e-06) (hash(x)=156849968)
+ 7000 val loss 6.1404
+ 7000 val perplexity 464.2478
+ 7000 train 5.972945 (lr=8.7764e-06) (hash(x)=142395855)
+ 7100 val loss 6.1347
+ 7100 val perplexity 461.6042
+ 7100 train 5.991301 (lr=8.4255e-06) (hash(x)=147114884)
+ 7200 val loss 6.1303
+ 7200 val perplexity 459.5841
+ 7200 train 6.150269 (lr=8.0829e-06) (hash(x)=156979839)
+ 7300 val loss 6.1279
+ 7300 val perplexity 458.4534
+ 7300 train 5.902767 (lr=7.7489e-06) (hash(x)=145584373)
+ 7400 val loss 6.1273
+ 7400 val perplexity 458.1935
+ 7400 train 5.868430 (lr=7.4239e-06) (hash(x)=141508204)
+ 7500 val loss 6.1193
+ 7500 val perplexity 454.5416
+ 7500 train 6.157444 (lr=7.1083e-06) (hash(x)=148803965)
+ 7600 val loss 6.1136
+ 7600 val perplexity 451.9759
+ 7600 train 6.178225 (lr=6.8023e-06) (hash(x)=151019676)
+ 7700 val loss 6.1085
+ 7700 val perplexity 449.6451
+ 7700 train 6.151628 (lr=6.5062e-06) (hash(x)=143155750)
+ 7800 val loss 6.1058
+ 7800 val perplexity 448.4541
+ 7800 train 6.123940 (lr=6.2205e-06) (hash(x)=152569653)
+ 7900 val loss 6.1027
+ 7900 val perplexity 447.0435
+ 7900 train 5.969548 (lr=5.9453e-06) (hash(x)=143519455)
+ 8000 val loss 6.1011
+ 8000 val perplexity 446.3461
+ 8000 train 6.265728 (lr=5.6809e-06) (hash(x)=161180944)
+ 8100 val loss 6.1003
+ 8100 val perplexity 445.9847
+ 8100 train 6.132887 (lr=5.4277e-06) (hash(x)=154107345)
+ 8200 val loss 6.1007
+ 8200 val perplexity 446.1861
+ 8200 train 6.124985 (lr=5.1858e-06) (hash(x)=152486517)
+ 8300 val loss 6.0919
+ 8300 val perplexity 442.2632
+ 8300 train 6.113282 (lr=4.9556e-06) (hash(x)=156167749)
+ 8400 val loss 6.0884
+ 8400 val perplexity 440.7003
+ 8400 train 6.169589 (lr=4.7372e-06) (hash(x)=149155006)
+ 8500 val loss 6.0843
+ 8500 val perplexity 438.8997
+ 8500 train 6.044793 (lr=4.5309e-06) (hash(x)=147844390)
+ 8600 val loss 6.0825
+ 8600 val perplexity 438.1269
+ 8600 train 6.348199 (lr=4.3369e-06) (hash(x)=165753320)
+ 8700 val loss 6.0821
+ 8700 val perplexity 437.9322
+ 8700 train 5.860587 (lr=4.1554e-06) (hash(x)=146079979)
+ 8800 val loss 6.0813
+ 8800 val perplexity 437.6174
+ 8800 train 6.286478 (lr=3.9866e-06) (hash(x)=172259509)
+ 8900 val loss 6.0796
+ 8900 val perplexity 436.8750
+ 8900 train 5.752324 (lr=3.8307e-06) (hash(x)=145148314)
+ 9000 val loss 6.0798
+ 9000 val perplexity 436.9346
+ 9000 train 5.990164 (lr=3.6877e-06) (hash(x)=144250633)
+ 9100 val loss 6.0743
+ 9100 val perplexity 434.5333
+ 9100 train 6.182425 (lr=3.5580e-06) (hash(x)=157219797)
+ 9200 val loss 6.0697
+ 9200 val perplexity 432.5594
+ 9200 train 6.088066 (lr=3.4415e-06) (hash(x)=142743778)
+ 9300 val loss 6.0686
+ 9300 val perplexity 432.0853
+ 9300 train 5.967726 (lr=3.3385e-06) (hash(x)=139669771)
+ 9400 val loss 6.0656
+ 9400 val perplexity 430.7808
+ 9400 train 6.073001 (lr=3.2490e-06) (hash(x)=145916843)
+ 9500 val loss 6.0646
+ 9500 val perplexity 430.3534
+ 9500 train 5.989102 (lr=3.1730e-06) (hash(x)=150196125)
+ 9600 val loss 6.0638
+ 9600 val perplexity 429.9952
+ 9600 train 6.368902 (lr=3.1108e-06) (hash(x)=160041419)
+ 9700 val loss 6.0641
+ 9700 val perplexity 430.1185
+ 9700 train 5.840649 (lr=3.0624e-06) (hash(x)=139931627)
+ 9800 val loss 6.0617
+ 9800 val perplexity 429.1059
+ 9800 train 6.026587 (lr=3.0277e-06) (hash(x)=150370792)
+ 9900 val loss 6.0607
+ 9900 val perplexity 428.6625
+ 9900 train 5.908560 (lr=3.0069e-06) (hash(x)=153014886)
+ 9999 val loss 6.0601
+ 9999 val perplexity 428.4363
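One internal consistency check on the log above: the reported val perplexity tracks exp(val loss) to within the rounding of the printed loss. A quick verification, with value pairs copied from the log:

import math

# (val loss, reported val perplexity) pairs taken from the log above
checks = [
    (11.2663, 78147.0000),  # step 0
    (5.7907, 327.2414),     # step 9999, old run (max_lr 5e-05)
    (6.0601, 428.4363),     # step 9999, new run (max_lr 3e-05)
]
for loss, reported in checks:
    print(f"exp({loss}) = {math.exp(loss):,.1f}  vs reported {reported:,.4f}")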
attention_kindselective_n_heads4_seed1338/model_02500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5e2a3444472db35903bad54849a7d92f16bdf5058dc0028bd4e879da6753a0ee
+ oid sha256:cfb2160933da3c9bd20cc75e7b5e3e17efe7597e8f4ceb7e2131d2db5fe2a7be
  size 92843394
attention_kindselective_n_heads4_seed1338/model_05000.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:07f88bee1952786bb706ac1ece6b70bb516762973ef232996aedd47bc85f5914
+ oid sha256:d1a68456ab6dd22f9e0fa8d9d98fa5890dc9184894790e74af0dbd1de1e7eabb
  size 92843394
attention_kindselective_n_heads4_seed1338/model_07500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:7f17cb1edd948496192f36670a2d54cb36d4c2cf4ed9460a19439cd7973ae0b0
+ oid sha256:6ad3c1940f30f39a067179a398d7645ba2a216761c016c202a7bd2ff59ad9146
  size 92843394
attention_kindselective_n_heads4_seed1338/model_09999.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:cee7c46ff5ee9b5bd69ce571c6fb4bc48415b90cd60598519577aaed6e18d2ed
+ oid sha256:48cf363ca99835cf2690a96b5b649ff230b1bd43022ac3c5cc864f3dbc4ad02b
  size 92843394
attention_kindselective_n_heads4_seed1338/optimizer_02500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:f12935f73ed1859ecb001e53fec6eefe2e3bed665d566862e476437d688fa0e8
+ oid sha256:b4b9a2b4aab41d39c6c30b36ca11463d9a9c59401a5856681c6f004e3eb2b08b
  size 179406214
attention_kindselective_n_heads4_seed1338/optimizer_05000.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:4648c7e9d73bdd9213ee275a1fade55bd374d699c37da53d9b6dc0e019b13b0e
+ oid sha256:85d3d0e3997291682cfc03365e6569ad60b2cc96084e2f466df903968cb7dad4
  size 179406214
attention_kindselective_n_heads4_seed1338/optimizer_07500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:605cc9c9c5fb966ee5ff797f2afebe9297dae51d7992f2ff53e2af23f913a066
+ oid sha256:d6720b9370b3c247560dd03bca887c5fd03cca9f0f8935c8f0e32f2b2d0bc882
  size 179406214
attention_kindselective_n_heads4_seed1338/optimizer_09999.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:8638fe40bfe3a42864468b040bb934f9b730edafda247fd728082ac2c391fa5a
+ oid sha256:8f09307ada0be1dc0ce77e7e2f58b55e8fcbcd5554b4db8268178906ff5dee43
  size 179406214
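The checkpoint entries above are Git LFS pointer files, so each diff only shows a new SHA-256 oid for the blob; the byte size is unchanged, presumably because the retrained state dicts have the same keys and tensor shapes. A sketch for verifying a downloaded checkpoint against its pointer; the local path is a placeholder:

import hashlib

def lfs_sha256(path: str, chunk_size: int = 1 << 20) -> str:
    """SHA-256 of a file, streamed in chunks; comparable to the pointer's oid."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while chunk := f.read(chunk_size):
            h.update(chunk)
    return h.hexdigest()

# Placeholder path; the expected digest is the new oid of model_09999.pt above.
digest = lfs_sha256("model_09999.pt")
assert digest == "48cf363ca99835cf2690a96b5b649ff230b1bd43022ac3c5cc864f3dbc4ad02b"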