andrew-healey committed
Commit e145634 · verified · 1 Parent(s): 4023451

Upload folder using huggingface_hub

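For reference, a minimal sketch of how a run folder like this one is typically pushed with the huggingface_hub client. The folder path is the log_dir from args.json; the repo_id below is a hypothetical placeholder, not necessarily the actual target repo.

from huggingface_hub import upload_folder

# Push the local run directory to the Hub in a single commit.
# repo_id is a placeholder; substitute the real repository name.
upload_folder(
    folder_path="wider_is_better_4/attention_kindselective_n_heads4_seed1340",
    repo_id="andrew-healey/wider_is_better_4",  # hypothetical
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
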
attention_kindselective_n_heads4_seed1340/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_4/attention_kindselective_n_heads4_seed1340", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_4", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1340, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "0.5e-4_30720_4_1340", "n_embd": 256}
 
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_4/attention_kindselective_n_heads4_seed1340", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_4", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1340, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 3e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "3e-5_30720_4_1340", "n_embd": 256}
attention_kindselective_n_heads4_seed1340/log2.txt CHANGED
@@ -1,303 +1,303 @@
  max_steps: 10000
  0 val loss 11.2714
  0 val perplexity 78545.6094
- 0 train 11.276529 (lr=2.5000e-07) (hash(x)=164406924)
- 100 val loss 9.8154
- 100 val perplexity 18313.5645
- 100 train 9.875526 (lr=2.5250e-05) (hash(x)=177407419)
- 200 val loss 8.2338
- 200 val perplexity 3766.0889
- 200 train 8.183726 (lr=5.0000e-05) (hash(x)=144903932)
- 300 val loss 7.6691
- 300 val perplexity 2141.1450
- 300 train 7.985724 (lr=4.9988e-05) (hash(x)=173839165)
- 400 val loss 7.5500
- 400 val perplexity 1900.7639
- 400 train 7.632100 (lr=4.9954e-05) (hash(x)=167734596)
- 500 val loss 7.4711
- 500 val perplexity 1756.4982
- 500 train 7.484628 (lr=4.9896e-05) (hash(x)=153224076)
- 600 val loss 7.3888
- 600 val perplexity 1617.7747
- 600 train 7.289467 (lr=4.9815e-05) (hash(x)=149619098)
- 700 val loss 7.3274
- 700 val perplexity 1521.4415
- 700 train 7.250896 (lr=4.9712e-05) (hash(x)=146539909)
- 800 val loss 7.2574
- 800 val perplexity 1418.5006
- 800 train 7.213085 (lr=4.9585e-05) (hash(x)=153710890)
- 900 val loss 7.1994
- 900 val perplexity 1338.6649
- 900 train 7.131366 (lr=4.9436e-05) (hash(x)=155873620)
- 1000 val loss 7.1452
- 1000 val perplexity 1267.9447
- 1000 train 7.060977 (lr=4.9264e-05) (hash(x)=145450636)
- 1100 val loss 7.0898
- 1100 val perplexity 1199.6580
- 1100 train 7.176812 (lr=4.9070e-05) (hash(x)=154123388)
- 1200 val loss 7.0376
- 1200 val perplexity 1138.6163
- 1200 train 6.907458 (lr=4.8854e-05) (hash(x)=145249251)
- 1300 val loss 6.9744
- 1300 val perplexity 1068.9224
- 1300 train 6.860754 (lr=4.8616e-05) (hash(x)=148937127)
- 1400 val loss 6.9257
- 1400 val perplexity 1018.1540
- 1400 train 7.042790 (lr=4.8356e-05) (hash(x)=150475545)
- 1500 val loss 6.8788
- 1500 val perplexity 971.4904
- 1500 train 6.801968 (lr=4.8074e-05) (hash(x)=154653428)
- 1600 val loss 6.8310
- 1600 val perplexity 926.0797
- 1600 train 6.806830 (lr=4.7772e-05) (hash(x)=144483776)
- 1700 val loss 6.7804
- 1700 val perplexity 880.4584
- 1700 train 6.921985 (lr=4.7448e-05) (hash(x)=157395496)
- 1800 val loss 6.7448
- 1800 val perplexity 849.6143
- 1800 train 6.765642 (lr=4.7105e-05) (hash(x)=157916369)
- 1900 val loss 6.7027
- 1900 val perplexity 814.5706
- 1900 train 6.902748 (lr=4.6741e-05) (hash(x)=166073923)
- 2000 val loss 6.6750
- 2000 val perplexity 792.3612
- 2000 train 6.733498 (lr=4.6357e-05) (hash(x)=154856891)
- 2100 val loss 6.6482
- 2100 val perplexity 771.3788
- 2100 train 6.638843 (lr=4.5954e-05) (hash(x)=151925203)
- 2200 val loss 6.6066
- 2200 val perplexity 739.9871
- 2200 train 6.287358 (lr=4.5532e-05) (hash(x)=136191502)
- 2300 val loss 6.5721
- 2300 val perplexity 714.8580
- 2300 train 6.767154 (lr=4.5091e-05) (hash(x)=153273362)
- 2400 val loss 6.5409
- 2400 val perplexity 692.8893
- 2400 train 6.494888 (lr=4.4633e-05) (hash(x)=148021541)
- 2500 val loss 6.5148
- 2500 val perplexity 675.0336
- 2500 train 6.460410 (lr=4.4156e-05) (hash(x)=141356608)
- 2600 val loss 6.4847
- 2600 val perplexity 655.0107
- 2600 train 6.427143 (lr=4.3663e-05) (hash(x)=146005217)
- 2700 val loss 6.4599
- 2700 val perplexity 638.9852
- 2700 train 6.306155 (lr=4.3153e-05) (hash(x)=144511718)
- 2800 val loss 6.4346
- 2800 val perplexity 623.0291
- 2800 train 6.313002 (lr=4.2627e-05) (hash(x)=146019502)
- 2900 val loss 6.4083
- 2900 val perplexity 606.8544
- 2900 train 6.329792 (lr=4.2085e-05) (hash(x)=146496200)
- 3000 val loss 6.4059
- 3000 val perplexity 605.3804
- 3000 train 6.345868 (lr=4.1529e-05) (hash(x)=150127281)
- 3100 val loss 6.3632
- 3100 val perplexity 580.0782
- 3100 train 6.357021 (lr=4.0957e-05) (hash(x)=142022255)
- 3200 val loss 6.3417
- 3200 val perplexity 567.7715
- 3200 train 6.389840 (lr=4.0373e-05) (hash(x)=154120875)
- 3300 val loss 6.3309
- 3300 val perplexity 561.6668
- 3300 train 6.418671 (lr=3.9775e-05) (hash(x)=153999717)
- 3400 val loss 6.3119
- 3400 val perplexity 551.1045
- 3400 train 6.081605 (lr=3.9164e-05) (hash(x)=139694097)
- 3500 val loss 6.2892
- 3500 val perplexity 538.6971
- 3500 train 6.477514 (lr=3.8541e-05) (hash(x)=162992732)
- 3600 val loss 6.2635
- 3600 val perplexity 525.0790
- 3600 train 6.247020 (lr=3.7907e-05) (hash(x)=147574101)
- 3700 val loss 6.2520
- 3700 val perplexity 519.0330
- 3700 train 6.364397 (lr=3.7262e-05) (hash(x)=157763099)
- 3800 val loss 6.2303
- 3800 val perplexity 507.9313
- 3800 train 6.389091 (lr=3.6608e-05) (hash(x)=170800034)
- 3900 val loss 6.2183
- 3900 val perplexity 501.8706
- 3900 train 6.302568 (lr=3.5944e-05) (hash(x)=164984528)
- 4000 val loss 6.2018
- 4000 val perplexity 493.6586
- 4000 train 6.060319 (lr=3.5271e-05) (hash(x)=141743323)
- 4100 val loss 6.2016
- 4100 val perplexity 493.5134
- 4100 train 6.226849 (lr=3.4590e-05) (hash(x)=153392872)
- 4200 val loss 6.1929
- 4200 val perplexity 489.2509
- 4200 train 6.065385 (lr=3.3902e-05) (hash(x)=149074933)
- 4300 val loss 6.1750
- 4300 val perplexity 480.6028
- 4300 train 6.553733 (lr=3.3207e-05) (hash(x)=167823423)
- 4400 val loss 6.1533
- 4400 val perplexity 470.2520
- 4400 train 5.930851 (lr=3.2507e-05) (hash(x)=141203114)
- 4500 val loss 6.1365
- 4500 val perplexity 462.4289
- 4500 train 6.135710 (lr=3.1801e-05) (hash(x)=146284780)
- 4600 val loss 6.1188
- 4600 val perplexity 454.3152
- 4600 train 5.924687 (lr=3.1091e-05) (hash(x)=141126464)
- 4700 val loss 6.1100
- 4700 val perplexity 450.3566
- 4700 train 6.132822 (lr=3.0377e-05) (hash(x)=154751926)
- 4800 val loss 6.0998
- 4800 val perplexity 445.7567
- 4800 train 6.181572 (lr=2.9661e-05) (hash(x)=154793198)
- 4900 val loss 6.0956
- 4900 val perplexity 443.9148
- 4900 train 5.804508 (lr=2.8942e-05) (hash(x)=139406392)
- 5000 val loss 6.1043
- 5000 val perplexity 447.7915
- 5000 train 5.813259 (lr=2.8221e-05) (hash(x)=153548741)
- 5100 val loss 6.0828
- 5100 val perplexity 438.2686
- 5100 train 6.164180 (lr=2.7500e-05) (hash(x)=160488568)
- 5200 val loss 6.0660
- 5200 val perplexity 430.9618
- 5200 train 6.082884 (lr=2.6779e-05) (hash(x)=149645053)
- 5300 val loss 6.0438
- 5300 val perplexity 421.4874
- 5300 train 6.135334 (lr=2.6058e-05) (hash(x)=155820556)
- 5400 val loss 6.0364
- 5400 val perplexity 418.3896
- 5400 train 5.987836 (lr=2.5339e-05) (hash(x)=147538134)
- 5500 val loss 6.0267
- 5500 val perplexity 414.3346
- 5500 train 6.239408 (lr=2.4623e-05) (hash(x)=166889307)
- 5600 val loss 6.0201
- 5600 val perplexity 411.6386
- 5600 train 5.719359 (lr=2.3909e-05) (hash(x)=139516699)
- 5700 val loss 6.0218
- 5700 val perplexity 412.3290
- 5700 train 5.710136 (lr=2.3199e-05) (hash(x)=140453511)
- 5800 val loss 6.0138
- 5800 val perplexity 409.0398
- 5800 train 5.955937 (lr=2.2493e-05) (hash(x)=162964847)
- 5900 val loss 6.0018
- 5900 val perplexity 404.1522
- 5900 train 6.014284 (lr=2.1793e-05) (hash(x)=150606634)
- 6000 val loss 5.9860
- 6000 val perplexity 397.8111
- 6000 train 6.067945 (lr=2.1098e-05) (hash(x)=149890857)
- 6100 val loss 5.9842
- 6100 val perplexity 397.0892
- 6100 train 6.078321 (lr=2.0410e-05) (hash(x)=173884145)
- 6200 val loss 5.9740
- 6200 val perplexity 393.0685
- 6200 train 6.016133 (lr=1.9729e-05) (hash(x)=151987098)
- 6300 val loss 5.9673
- 6300 val perplexity 390.4381
- 6300 train 5.915937 (lr=1.9056e-05) (hash(x)=148853562)
- 6400 val loss 5.9593
- 6400 val perplexity 387.3463
- 6400 train 5.721579 (lr=1.8392e-05) (hash(x)=141530101)
- 6500 val loss 5.9553
- 6500 val perplexity 385.8022
- 6500 train 5.821025 (lr=1.7738e-05) (hash(x)=142297809)
- 6600 val loss 5.9475
- 6600 val perplexity 382.7892
- 6600 train 5.840808 (lr=1.7093e-05) (hash(x)=142447782)
- 6700 val loss 5.9435
- 6700 val perplexity 381.2514
- 6700 train 5.940805 (lr=1.6459e-05) (hash(x)=147004686)
- 6800 val loss 5.9352
- 6800 val perplexity 378.1024
- 6800 train 5.679741 (lr=1.5836e-05) (hash(x)=133438702)
- 6900 val loss 5.9317
- 6900 val perplexity 376.8032
- 6900 train 5.951016 (lr=1.5225e-05) (hash(x)=157085143)
- 7000 val loss 5.9286
- 7000 val perplexity 375.6203
- 7000 train 5.805225 (lr=1.4627e-05) (hash(x)=139437666)
- 7100 val loss 5.9237
- 7100 val perplexity 373.7811
- 7100 train 5.940050 (lr=1.4043e-05) (hash(x)=159792986)
- 7200 val loss 5.9126
- 7200 val perplexity 369.6597
- 7200 train 5.891582 (lr=1.3471e-05) (hash(x)=144930687)
- 7300 val loss 5.9092
- 7300 val perplexity 368.4073
- 7300 train 5.956987 (lr=1.2915e-05) (hash(x)=156242690)
- 7400 val loss 5.9077
- 7400 val perplexity 367.8558
- 7400 train 5.739932 (lr=1.2373e-05) (hash(x)=148183719)
- 7500 val loss 5.9025
- 7500 val perplexity 365.9624
- 7500 train 5.946316 (lr=1.1847e-05) (hash(x)=152494758)
- 7600 val loss 5.8989
- 7600 val perplexity 364.6480
- 7600 train 5.597721 (lr=1.1337e-05) (hash(x)=142485027)
- 7700 val loss 5.9010
- 7700 val perplexity 365.4037
- 7700 train 5.767652 (lr=1.0844e-05) (hash(x)=147512165)
- 7800 val loss 5.8856
- 7800 val perplexity 359.8116
- 7800 train 5.766011 (lr=1.0367e-05) (hash(x)=160346994)
- 7900 val loss 5.8847
- 7900 val perplexity 359.4823
- 7900 train 5.756016 (lr=9.9088e-06) (hash(x)=144488254)
- 8000 val loss 5.8796
- 8000 val perplexity 357.6544
- 8000 train 5.692674 (lr=9.4682e-06) (hash(x)=147637019)
- 8100 val loss 5.8788
- 8100 val perplexity 357.3631
- 8100 train 5.741425 (lr=9.0461e-06) (hash(x)=147340534)
- 8200 val loss 5.8760
- 8200 val perplexity 356.3931
- 8200 train 5.935407 (lr=8.6430e-06) (hash(x)=151630665)
- 8300 val loss 5.8703
- 8300 val perplexity 354.3390
- 8300 train 5.975782 (lr=8.2593e-06) (hash(x)=149747064)
- 8400 val loss 5.8661
- 8400 val perplexity 352.8584
- 8400 train 6.060540 (lr=7.8953e-06) (hash(x)=154245770)
- 8500 val loss 5.8655
- 8500 val perplexity 352.6601
- 8500 train 5.755071 (lr=7.5515e-06) (hash(x)=152559100)
- 8600 val loss 5.8608
- 8600 val perplexity 351.0195
- 8600 train 6.293483 (lr=7.2282e-06) (hash(x)=181365926)
- 8700 val loss 5.8604
- 8700 val perplexity 350.8503
- 8700 train 5.698334 (lr=6.9257e-06) (hash(x)=154405991)
- 8800 val loss 5.8571
- 8800 val perplexity 349.6920
- 8800 train 5.809381 (lr=6.6444e-06) (hash(x)=153755904)
- 8900 val loss 5.8525
- 8900 val perplexity 348.0933
- 8900 train 5.771592 (lr=6.3845e-06) (hash(x)=152120568)
- 9000 val loss 5.8484
- 9000 val perplexity 346.6963
- 9000 train 5.638720 (lr=6.1462e-06) (hash(x)=142797279)
- 9100 val loss 5.8454
- 9100 val perplexity 345.6317
- 9100 train 5.712313 (lr=5.9300e-06) (hash(x)=143037503)
- 9200 val loss 5.8462
- 9200 val perplexity 345.9006
- 9200 train 5.736977 (lr=5.7359e-06) (hash(x)=113690273)
- 9300 val loss 5.8437
- 9300 val perplexity 345.0645
- 9300 train 5.782093 (lr=5.5641e-06) (hash(x)=158025077)
- 9400 val loss 5.8422
- 9400 val perplexity 344.5193
- 9400 train 5.916287 (lr=5.4149e-06) (hash(x)=158251718)
- 9500 val loss 5.8380
- 9500 val perplexity 343.0774
- 9500 train 5.881953 (lr=5.2884e-06) (hash(x)=154752610)
- 9600 val loss 5.8383
- 9600 val perplexity 343.1870
- 9600 train 5.720537 (lr=5.1847e-06) (hash(x)=146889093)
- 9700 val loss 5.8335
- 9700 val perplexity 341.5587
- 9700 train 5.841027 (lr=5.1040e-06) (hash(x)=156906516)
- 9800 val loss 5.8335
- 9800 val perplexity 341.5619
- 9800 train 5.668470 (lr=5.0462e-06) (hash(x)=153841927)
- 9900 val loss 5.8336
- 9900 val perplexity 341.5966
- 9900 train 6.047590 (lr=5.0116e-06) (hash(x)=163514334)
- 9999 val loss 5.8284
- 9999 val perplexity 339.8243
 
  max_steps: 10000
  0 val loss 11.2714
  0 val perplexity 78545.6094
+ 0 train 11.276529 (lr=1.5000e-07) (hash(x)=164406924)
+ 100 val loss 9.9606
+ 100 val perplexity 21175.6562
+ 100 train 10.004577 (lr=1.5150e-05) (hash(x)=177407419)
+ 200 val loss 9.0320
+ 200 val perplexity 8366.1895
+ 200 train 8.999515 (lr=3.0000e-05) (hash(x)=144903932)
+ 300 val loss 7.8938
+ 300 val perplexity 2680.6653
+ 300 train 8.154392 (lr=2.9993e-05) (hash(x)=173839165)
+ 400 val loss 7.5861
+ 400 val perplexity 1970.6659
+ 400 train 7.666971 (lr=2.9972e-05) (hash(x)=167734596)
+ 500 val loss 7.4743
+ 500 val perplexity 1762.1843
+ 500 train 7.479569 (lr=2.9938e-05) (hash(x)=153224076)
+ 600 val loss 7.3984
+ 600 val perplexity 1633.3772
+ 600 train 7.299772 (lr=2.9889e-05) (hash(x)=149619098)
+ 700 val loss 7.3482
+ 700 val perplexity 1553.3265
+ 700 train 7.271634 (lr=2.9827e-05) (hash(x)=146539909)
+ 800 val loss 7.2867
+ 800 val perplexity 1460.7719
+ 800 train 7.243446 (lr=2.9751e-05) (hash(x)=153710890)
+ 900 val loss 7.2455
+ 900 val perplexity 1401.7346
+ 900 train 7.184447 (lr=2.9662e-05) (hash(x)=155873620)
+ 1000 val loss 7.1940
+ 1000 val perplexity 1331.4694
+ 1000 train 7.114730 (lr=2.9558e-05) (hash(x)=145450636)
+ 1100 val loss 7.1314
+ 1100 val perplexity 1250.6267
+ 1100 train 7.213695 (lr=2.9442e-05) (hash(x)=154123388)
+ 1200 val loss 7.0798
+ 1200 val perplexity 1187.7039
+ 1200 train 6.951880 (lr=2.9312e-05) (hash(x)=145249251)
+ 1300 val loss 7.0330
+ 1300 val perplexity 1133.4534
+ 1300 train 6.922292 (lr=2.9169e-05) (hash(x)=148937127)
+ 1400 val loss 6.9842
+ 1400 val perplexity 1079.3993
+ 1400 train 7.102539 (lr=2.9013e-05) (hash(x)=150475545)
+ 1500 val loss 6.9463
+ 1500 val perplexity 1039.3448
+ 1500 train 6.868868 (lr=2.8845e-05) (hash(x)=154653428)
+ 1600 val loss 6.9020
+ 1600 val perplexity 994.2404
+ 1600 train 6.883031 (lr=2.8663e-05) (hash(x)=144483776)
+ 1700 val loss 6.8505
+ 1700 val perplexity 944.3855
+ 1700 train 6.997636 (lr=2.8469e-05) (hash(x)=157395496)
+ 1800 val loss 6.8109
+ 1800 val perplexity 907.7036
+ 1800 train 6.835225 (lr=2.8263e-05) (hash(x)=157916369)
+ 1900 val loss 6.7660
+ 1900 val perplexity 867.8434
+ 1900 train 6.961428 (lr=2.8044e-05) (hash(x)=166073923)
+ 2000 val loss 6.7284
+ 2000 val perplexity 835.7856
+ 2000 train 6.798526 (lr=2.7814e-05) (hash(x)=154856891)
+ 2100 val loss 6.6863
+ 2100 val perplexity 801.3806
+ 2100 train 6.677713 (lr=2.7572e-05) (hash(x)=151925203)
+ 2200 val loss 6.6460
+ 2200 val perplexity 769.6996
+ 2200 train 6.342697 (lr=2.7319e-05) (hash(x)=136191502)
+ 2300 val loss 6.6026
+ 2300 val perplexity 736.9893
+ 2300 train 6.794070 (lr=2.7055e-05) (hash(x)=153273362)
+ 2400 val loss 6.5684
+ 2400 val perplexity 712.1960
+ 2400 train 6.527608 (lr=2.6780e-05) (hash(x)=148021541)
+ 2500 val loss 6.5452
+ 2500 val perplexity 695.8914
+ 2500 train 6.497287 (lr=2.6494e-05) (hash(x)=141356608)
+ 2600 val loss 6.5098
+ 2600 val perplexity 671.6882
+ 2600 train 6.449068 (lr=2.6198e-05) (hash(x)=146005217)
+ 2700 val loss 6.4881
+ 2700 val perplexity 657.2935
+ 2700 train 6.327260 (lr=2.5892e-05) (hash(x)=144511718)
+ 2800 val loss 6.4644
+ 2800 val perplexity 641.8793
+ 2800 train 6.342343 (lr=2.5576e-05) (hash(x)=146019502)
+ 2900 val loss 6.4402
+ 2900 val perplexity 626.5353
+ 2900 train 6.346152 (lr=2.5251e-05) (hash(x)=146496200)
+ 3000 val loss 6.4309
+ 3000 val perplexity 620.7094
+ 3000 train 6.369481 (lr=2.4917e-05) (hash(x)=150127281)
+ 3100 val loss 6.3944
+ 3100 val perplexity 598.4836
+ 3100 train 6.382371 (lr=2.4574e-05) (hash(x)=142022255)
+ 3200 val loss 6.3760
+ 3200 val perplexity 587.5679
+ 3200 train 6.428134 (lr=2.4224e-05) (hash(x)=154120875)
+ 3300 val loss 6.3658
+ 3300 val perplexity 581.6215
+ 3300 train 6.442893 (lr=2.3865e-05) (hash(x)=153999717)
+ 3400 val loss 6.3510
+ 3400 val perplexity 573.0490
+ 3400 train 6.126100 (lr=2.3498e-05) (hash(x)=139694097)
+ 3500 val loss 6.3252
+ 3500 val perplexity 558.4834
+ 3500 train 6.507070 (lr=2.3125e-05) (hash(x)=162992732)
+ 3600 val loss 6.3052
+ 3600 val perplexity 547.4195
+ 3600 train 6.278409 (lr=2.2744e-05) (hash(x)=147574101)
+ 3700 val loss 6.2949
+ 3700 val perplexity 541.7805
+ 3700 train 6.414897 (lr=2.2357e-05) (hash(x)=157763099)
+ 3800 val loss 6.2728
+ 3800 val perplexity 529.9534
+ 3800 train 6.433603 (lr=2.1965e-05) (hash(x)=170800034)
+ 3900 val loss 6.2618
+ 3900 val perplexity 524.1657
+ 3900 train 6.339255 (lr=2.1566e-05) (hash(x)=164984528)
+ 4000 val loss 6.2489
+ 4000 val perplexity 517.4280
+ 4000 train 6.104458 (lr=2.1162e-05) (hash(x)=141743323)
+ 4100 val loss 6.2473
+ 4100 val perplexity 516.5930
+ 4100 train 6.270818 (lr=2.0754e-05) (hash(x)=153392872)
+ 4200 val loss 6.2393
+ 4200 val perplexity 512.4744
+ 4200 train 6.103446 (lr=2.0341e-05) (hash(x)=149074933)
+ 4300 val loss 6.2196
+ 4300 val perplexity 502.5102
+ 4300 train 6.615050 (lr=1.9924e-05) (hash(x)=167823423)
+ 4400 val loss 6.2036
+ 4400 val perplexity 494.5025
+ 4400 train 5.975879 (lr=1.9504e-05) (hash(x)=141203114)
+ 4500 val loss 6.1913
+ 4500 val perplexity 488.4986
+ 4500 train 6.191016 (lr=1.9081e-05) (hash(x)=146284780)
+ 4600 val loss 6.1767
+ 4600 val perplexity 481.3957
+ 4600 train 5.980778 (lr=1.8655e-05) (hash(x)=141126464)
+ 4700 val loss 6.1681
+ 4700 val perplexity 477.2804
+ 4700 train 6.195337 (lr=1.8226e-05) (hash(x)=154751926)
+ 4800 val loss 6.1580
+ 4800 val perplexity 472.4835
+ 4800 train 6.241466 (lr=1.7796e-05) (hash(x)=154793198)
+ 4900 val loss 6.1548
+ 4900 val perplexity 470.9877
+ 4900 train 5.870466 (lr=1.7365e-05) (hash(x)=139406392)
+ 5000 val loss 6.1565
+ 5000 val perplexity 471.7581
+ 5000 train 5.880513 (lr=1.6933e-05) (hash(x)=153548741)
+ 5100 val loss 6.1398
+ 5100 val perplexity 463.9838
+ 5100 train 6.219454 (lr=1.6500e-05) (hash(x)=160488568)
+ 5200 val loss 6.1232
+ 5200 val perplexity 456.3128
+ 5200 train 6.138048 (lr=1.6067e-05) (hash(x)=149645053)
+ 5300 val loss 6.1116
+ 5300 val perplexity 451.0385
+ 5300 train 6.218238 (lr=1.5635e-05) (hash(x)=155820556)
+ 5400 val loss 6.1054
+ 5400 val perplexity 448.2636
+ 5400 train 6.051918 (lr=1.5204e-05) (hash(x)=147538134)
+ 5500 val loss 6.0996
+ 5500 val perplexity 445.6834
+ 5500 train 6.314559 (lr=1.4774e-05) (hash(x)=166889307)
+ 5600 val loss 6.0906
+ 5600 val perplexity 441.7078
+ 5600 train 5.787103 (lr=1.4345e-05) (hash(x)=139516699)
+ 5700 val loss 6.0892
+ 5700 val perplexity 441.0808
+ 5700 train 5.765007 (lr=1.3919e-05) (hash(x)=140453511)
+ 5800 val loss 6.0883
+ 5800 val perplexity 440.6656
+ 5800 train 6.035215 (lr=1.3496e-05) (hash(x)=162964847)
+ 5900 val loss 6.0759
+ 5900 val perplexity 435.2562
+ 5900 train 6.082222 (lr=1.3076e-05) (hash(x)=150606634)
+ 6000 val loss 6.0622
+ 6000 val perplexity 429.3382
+ 6000 train 6.153077 (lr=1.2659e-05) (hash(x)=149890857)
+ 6100 val loss 6.0595
+ 6100 val perplexity 428.1720
+ 6100 train 6.164173 (lr=1.2246e-05) (hash(x)=173884145)
+ 6200 val loss 6.0520
+ 6200 val perplexity 424.9789
+ 6200 train 6.085427 (lr=1.1838e-05) (hash(x)=151987098)
+ 6300 val loss 6.0481
+ 6300 val perplexity 423.3203
+ 6300 train 5.995059 (lr=1.1434e-05) (hash(x)=148853562)
+ 6400 val loss 6.0401
+ 6400 val perplexity 419.9156
+ 6400 train 5.790720 (lr=1.1035e-05) (hash(x)=141530101)
+ 6500 val loss 6.0382
+ 6500 val perplexity 419.1209
+ 6500 train 5.891243 (lr=1.0643e-05) (hash(x)=142297809)
+ 6600 val loss 6.0280
+ 6600 val perplexity 414.8662
+ 6600 train 5.915996 (lr=1.0256e-05) (hash(x)=142447782)
+ 6700 val loss 6.0262
+ 6700 val perplexity 414.1220
+ 6700 train 6.011028 (lr=9.8753e-06) (hash(x)=147004686)
+ 6800 val loss 6.0205
+ 6800 val perplexity 411.7659
+ 6800 train 5.761605 (lr=9.5017e-06) (hash(x)=133438702)
+ 6900 val loss 6.0187
+ 6900 val perplexity 411.0467
+ 6900 train 6.033222 (lr=9.1353e-06) (hash(x)=157085143)
+ 7000 val loss 6.0155
+ 7000 val perplexity 409.7354
+ 7000 train 5.893958 (lr=8.7764e-06) (hash(x)=139437666)
+ 7100 val loss 6.0085
+ 7100 val perplexity 406.8734
+ 7100 train 6.023874 (lr=8.4255e-06) (hash(x)=159792986)
+ 7200 val loss 6.0023
+ 7200 val perplexity 404.3719
+ 7200 train 5.971371 (lr=8.0829e-06) (hash(x)=144930687)
+ 7300 val loss 5.9993
+ 7300 val perplexity 403.1321
+ 7300 train 6.061211 (lr=7.7489e-06) (hash(x)=156242690)
+ 7400 val loss 5.9979
+ 7400 val perplexity 402.5811
+ 7400 train 5.837852 (lr=7.4239e-06) (hash(x)=148183719)
+ 7500 val loss 5.9952
+ 7500 val perplexity 401.4802
+ 7500 train 6.040481 (lr=7.1083e-06) (hash(x)=152494758)
+ 7600 val loss 5.9928
+ 7600 val perplexity 400.5148
+ 7600 train 5.688015 (lr=6.8023e-06) (hash(x)=142485027)
+ 7700 val loss 5.9925
+ 7700 val perplexity 400.4065
+ 7700 train 5.856967 (lr=6.5062e-06) (hash(x)=147512165)
+ 7800 val loss 5.9823
+ 7800 val perplexity 396.3450
+ 7800 train 5.875477 (lr=6.2205e-06) (hash(x)=160346994)
+ 7900 val loss 5.9835
+ 7900 val perplexity 396.8160
+ 7900 train 5.854673 (lr=5.9453e-06) (hash(x)=144488254)
+ 8000 val loss 5.9761
+ 8000 val perplexity 393.9162
+ 8000 train 5.791002 (lr=5.6809e-06) (hash(x)=147637019)
+ 8100 val loss 5.9770
+ 8100 val perplexity 394.2499
+ 8100 train 5.836208 (lr=5.4277e-06) (hash(x)=147340534)
+ 8200 val loss 5.9740
+ 8200 val perplexity 393.0701
+ 8200 train 6.029647 (lr=5.1858e-06) (hash(x)=151630665)
+ 8300 val loss 5.9693
+ 8300 val perplexity 391.2296
+ 8300 train 6.069899 (lr=4.9556e-06) (hash(x)=149747064)
+ 8400 val loss 5.9674
+ 8400 val perplexity 390.5003
+ 8400 train 6.151947 (lr=4.7372e-06) (hash(x)=154245770)
+ 8500 val loss 5.9659
+ 8500 val perplexity 389.9173
+ 8500 train 5.844897 (lr=4.5309e-06) (hash(x)=152559100)
+ 8600 val loss 5.9633
+ 8600 val perplexity 388.8792
+ 8600 train 6.405670 (lr=4.3369e-06) (hash(x)=181365926)
+ 8700 val loss 5.9625
+ 8700 val perplexity 388.5897
+ 8700 train 5.803471 (lr=4.1554e-06) (hash(x)=154405991)
+ 8800 val loss 5.9606
+ 8800 val perplexity 387.8566
+ 8800 train 5.906060 (lr=3.9866e-06) (hash(x)=153755904)
+ 8900 val loss 5.9565
+ 8900 val perplexity 386.2402
+ 8900 train 5.878875 (lr=3.8307e-06) (hash(x)=152120568)
+ 9000 val loss 5.9533
+ 9000 val perplexity 385.0145
+ 9000 train 5.743235 (lr=3.6877e-06) (hash(x)=142797279)
+ 9100 val loss 5.9519
+ 9100 val perplexity 384.4747
+ 9100 train 5.811658 (lr=3.5580e-06) (hash(x)=143037503)
+ 9200 val loss 5.9518
+ 9200 val perplexity 384.4454
+ 9200 train 5.815796 (lr=3.4415e-06) (hash(x)=113690273)
+ 9300 val loss 5.9511
+ 9300 val perplexity 384.1758
+ 9300 train 5.896542 (lr=3.3385e-06) (hash(x)=158025077)
+ 9400 val loss 5.9488
+ 9400 val perplexity 383.2877
+ 9400 train 6.020230 (lr=3.2490e-06) (hash(x)=158251718)
+ 9500 val loss 5.9458
+ 9500 val perplexity 382.1567
+ 9500 train 5.991383 (lr=3.1730e-06) (hash(x)=154752610)
+ 9600 val loss 5.9455
+ 9600 val perplexity 382.0358
+ 9600 train 5.821980 (lr=3.1108e-06) (hash(x)=146889093)
+ 9700 val loss 5.9428
+ 9700 val perplexity 380.9863
+ 9700 train 5.947937 (lr=3.0624e-06) (hash(x)=156906516)
+ 9800 val loss 5.9424
+ 9800 val perplexity 380.8622
+ 9800 train 5.783137 (lr=3.0277e-06) (hash(x)=153841927)
+ 9900 val loss 5.9426
+ 9900 val perplexity 380.9407
+ 9900 train 6.163361 (lr=3.0069e-06) (hash(x)=163514334)
+ 9999 val loss 5.9385
+ 9999 val perplexity 379.3569
attention_kindselective_n_heads4_seed1340/model_02500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:abc5e719ab2d39dd15542a62fab76a12f211bdda8cb49d1f4cc978df6d5c511c
  size 92843394
 
  version https://git-lfs.github.com/spec/v1
+ oid sha256:e81f78916f1fcd907958b2ffa161577c66b852e8fd3c0779d687fb593da6ad3b
  size 92843394
attention_kindselective_n_heads4_seed1340/model_05000.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:4adea175cfbc2aec7fa1818cc4955862e5783f4af8eed99bc8c0c0004a21b105
  size 92843394
 
  version https://git-lfs.github.com/spec/v1
+ oid sha256:6018096758756901f3094eb569bb1ff11a3078e161f7128e635f37a4fe73d4ac
  size 92843394
attention_kindselective_n_heads4_seed1340/model_07500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:cd26e0326526daf53111cc178c81ef9d8d0794fbec4f45b266907ee2a63100eb
  size 92843394
 
  version https://git-lfs.github.com/spec/v1
+ oid sha256:dd6e8eabd0b98757525d429e9a980b400592f2326c9b6e08461fb96801fc5fa6
  size 92843394
attention_kindselective_n_heads4_seed1340/model_09999.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:4c18c0c2379102f3d5a0dc91c07f339598affa0059ba1b204ddcd7b59fcc2156
  size 92843394
 
  version https://git-lfs.github.com/spec/v1
+ oid sha256:0edff24b6add430f0202c2e51013b6a0300d2a177d1940ce2c18dd7623b89ca9
  size 92843394
attention_kindselective_n_heads4_seed1340/optimizer_02500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:73bd395e697f222dad88aa6e4932f1c2132ab82b4a763c51958033424856cf1d
  size 179406214
 
  version https://git-lfs.github.com/spec/v1
+ oid sha256:d15f93b84a2def82d5aafdfa962ee066ed3a7abc6563962369764cb0ef0b364b
  size 179406214
attention_kindselective_n_heads4_seed1340/optimizer_05000.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:dceafff09f962434445411aaf6b406fbf74c4d91675fcc6b1a4b2ad2fc0187ef
  size 179406214
 
  version https://git-lfs.github.com/spec/v1
+ oid sha256:ca694617631608a39e17abec4008d3ce4640f7e1ad9e64b3ffbd590a9a2c5aaa
  size 179406214
attention_kindselective_n_heads4_seed1340/optimizer_07500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:68e4b2823d6671f41998ab15b43eb99146129f535fd8bdeb27769d668f347ff7
  size 179406214
 
  version https://git-lfs.github.com/spec/v1
+ oid sha256:25b0c8eb9d31c8f5adf2d93370de5825f34eb19beb4933795ecf5b8f04413bd9
  size 179406214
attention_kindselective_n_heads4_seed1340/optimizer_09999.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:8e1d34305e46c99a66f795b9ae398c282c929450872c4d4310042322dc6f4748
  size 179406214
 
  version https://git-lfs.github.com/spec/v1
+ oid sha256:b235a69300a9359b79b862673bd8470faa6c26089f65fcee66976e068cc6662e
  size 179406214