andrew-healey committed
Commit 4216ac6 · verified · 1 Parent(s): e145634

Upload folder using huggingface_hub
attention_kindselective_n_heads4_seed1339/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_4/attention_kindselective_n_heads4_seed1339", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_4", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1339, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "0.5e-4_30720_4_1339", "n_embd": 256}
 
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_4/attention_kindselective_n_heads4_seed1339", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_4", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1339, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 3e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "3e-5_30720_4_1339", "n_embd": 256}
attention_kindselective_n_heads4_seed1339/log2.txt CHANGED
@@ -1,303 +1,303 @@
  max_steps: 10000
  0 val loss 11.2810
  0 val perplexity 79298.4922
- 0 train 11.295413 (lr=2.5000e-07) (hash(x)=150724848)
- 100 val loss 9.7855
- 100 val perplexity 17773.2871
- 100 train 9.836302 (lr=2.5250e-05) (hash(x)=149217061)
- 200 val loss 8.0909
- 200 val perplexity 3264.4639
- 200 train 8.104568 (lr=5.0000e-05) (hash(x)=149910534)
- 300 val loss 7.6193
- 300 val perplexity 2037.1655
- 300 train 7.494644 (lr=4.9988e-05) (hash(x)=142185643)
- 400 val loss 7.5177
- 400 val perplexity 1840.3098
- 400 train 7.550296 (lr=4.9954e-05) (hash(x)=148123706)
- 500 val loss 7.4518
- 500 val perplexity 1722.9491
- 500 train 7.302660 (lr=4.9896e-05) (hash(x)=149952383)
- 600 val loss 7.4002
- 600 val perplexity 1636.2810
- 600 train 7.193367 (lr=4.9815e-05) (hash(x)=146678221)
- 700 val loss 7.2899
- 700 val perplexity 1465.4797
- 700 train 7.324069 (lr=4.9712e-05) (hash(x)=156180736)
- 800 val loss 7.1962
- 800 val perplexity 1334.2875
- 800 train 7.182938 (lr=4.9585e-05) (hash(x)=151700982)
- 900 val loss 7.1326
- 900 val perplexity 1252.1842
- 900 train 6.912815 (lr=4.9436e-05) (hash(x)=147288467)
- 1000 val loss 7.0566
- 1000 val perplexity 1160.5161
- 1000 train 7.103452 (lr=4.9264e-05) (hash(x)=156182087)
- 1100 val loss 7.0104
- 1100 val perplexity 1108.0488
- 1100 train 6.875510 (lr=4.9070e-05) (hash(x)=147861550)
- 1200 val loss 6.9584
- 1200 val perplexity 1051.9824
- 1200 train 6.816287 (lr=4.8854e-05) (hash(x)=149318660)
- 1300 val loss 6.9151
- 1300 val perplexity 1007.3896
- 1300 train 6.738190 (lr=4.8616e-05) (hash(x)=151631103)
- 1400 val loss 6.8571
- 1400 val perplexity 950.6181
- 1400 train 6.885700 (lr=4.8356e-05) (hash(x)=150482428)
- 1500 val loss 6.7695
- 1500 val perplexity 870.8608
- 1500 train 6.605503 (lr=4.8074e-05) (hash(x)=144458617)
- 1600 val loss 6.6986
- 1600 val perplexity 811.2830
- 1600 train 6.586299 (lr=4.7772e-05) (hash(x)=143268605)
- 1700 val loss 6.6478
- 1700 val perplexity 771.0643
- 1700 train 6.589181 (lr=4.7448e-05) (hash(x)=150817011)
- 1800 val loss 6.6028
- 1800 val perplexity 737.1257
- 1800 train 6.595220 (lr=4.7105e-05) (hash(x)=152322423)
- 1900 val loss 6.5588
- 1900 val perplexity 705.4356
- 1900 train 6.328050 (lr=4.6741e-05) (hash(x)=143574126)
- 2000 val loss 6.5234
- 2000 val perplexity 680.9199
- 2000 train 6.262884 (lr=4.6357e-05) (hash(x)=147904298)
- 2100 val loss 6.4996
- 2100 val perplexity 664.8712
- 2100 train 6.229161 (lr=4.5954e-05) (hash(x)=144719400)
- 2200 val loss 6.4638
- 2200 val perplexity 641.5072
- 2200 train 6.717620 (lr=4.5532e-05) (hash(x)=154343147)
- 2300 val loss 6.4179
- 2300 val perplexity 612.6993
- 2300 train 6.457769 (lr=4.5091e-05) (hash(x)=154372635)
- 2400 val loss 6.3894
- 2400 val perplexity 595.4929
- 2400 train 6.222154 (lr=4.4633e-05) (hash(x)=141843115)
- 2500 val loss 6.3550
- 2500 val perplexity 575.3881
- 2500 train 6.258588 (lr=4.4156e-05) (hash(x)=149417679)
- 2600 val loss 6.3298
- 2600 val perplexity 561.0241
- 2600 train 6.210278 (lr=4.3663e-05) (hash(x)=145279030)
- 2700 val loss 6.3072
- 2700 val perplexity 548.5043
- 2700 train 6.232386 (lr=4.3153e-05) (hash(x)=146363911)
- 2800 val loss 6.3028
- 2800 val perplexity 546.0763
- 2800 train 6.250120 (lr=4.2627e-05) (hash(x)=152507639)
- 2900 val loss 6.2756
- 2900 val perplexity 531.4626
- 2900 train 6.065167 (lr=4.2085e-05) (hash(x)=139296191)
- 3000 val loss 6.2584
- 3000 val perplexity 522.3799
- 3000 train 6.259926 (lr=4.1529e-05) (hash(x)=148473774)
- 3100 val loss 6.2200
- 3100 val perplexity 502.7139
- 3100 train 6.297199 (lr=4.0957e-05) (hash(x)=154130527)
- 3200 val loss 6.2027
- 3200 val perplexity 494.1042
- 3200 train 6.156054 (lr=4.0373e-05) (hash(x)=151117002)
- 3300 val loss 6.2222
- 3300 val perplexity 503.8253
- 3300 train 6.403162 (lr=3.9775e-05) (hash(x)=151035383)
- 3400 val loss 6.1724
- 3400 val perplexity 479.3145
- 3400 train 6.324723 (lr=3.9164e-05) (hash(x)=150614749)
- 3500 val loss 6.1556
- 3500 val perplexity 471.3615
- 3500 train 6.130349 (lr=3.8541e-05) (hash(x)=151431391)
- 3600 val loss 6.1415
- 3600 val perplexity 464.7550
- 3600 train 6.086444 (lr=3.7907e-05) (hash(x)=151140797)
- 3700 val loss 6.1257
- 3700 val perplexity 457.4801
- 3700 train 6.282626 (lr=3.7262e-05) (hash(x)=154608340)
- 3800 val loss 6.1110
- 3800 val perplexity 450.7861
- 3800 train 6.130027 (lr=3.6608e-05) (hash(x)=169641309)
- 3900 val loss 6.0976
- 3900 val perplexity 444.7820
- 3900 train 6.078045 (lr=3.5944e-05) (hash(x)=150592479)
- 4000 val loss 6.0864
- 4000 val perplexity 439.8175
- 4000 train 6.084772 (lr=3.5271e-05) (hash(x)=155495317)
- 4100 val loss 6.0752
- 4100 val perplexity 434.9162
- 4100 train 6.258116 (lr=3.4590e-05) (hash(x)=170323970)
- 4200 val loss 6.0694
- 4200 val perplexity 432.4155
- 4200 train 5.917346 (lr=3.3902e-05) (hash(x)=140854360)
- 4300 val loss 6.0450
- 4300 val perplexity 421.9954
- 4300 train 6.074870 (lr=3.3207e-05) (hash(x)=148926644)
- 4400 val loss 6.0332
- 4400 val perplexity 417.0492
- 4400 train 6.078179 (lr=3.2507e-05) (hash(x)=149997871)
- 4500 val loss 6.0181
- 4500 val perplexity 410.7877
- 4500 train 5.947857 (lr=3.1801e-05) (hash(x)=149219289)
- 4600 val loss 6.0075
- 4600 val perplexity 406.4592
- 4600 train 5.819511 (lr=3.1091e-05) (hash(x)=139836197)
- 4700 val loss 6.0009
- 4700 val perplexity 403.7760
- 4700 train 5.956812 (lr=3.0377e-05) (hash(x)=150722883)
- 4800 val loss 5.9937
- 4800 val perplexity 400.8761
- 4800 train 6.216718 (lr=2.9661e-05) (hash(x)=166864648)
- 4900 val loss 5.9782
- 4900 val perplexity 394.7388
- 4900 train 6.082317 (lr=2.8942e-05) (hash(x)=157746743)
- 5000 val loss 5.9667
- 5000 val perplexity 390.2183
- 5000 train 5.909554 (lr=2.8221e-05) (hash(x)=153032257)
- 5100 val loss 5.9598
- 5100 val perplexity 387.5401
- 5100 train 6.027834 (lr=2.7500e-05) (hash(x)=151862371)
- 5200 val loss 5.9436
- 5200 val perplexity 381.3076
- 5200 train 6.246490 (lr=2.6779e-05) (hash(x)=168862995)
- 5300 val loss 5.9406
- 5300 val perplexity 380.1586
- 5300 train 5.897988 (lr=2.6058e-05) (hash(x)=148243567)
- 5400 val loss 5.9296
- 5400 val perplexity 376.0054
- 5400 train 5.878036 (lr=2.5339e-05) (hash(x)=145381742)
- 5500 val loss 5.9223
- 5500 val perplexity 373.2592
- 5500 train 6.065330 (lr=2.4623e-05) (hash(x)=155602174)
- 5600 val loss 5.9051
- 5600 val perplexity 366.8907
- 5600 train 5.973073 (lr=2.3909e-05) (hash(x)=160637672)
- 5700 val loss 5.8967
- 5700 val perplexity 363.8202
- 5700 train 5.849224 (lr=2.3199e-05) (hash(x)=152101131)
- 5800 val loss 5.8853
- 5800 val perplexity 359.7047
- 5800 train 5.849683 (lr=2.2493e-05) (hash(x)=152327089)
- 5900 val loss 5.8840
- 5900 val perplexity 359.2561
- 5900 train 5.854714 (lr=2.1793e-05) (hash(x)=151094543)
- 6000 val loss 5.8786
- 6000 val perplexity 357.3181
- 6000 train 5.959620 (lr=2.1098e-05) (hash(x)=160233315)
- 6100 val loss 5.8675
- 6100 val perplexity 353.3617
- 6100 train 5.975004 (lr=2.0410e-05) (hash(x)=158295337)
- 6200 val loss 5.8583
- 6200 val perplexity 350.1454
- 6200 train 5.810185 (lr=1.9729e-05) (hash(x)=151269817)
- 6300 val loss 5.8503
- 6300 val perplexity 347.3304
- 6300 train 6.021831 (lr=1.9056e-05) (hash(x)=156912756)
- 6400 val loss 5.8446
- 6400 val perplexity 345.3590
- 6400 train 5.762765 (lr=1.8392e-05) (hash(x)=125779637)
- 6500 val loss 5.8434
- 6500 val perplexity 344.9615
- 6500 train 5.746417 (lr=1.7738e-05) (hash(x)=148157661)
- 6600 val loss 5.8375
- 6600 val perplexity 342.9185
- 6600 train 5.843159 (lr=1.7093e-05) (hash(x)=152820049)
- 6700 val loss 5.8293
- 6700 val perplexity 340.1296
- 6700 train 5.762090 (lr=1.6459e-05) (hash(x)=155818717)
- 6800 val loss 5.8213
- 6800 val perplexity 337.4054
- 6800 train 5.823565 (lr=1.5836e-05) (hash(x)=158833881)
- 6900 val loss 5.8178
- 6900 val perplexity 336.2453
- 6900 train 5.699900 (lr=1.5225e-05) (hash(x)=164984909)
- 7000 val loss 5.8136
- 7000 val perplexity 334.8174
- 7000 train 5.715562 (lr=1.4627e-05) (hash(x)=151172493)
- 7100 val loss 5.8125
- 7100 val perplexity 334.4544
- 7100 train 5.878971 (lr=1.4043e-05) (hash(x)=154023633)
- 7200 val loss 5.8041
- 7200 val perplexity 331.6638
- 7200 train 5.654086 (lr=1.3471e-05) (hash(x)=139351250)
- 7300 val loss 5.7993
- 7300 val perplexity 330.0705
- 7300 train 5.760409 (lr=1.2915e-05) (hash(x)=158719458)
- 7400 val loss 5.7935
- 7400 val perplexity 328.1453
- 7400 train 5.858016 (lr=1.2373e-05) (hash(x)=162250810)
- 7500 val loss 5.7854
- 7500 val perplexity 325.4996
- 7500 train 5.671573 (lr=1.1847e-05) (hash(x)=142827557)
- 7600 val loss 5.7819
- 7600 val perplexity 324.3608
- 7600 train 5.689194 (lr=1.1337e-05) (hash(x)=147830801)
- 7700 val loss 5.7770
- 7700 val perplexity 322.7880
- 7700 train 5.886009 (lr=1.0844e-05) (hash(x)=165233314)
- 7800 val loss 5.7758
- 7800 val perplexity 322.3985
- 7800 train 5.762441 (lr=1.0367e-05) (hash(x)=160654717)
- 7900 val loss 5.7727
- 7900 val perplexity 321.4012
- 7900 train 5.651993 (lr=9.9088e-06) (hash(x)=140712586)
- 8000 val loss 5.7648
- 8000 val perplexity 318.8694
- 8000 train 5.921185 (lr=9.4682e-06) (hash(x)=150309061)
- 8100 val loss 5.7624
- 8100 val perplexity 318.1208
- 8100 train 5.754895 (lr=9.0461e-06) (hash(x)=158805466)
- 8200 val loss 5.7590
- 8200 val perplexity 317.0285
- 8200 train 5.649700 (lr=8.6430e-06) (hash(x)=142814078)
- 8300 val loss 5.7564
- 8300 val perplexity 316.1991
- 8300 train 5.721898 (lr=8.2593e-06) (hash(x)=148340417)
- 8400 val loss 5.7526
- 8400 val perplexity 315.0060
- 8400 train 5.684734 (lr=7.8953e-06) (hash(x)=157132639)
- 8500 val loss 5.7524
- 8500 val perplexity 314.9457
- 8500 train 5.686018 (lr=7.5515e-06) (hash(x)=152994240)
- 8600 val loss 5.7467
- 8600 val perplexity 313.1505
- 8600 train 5.707251 (lr=7.2282e-06) (hash(x)=152210154)
- 8700 val loss 5.7439
- 8700 val perplexity 312.2925
- 8700 train 5.840783 (lr=6.9257e-06) (hash(x)=149631247)
- 8800 val loss 5.7405
- 8800 val perplexity 311.2222
- 8800 train 5.616950 (lr=6.6444e-06) (hash(x)=145617784)
- 8900 val loss 5.7388
- 8900 val perplexity 310.6878
- 8900 train 5.849643 (lr=6.3845e-06) (hash(x)=155984970)
- 9000 val loss 5.7392
- 9000 val perplexity 310.8018
- 9000 train 5.479745 (lr=6.1462e-06) (hash(x)=143433013)
- 9100 val loss 5.7369
- 9100 val perplexity 310.1082
- 9100 train 5.641852 (lr=5.9300e-06) (hash(x)=152507533)
- 9200 val loss 5.7322
- 9200 val perplexity 308.6447
- 9200 train 5.855296 (lr=5.7359e-06) (hash(x)=162400650)
- 9300 val loss 5.7293
- 9300 val perplexity 307.7554
- 9300 train 5.773619 (lr=5.5641e-06) (hash(x)=148419068)
- 9400 val loss 5.7281
- 9400 val perplexity 307.3799
- 9400 train 5.693515 (lr=5.4149e-06) (hash(x)=145025169)
- 9500 val loss 5.7252
- 9500 val perplexity 306.4956
- 9500 train 5.701646 (lr=5.2884e-06) (hash(x)=145104235)
- 9600 val loss 5.7255
- 9600 val perplexity 306.5889
- 9600 train 5.670694 (lr=5.1847e-06) (hash(x)=150499797)
- 9700 val loss 5.7246
- 9700 val perplexity 306.3200
- 9700 train 5.538178 (lr=5.1040e-06) (hash(x)=139690022)
- 9800 val loss 5.7195
- 9800 val perplexity 304.7672
- 9800 train 5.719058 (lr=5.0462e-06) (hash(x)=145645279)
- 9900 val loss 5.7182
- 9900 val perplexity 304.3503
- 9900 train 5.652002 (lr=5.0116e-06) (hash(x)=150103432)
- 9999 val loss 5.7170
- 9999 val perplexity 303.9936
 
  max_steps: 10000
  0 val loss 11.2810
  0 val perplexity 79298.4922
+ 0 train 11.295413 (lr=1.5000e-07) (hash(x)=150724848)
+ 100 val loss 9.9459
+ 100 val perplexity 20867.0703
+ 100 train 9.994777 (lr=1.5150e-05) (hash(x)=149217061)
+ 200 val loss 8.9447
+ 200 val perplexity 7666.8354
+ 200 train 8.962429 (lr=3.0000e-05) (hash(x)=149910534)
+ 300 val loss 8.1110
+ 300 val perplexity 3330.9646
+ 300 train 8.046171 (lr=2.9993e-05) (hash(x)=142185643)
+ 400 val loss 7.8325
+ 400 val perplexity 2521.1560
+ 400 train 7.894480 (lr=2.9972e-05) (hash(x)=148123706)
+ 500 val loss 7.6880
+ 500 val perplexity 2182.0981
+ 500 train 7.583099 (lr=2.9938e-05) (hash(x)=149952383)
+ 600 val loss 7.6161
+ 600 val perplexity 2030.5824
+ 600 train 7.453497 (lr=2.9889e-05) (hash(x)=146678221)
+ 700 val loss 7.5561
+ 700 val perplexity 1912.4199
+ 700 train 7.585546 (lr=2.9827e-05) (hash(x)=156180736)
+ 800 val loss 7.5147
+ 800 val perplexity 1834.8335
+ 800 train 7.512384 (lr=2.9751e-05) (hash(x)=151700982)
+ 900 val loss 7.4737
+ 900 val perplexity 1761.1957
+ 900 train 7.274730 (lr=2.9662e-05) (hash(x)=147288467)
+ 1000 val loss 7.4296
+ 1000 val perplexity 1685.1378
+ 1000 train 7.474689 (lr=2.9558e-05) (hash(x)=156182087)
+ 1100 val loss 7.3952
+ 1100 val perplexity 1628.2239
+ 1100 train 7.274476 (lr=2.9442e-05) (hash(x)=147861550)
+ 1200 val loss 7.3647
+ 1200 val perplexity 1579.2979
+ 1200 train 7.235832 (lr=2.9312e-05) (hash(x)=149318660)
+ 1300 val loss 7.3459
+ 1300 val perplexity 1549.8248
+ 1300 train 7.192342 (lr=2.9169e-05) (hash(x)=151631103)
+ 1400 val loss 7.3127
+ 1400 val perplexity 1499.1851
+ 1400 train 7.358240 (lr=2.9013e-05) (hash(x)=150482428)
+ 1500 val loss 7.2705
+ 1500 val perplexity 1437.3295
+ 1500 train 7.126445 (lr=2.8845e-05) (hash(x)=144458617)
+ 1600 val loss 7.2328
+ 1600 val perplexity 1384.0747
+ 1600 train 7.107043 (lr=2.8663e-05) (hash(x)=143268605)
+ 1700 val loss 7.1955
+ 1700 val perplexity 1333.4301
+ 1700 train 7.124366 (lr=2.8469e-05) (hash(x)=150817011)
+ 1800 val loss 7.1705
+ 1800 val perplexity 1300.5093
+ 1800 train 7.149174 (lr=2.8263e-05) (hash(x)=152322423)
+ 1900 val loss 7.1346
+ 1900 val perplexity 1254.6371
+ 1900 train 6.945065 (lr=2.8044e-05) (hash(x)=143574126)
+ 2000 val loss 7.1080
+ 2000 val perplexity 1221.6805
+ 2000 train 6.863167 (lr=2.7814e-05) (hash(x)=147904298)
+ 2100 val loss 7.0765
+ 2100 val perplexity 1183.7659
+ 2100 train 6.827906 (lr=2.7572e-05) (hash(x)=144719400)
+ 2200 val loss 7.0363
+ 2200 val perplexity 1137.2002
+ 2200 train 7.275653 (lr=2.7319e-05) (hash(x)=154343147)
+ 2300 val loss 6.9925
+ 2300 val perplexity 1088.4722
+ 2300 train 7.036984 (lr=2.7055e-05) (hash(x)=154372635)
+ 2400 val loss 6.9563
+ 2400 val perplexity 1049.7842
+ 2400 train 6.786735 (lr=2.6780e-05) (hash(x)=141843115)
+ 2500 val loss 6.9160
+ 2500 val perplexity 1008.2984
+ 2500 train 6.819649 (lr=2.6494e-05) (hash(x)=149417679)
+ 2600 val loss 6.8807
+ 2600 val perplexity 973.2830
+ 2600 train 6.748271 (lr=2.6198e-05) (hash(x)=145279030)
+ 2700 val loss 6.8496
+ 2700 val perplexity 943.5231
+ 2700 train 6.754533 (lr=2.5892e-05) (hash(x)=146363911)
+ 2800 val loss 6.8212
+ 2800 val perplexity 917.1302
+ 2800 train 6.784424 (lr=2.5576e-05) (hash(x)=152507639)
+ 2900 val loss 6.7822
+ 2900 val perplexity 882.0267
+ 2900 train 6.574083 (lr=2.5251e-05) (hash(x)=139296191)
+ 3000 val loss 6.7505
+ 3000 val perplexity 854.4930
+ 3000 train 6.765373 (lr=2.4917e-05) (hash(x)=148473774)
+ 3100 val loss 6.6986
+ 3100 val perplexity 811.3074
+ 3100 train 6.792789 (lr=2.4574e-05) (hash(x)=154130527)
+ 3200 val loss 6.6650
+ 3200 val perplexity 784.5016
+ 3200 train 6.606997 (lr=2.4224e-05) (hash(x)=151117002)
+ 3300 val loss 6.6496
+ 3300 val perplexity 772.4790
+ 3300 train 6.792391 (lr=2.3865e-05) (hash(x)=151035383)
+ 3400 val loss 6.6047
+ 3400 val perplexity 738.5643
+ 3400 train 6.713327 (lr=2.3498e-05) (hash(x)=150614749)
+ 3500 val loss 6.5760
+ 3500 val perplexity 717.6874
+ 3500 train 6.599133 (lr=2.3125e-05) (hash(x)=151431391)
+ 3600 val loss 6.5515
+ 3600 val perplexity 700.3228
+ 3600 train 6.488029 (lr=2.2744e-05) (hash(x)=151140797)
+ 3700 val loss 6.5274
+ 3700 val perplexity 683.6309
+ 3700 train 6.736988 (lr=2.2357e-05) (hash(x)=154608340)
+ 3800 val loss 6.5015
+ 3800 val perplexity 666.1614
+ 3800 train 6.522555 (lr=2.1965e-05) (hash(x)=169641309)
+ 3900 val loss 6.4751
+ 3900 val perplexity 648.7977
+ 3900 train 6.433761 (lr=2.1566e-05) (hash(x)=150592479)
+ 4000 val loss 6.4565
+ 4000 val perplexity 636.8508
+ 4000 train 6.442842 (lr=2.1162e-05) (hash(x)=155495317)
+ 4100 val loss 6.4460
+ 4100 val perplexity 630.1829
+ 4100 train 6.642754 (lr=2.0754e-05) (hash(x)=170323970)
+ 4200 val loss 6.4345
+ 4200 val perplexity 622.9916
+ 4200 train 6.259358 (lr=2.0341e-05) (hash(x)=140854360)
+ 4300 val loss 6.4055
+ 4300 val perplexity 605.1801
+ 4300 train 6.408278 (lr=1.9924e-05) (hash(x)=148926644)
+ 4400 val loss 6.3889
+ 4400 val perplexity 595.1989
+ 4400 train 6.426485 (lr=1.9504e-05) (hash(x)=149997871)
+ 4500 val loss 6.3722
+ 4500 val perplexity 585.3224
+ 4500 train 6.315977 (lr=1.9081e-05) (hash(x)=149219289)
+ 4600 val loss 6.3600
+ 4600 val perplexity 578.2376
+ 4600 train 6.160038 (lr=1.8655e-05) (hash(x)=139836197)
+ 4700 val loss 6.3449
+ 4700 val perplexity 569.5987
+ 4700 train 6.298043 (lr=1.8226e-05) (hash(x)=150722883)
+ 4800 val loss 6.3342
+ 4800 val perplexity 563.5283
+ 4800 train 6.542304 (lr=1.7796e-05) (hash(x)=166864648)
+ 4900 val loss 6.3154
+ 4900 val perplexity 553.0249
+ 4900 train 6.413557 (lr=1.7365e-05) (hash(x)=157746743)
+ 5000 val loss 6.3011
+ 5000 val perplexity 545.1600
+ 5000 train 6.251759 (lr=1.6933e-05) (hash(x)=153032257)
+ 5100 val loss 6.2911
+ 5100 val perplexity 539.7576
+ 5100 train 6.377411 (lr=1.6500e-05) (hash(x)=151862371)
+ 5200 val loss 6.2793
+ 5200 val perplexity 533.4402
+ 5200 train 6.614571 (lr=1.6067e-05) (hash(x)=168862995)
+ 5300 val loss 6.2744
+ 5300 val perplexity 530.7993
+ 5300 train 6.238945 (lr=1.5635e-05) (hash(x)=148243567)
+ 5400 val loss 6.2627
+ 5400 val perplexity 524.6243
+ 5400 train 6.191408 (lr=1.5204e-05) (hash(x)=145381742)
+ 5500 val loss 6.2534
+ 5500 val perplexity 519.7742
+ 5500 train 6.404634 (lr=1.4774e-05) (hash(x)=155602174)
+ 5600 val loss 6.2356
+ 5600 val perplexity 510.5886
+ 5600 train 6.321620 (lr=1.4345e-05) (hash(x)=160637672)
+ 5700 val loss 6.2257
+ 5700 val perplexity 505.5756
+ 5700 train 6.153231 (lr=1.3919e-05) (hash(x)=152101131)
+ 5800 val loss 6.2163
+ 5800 val perplexity 500.8534
+ 5800 train 6.172603 (lr=1.3496e-05) (hash(x)=152327089)
+ 5900 val loss 6.2131
+ 5900 val perplexity 499.2327
+ 5900 train 6.169562 (lr=1.3076e-05) (hash(x)=151094543)
+ 6000 val loss 6.2057
+ 6000 val perplexity 495.5671
+ 6000 train 6.303425 (lr=1.2659e-05) (hash(x)=160233315)
+ 6100 val loss 6.1932
+ 6100 val perplexity 489.3962
+ 6100 train 6.290517 (lr=1.2246e-05) (hash(x)=158295337)
+ 6200 val loss 6.1835
+ 6200 val perplexity 484.6647
+ 6200 train 6.134736 (lr=1.1838e-05) (hash(x)=151269817)
+ 6300 val loss 6.1760
+ 6300 val perplexity 481.0584
+ 6300 train 6.316322 (lr=1.1434e-05) (hash(x)=156912756)
+ 6400 val loss 6.1698
+ 6400 val perplexity 478.1116
+ 6400 train 6.093508 (lr=1.1035e-05) (hash(x)=125779637)
+ 6500 val loss 6.1661
+ 6500 val perplexity 476.3085
+ 6500 train 6.062563 (lr=1.0643e-05) (hash(x)=148157661)
+ 6600 val loss 6.1610
+ 6600 val perplexity 473.9046
+ 6600 train 6.199767 (lr=1.0256e-05) (hash(x)=152820049)
+ 6700 val loss 6.1536
+ 6700 val perplexity 470.4144
+ 6700 train 6.092970 (lr=9.8753e-06) (hash(x)=155818717)
+ 6800 val loss 6.1432
+ 6800 val perplexity 465.5320
+ 6800 train 6.174351 (lr=9.5017e-06) (hash(x)=158833881)
+ 6900 val loss 6.1412
+ 6900 val perplexity 464.6119
+ 6900 train 6.079893 (lr=9.1353e-06) (hash(x)=164984909)
+ 7000 val loss 6.1353
+ 7000 val perplexity 461.8669
+ 7000 train 6.026324 (lr=8.7764e-06) (hash(x)=151172493)
+ 7100 val loss 6.1325
+ 7100 val perplexity 460.5665
+ 7100 train 6.166331 (lr=8.4255e-06) (hash(x)=154023633)
+ 7200 val loss 6.1241
+ 7200 val perplexity 456.7562
+ 7200 train 5.977105 (lr=8.0829e-06) (hash(x)=139351250)
+ 7300 val loss 6.1208
+ 7300 val perplexity 455.2353
+ 7300 train 6.070672 (lr=7.7489e-06) (hash(x)=158719458)
+ 7400 val loss 6.1134
+ 7400 val perplexity 451.8548
+ 7400 train 6.197914 (lr=7.4239e-06) (hash(x)=162250810)
+ 7500 val loss 6.1050
+ 7500 val perplexity 448.1147
+ 7500 train 5.971425 (lr=7.1083e-06) (hash(x)=142827557)
+ 7600 val loss 6.1044
+ 7600 val perplexity 447.8327
+ 7600 train 6.001914 (lr=6.8023e-06) (hash(x)=147830801)
+ 7700 val loss 6.1000
+ 7700 val perplexity 445.8545
+ 7700 train 6.213936 (lr=6.5062e-06) (hash(x)=165233314)
+ 7800 val loss 6.0973
+ 7800 val perplexity 444.6605
+ 7800 train 6.092710 (lr=6.2205e-06) (hash(x)=160654717)
+ 7900 val loss 6.0936
+ 7900 val perplexity 443.0253
+ 7900 train 5.945004 (lr=5.9453e-06) (hash(x)=140712586)
+ 8000 val loss 6.0883
+ 8000 val perplexity 440.6558
+ 8000 train 6.236843 (lr=5.6809e-06) (hash(x)=150309061)
+ 8100 val loss 6.0839
+ 8100 val perplexity 438.7413
+ 8100 train 6.095557 (lr=5.4277e-06) (hash(x)=158805466)
+ 8200 val loss 6.0804
+ 8200 val perplexity 437.2005
+ 8200 train 5.950284 (lr=5.1858e-06) (hash(x)=142814078)
+ 8300 val loss 6.0784
+ 8300 val perplexity 436.3120
+ 8300 train 6.033829 (lr=4.9556e-06) (hash(x)=148340417)
+ 8400 val loss 6.0766
+ 8400 val perplexity 435.5259
+ 8400 train 6.016440 (lr=4.7372e-06) (hash(x)=157132639)
+ 8500 val loss 6.0766
+ 8500 val perplexity 435.5307
+ 8500 train 6.019042 (lr=4.5309e-06) (hash(x)=152994240)
+ 8600 val loss 6.0692
+ 8600 val perplexity 432.3297
+ 8600 train 6.012918 (lr=4.3369e-06) (hash(x)=152210154)
+ 8700 val loss 6.0673
+ 8700 val perplexity 431.4998
+ 8700 train 6.135266 (lr=4.1554e-06) (hash(x)=149631247)
+ 8800 val loss 6.0635
+ 8800 val perplexity 429.8681
+ 8800 train 5.918789 (lr=3.9866e-06) (hash(x)=145617784)
+ 8900 val loss 6.0627
+ 8900 val perplexity 429.5137
+ 8900 train 6.162240 (lr=3.8307e-06) (hash(x)=155984970)
+ 9000 val loss 6.0619
+ 9000 val perplexity 429.1812
+ 9000 train 5.790365 (lr=3.6877e-06) (hash(x)=143433013)
+ 9100 val loss 6.0604
+ 9100 val perplexity 428.5354
+ 9100 train 5.963179 (lr=3.5580e-06) (hash(x)=152507533)
+ 9200 val loss 6.0564
+ 9200 val perplexity 426.8368
+ 9200 train 6.185226 (lr=3.4415e-06) (hash(x)=162400650)
+ 9300 val loss 6.0525
+ 9300 val perplexity 425.1668
+ 9300 train 6.100528 (lr=3.3385e-06) (hash(x)=148419068)
+ 9400 val loss 6.0515
+ 9400 val perplexity 424.7288
+ 9400 train 6.007780 (lr=3.2490e-06) (hash(x)=145025169)
+ 9500 val loss 6.0493
+ 9500 val perplexity 423.8244
+ 9500 train 6.020970 (lr=3.1730e-06) (hash(x)=145104235)
+ 9600 val loss 6.0494
+ 9600 val perplexity 423.8671
+ 9600 train 5.979624 (lr=3.1108e-06) (hash(x)=150499797)
+ 9700 val loss 6.0480
+ 9700 val perplexity 423.2599
+ 9700 train 5.853704 (lr=3.0624e-06) (hash(x)=139690022)
+ 9800 val loss 6.0438
+ 9800 val perplexity 421.4906
+ 9800 train 6.032509 (lr=3.0277e-06) (hash(x)=145645279)
+ 9900 val loss 6.0413
+ 9900 val perplexity 420.4520
+ 9900 train 5.953890 (lr=3.0069e-06) (hash(x)=150103432)
+ 9999 val loss 6.0409
+ 9999 val perplexity 420.2706
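Two sanity checks on the log above. First, each val perplexity is just exp(val loss): exp(11.2810) ≈ 79298.5. Second, the lr=... trace matches linear warmup over warmup_steps=200 followed by cosine decay over the remaining steps. A short sketch reproducing the logged values, under the assumption (inferred from the numbers, not confirmed against the training code) that the schedule decays to a floor of max_lr / 10:

import math

def lr_at(step: int, max_lr: float, warmup_steps: int = 200, max_steps: int = 10000) -> float:
    # Linear warmup, then cosine decay to an assumed floor of max_lr / 10.
    min_lr = max_lr / 10
    if step < warmup_steps:
        return max_lr * (step + 1) / warmup_steps
    ratio = (step - warmup_steps) / (max_steps - warmup_steps)
    coeff = 0.5 * (1.0 + math.cos(math.pi * ratio))
    return min_lr + coeff * (max_lr - min_lr)

assert f"{lr_at(0, 3e-5):.4e}" == "1.5000e-07"     # "+ 0 train ... (lr=1.5000e-07)"
assert f"{lr_at(300, 5e-5):.4e}" == "4.9988e-05"   # "- 300 train ... (lr=4.9988e-05)"
assert f"{lr_at(9900, 3e-5):.4e}" == "3.0069e-06"  # "+ 9900 train ... (lr=3.0069e-06)"
assert abs(math.exp(11.2810) - 79298.4922) / 79298.4922 < 1e-4  # perplexity = exp(loss)

The lower peak learning rate (3e-5 vs 5e-5) explains both the slower early progress of the new run (val loss 8.9447 vs 8.0909 at step 200) and its worse final validation loss (6.0409 vs 5.7170).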
attention_kindselective_n_heads4_seed1339/model_02500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:ced26739fb82bc69f7ee9c640198949689a798ab740c86dbf2eadbd39203dd98
  size 92843394
 
  version https://git-lfs.github.com/spec/v1
+ oid sha256:ffaffcf5c0f6efcc94da68cdc405e6e9862dd0790ef6e99afe7668125fbccdce
  size 92843394
attention_kindselective_n_heads4_seed1339/model_05000.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:1ebac561767c2492d8e1fb6761bc84e4ec2fbb1367b033d71d0098b4bc2ecaca
  size 92843394
 
  version https://git-lfs.github.com/spec/v1
+ oid sha256:db5b06d727a530bfc9f8d978f75c6c949cca378fb6fbf2ad60ab2c17a58c0ec9
  size 92843394
attention_kindselective_n_heads4_seed1339/model_07500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:c5d2fda2d90c5ec03013f47a1c3364954d533cc0251601d3a41f6a83cdb40612
  size 92843394
 
  version https://git-lfs.github.com/spec/v1
+ oid sha256:748e5f7bfc76da340284ca983de81118dd5f42a18f1f1b8f4f780ff779d13549
  size 92843394
attention_kindselective_n_heads4_seed1339/model_09999.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:94b6f80f738e19cf0304a1d5d46415fcf9e9d41b48032c77a9af321381704eaa
  size 92843394
 
  version https://git-lfs.github.com/spec/v1
+ oid sha256:bbfb35a4b6f2bc6db9def860938d3a0628e0888b50937b40c4e0687cd2316940
  size 92843394
attention_kindselective_n_heads4_seed1339/optimizer_02500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:858364f02fec2cbc5fc2be92e99cf9942bf42be3bcff923e0b45ccff657789e6
  size 179406214
 
  version https://git-lfs.github.com/spec/v1
+ oid sha256:baff6d85d9201e86380a48a9008e3ec6f8a1d6910ac6e49f3ad62464aedac188
  size 179406214
attention_kindselective_n_heads4_seed1339/optimizer_05000.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:b6f8978b05d5a7af2625d71ea3a5d8eb6b6cea10bd5679bc91fe43f405c87e70
  size 179406214
 
  version https://git-lfs.github.com/spec/v1
+ oid sha256:249f2cee3d0c60145e854ec28f58efd8a1ab7615acee3baee7a18e30dc03a9c9
  size 179406214
attention_kindselective_n_heads4_seed1339/optimizer_07500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:4ef6fdae2c457a60886109c85a62b83253e66cfe65e65678fe7b4625a3f391a4
  size 179406214
 
  version https://git-lfs.github.com/spec/v1
+ oid sha256:edd2d28c22022d78b3ba5c4a7fe91806761d53475756f99a6074fc3988bc001e
  size 179406214
attention_kindselective_n_heads4_seed1339/optimizer_09999.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0bf90d410f6b64353348c768bb6fd59b5318b1e1dc467572b0b69c5931e62c7e
  size 179406214
 
  version https://git-lfs.github.com/spec/v1
+ oid sha256:c9b87a8ea80a58a13c81dc931e7a310144b15c4825ce7db1065749753691aec4
  size 179406214
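Every checkpoint hunk above swaps only the oid of a Git LFS pointer; the sizes are unchanged (92843394 bytes per model checkpoint, 179406214 bytes per optimizer state), consistent with retraining that changes tensor values but not shapes. A small sketch, standard library only, for verifying a downloaded file against its pointer (the local filename is illustrative):

import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    # Stream the file through SHA-256, which is what the LFS oid field records.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# After fetching the real file (e.g. with huggingface_hub), this should print
# the new oid from the diff above:
# bbfb35a4b6f2bc6db9def860938d3a0628e0888b50937b40c4e0687cd2316940
print(sha256_of("model_09999.pt"))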