andrew-healey committed
Commit 211e466 · verified · 1 Parent(s): 4216ac6

Upload folder using huggingface_hub

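Note: the commit message names huggingface_hub as the upload mechanism. A minimal sketch of how a run folder like this is typically pushed (the repo_id and local folder path below are hypothetical placeholders, not taken from this commit):

# Sketch: upload a training-run folder with huggingface_hub.
# repo_id and folder_path are hypothetical placeholders.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token saved by `huggingface-cli login`
api.upload_folder(
    folder_path="wider_is_better_4/attention_kindselective_n_heads4_seed1341",
    repo_id="andrew-healey/wider_is_better_4",  # hypothetical repo id
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)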
attention_kindselective_n_heads4_seed1341/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_4/attention_kindselective_n_heads4_seed1341", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_4", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1341, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "0.5e-4_30720_4_1341", "n_embd": 256}
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_4/attention_kindselective_n_heads4_seed1341", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_4", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1341, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 3e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "3e-5_30720_4_1341", "n_embd": 256}
attention_kindselective_n_heads4_seed1341/log2.txt CHANGED
@@ -1,303 +1,303 @@
  max_steps: 10000
  0 val loss 11.3107
  0 val perplexity 81693.5234
- 0 train 11.308768 (lr=2.5000e-07) (hash(x)=145079536)
- 100 val loss 9.8417
- 100 val perplexity 18802.5039
- 100 train 9.780692 (lr=2.5250e-05) (hash(x)=154745873)
- 200 val loss 8.4172
- 200 val perplexity 4524.1753
- 200 train 8.496327 (lr=5.0000e-05) (hash(x)=155800595)
- 300 val loss 7.7818
- 300 val perplexity 2396.6677
- 300 train 7.744482 (lr=4.9988e-05) (hash(x)=148595389)
- 400 val loss 7.6084
- 400 val perplexity 2014.9561
- 400 train 7.514979 (lr=4.9954e-05) (hash(x)=145606733)
- 500 val loss 7.5315
- 500 val perplexity 1865.8522
- 500 train 7.364153 (lr=4.9896e-05) (hash(x)=138221231)
- 600 val loss 7.4816
- 600 val perplexity 1774.9996
- 600 train 7.489302 (lr=4.9815e-05) (hash(x)=150367139)
- 700 val loss 7.4531
- 700 val perplexity 1725.2511
- 700 train 7.606433 (lr=4.9712e-05) (hash(x)=155579314)
- 800 val loss 7.4290
- 800 val perplexity 1684.0526
- 800 train 7.510591 (lr=4.9585e-05) (hash(x)=155747374)
- 900 val loss 7.4088
- 900 val perplexity 1650.4117
- 900 train 7.613670 (lr=4.9436e-05) (hash(x)=159334575)
- 1000 val loss 7.3825
- 1000 val perplexity 1607.6669
- 1000 train 7.258404 (lr=4.9264e-05) (hash(x)=140604760)
- 1100 val loss 7.3581
- 1100 val perplexity 1568.8760
- 1100 train 7.201476 (lr=4.9070e-05) (hash(x)=146208052)
- 1200 val loss 7.3370
- 1200 val perplexity 1536.1671
- 1200 train 7.254458 (lr=4.8854e-05) (hash(x)=148404734)
- 1300 val loss 7.3153
- 1300 val perplexity 1503.1155
- 1300 train 7.262329 (lr=4.8616e-05) (hash(x)=155681970)
- 1400 val loss 7.2990
- 1400 val perplexity 1478.8737
- 1400 train 7.255487 (lr=4.8356e-05) (hash(x)=148115934)
- 1500 val loss 7.2635
- 1500 val perplexity 1427.2244
- 1500 train 7.409528 (lr=4.8074e-05) (hash(x)=157074034)
- 1600 val loss 7.2426
- 1600 val perplexity 1397.6726
- 1600 train 7.008507 (lr=4.7772e-05) (hash(x)=137464699)
- 1700 val loss 7.2321
- 1700 val perplexity 1383.1003
- 1700 train 7.397147 (lr=4.7448e-05) (hash(x)=166955614)
- 1800 val loss 7.2185
- 1800 val perplexity 1364.4261
- 1800 train 7.055240 (lr=4.7105e-05) (hash(x)=143886042)
- 1900 val loss 7.2195
- 1900 val perplexity 1365.8647
- 1900 train 7.842936 (lr=4.6741e-05) (hash(x)=193610391)
- 2000 val loss 7.2015
- 2000 val perplexity 1341.4861
- 2000 train 7.366463 (lr=4.6357e-05) (hash(x)=163799796)
- 2100 val loss 7.1927
- 2100 val perplexity 1329.6649
- 2100 train 7.166338 (lr=4.5954e-05) (hash(x)=154107339)
- 2200 val loss 7.1802
- 2200 val perplexity 1313.1835
- 2200 train 7.177769 (lr=4.5532e-05) (hash(x)=144592844)
- 2300 val loss 7.1721
- 2300 val perplexity 1302.5282
- 2300 train 7.247394 (lr=4.5091e-05) (hash(x)=154535861)
- 2400 val loss 7.1619
- 2400 val perplexity 1289.3762
- 2400 train 7.884716 (lr=4.4633e-05) (hash(x)=204706354)
- 2500 val loss 7.1551
- 2500 val perplexity 1280.5905
- 2500 train 6.964126 (lr=4.4156e-05) (hash(x)=143783202)
- 2600 val loss 7.1398
- 2600 val perplexity 1261.1708
- 2600 train 7.030527 (lr=4.3663e-05) (hash(x)=150862210)
- 2700 val loss 7.1286
- 2700 val perplexity 1247.1561
- 2700 train 7.205724 (lr=4.3153e-05) (hash(x)=150753426)
- 2800 val loss 7.1344
- 2800 val perplexity 1254.4265
- 2800 train 7.102917 (lr=4.2627e-05) (hash(x)=147766811)
- 2900 val loss 7.1096
- 2900 val perplexity 1223.5974
- 2900 train 7.040896 (lr=4.2085e-05) (hash(x)=151177814)
- 3000 val loss 7.1137
- 3000 val perplexity 1228.7455
- 3000 train 6.953936 (lr=4.1529e-05) (hash(x)=135925327)
- 3100 val loss 7.1056
- 3100 val perplexity 1218.7218
- 3100 train 6.803243 (lr=4.0957e-05) (hash(x)=124842353)
- 3200 val loss 7.1002
- 3200 val perplexity 1212.1953
- 3200 train 7.201441 (lr=4.0373e-05) (hash(x)=160440642)
- 3300 val loss 7.0791
- 3300 val perplexity 1186.8615
- 3300 train 6.980282 (lr=3.9775e-05) (hash(x)=143017131)
- 3400 val loss 7.0622
- 3400 val perplexity 1167.0176
- 3400 train 7.292302 (lr=3.9164e-05) (hash(x)=148143474)
- 3500 val loss 7.0493
- 3500 val perplexity 1152.0490
- 3500 train 6.866848 (lr=3.8541e-05) (hash(x)=148732639)
- 3600 val loss 7.0350
- 3600 val perplexity 1135.7089
- 3600 train 7.043411 (lr=3.7907e-05) (hash(x)=146333803)
- 3700 val loss 7.0251
- 3700 val perplexity 1124.5363
- 3700 train 6.834583 (lr=3.7262e-05) (hash(x)=143611445)
- 3800 val loss 7.0262
- 3800 val perplexity 1125.7954
- 3800 train 6.827675 (lr=3.6608e-05) (hash(x)=141272115)
- 3900 val loss 6.9995
- 3900 val perplexity 1096.1355
- 3900 train 6.922548 (lr=3.5944e-05) (hash(x)=149161848)
- 4000 val loss 6.9854
- 4000 val perplexity 1080.7218
- 4000 train 7.053916 (lr=3.5271e-05) (hash(x)=152036955)
- 4100 val loss 6.9672
- 4100 val perplexity 1061.2494
- 4100 train 7.029777 (lr=3.4590e-05) (hash(x)=153260968)
- 4200 val loss 6.9604
- 4200 val perplexity 1054.0402
- 4200 train 6.837536 (lr=3.3902e-05) (hash(x)=147629478)
- 4300 val loss 6.9398
- 4300 val perplexity 1032.5374
- 4300 train 6.958030 (lr=3.3207e-05) (hash(x)=161701080)
- 4400 val loss 6.9262
- 4400 val perplexity 1018.6639
- 4400 train 6.980537 (lr=3.2507e-05) (hash(x)=155290524)
- 4500 val loss 6.9192
- 4500 val perplexity 1011.5548
- 4500 train 6.728027 (lr=3.1801e-05) (hash(x)=144726593)
- 4600 val loss 6.9101
- 4600 val perplexity 1002.3809
- 4600 train 6.868497 (lr=3.1091e-05) (hash(x)=149517202)
- 4700 val loss 6.8962
- 4700 val perplexity 988.5510
- 4700 train 6.895830 (lr=3.0377e-05) (hash(x)=139403008)
- 4800 val loss 6.8775
- 4800 val perplexity 970.2219
- 4800 train 6.945558 (lr=2.9661e-05) (hash(x)=153387238)
- 4900 val loss 6.8674
- 4900 val perplexity 960.4941
- 4900 train 6.632361 (lr=2.8942e-05) (hash(x)=139881569)
- 5000 val loss 6.8586
- 5000 val perplexity 952.0352
- 5000 train 6.831977 (lr=2.8221e-05) (hash(x)=152602580)
- 5100 val loss 6.8497
- 5100 val perplexity 943.5910
- 5100 train 6.907746 (lr=2.7500e-05) (hash(x)=150598460)
- 5200 val loss 6.8425
- 5200 val perplexity 936.8316
- 5200 train 6.763305 (lr=2.6779e-05) (hash(x)=147660496)
- 5300 val loss 6.8309
- 5300 val perplexity 926.0580
- 5300 train 6.576593 (lr=2.6058e-05) (hash(x)=147693848)
- 5400 val loss 6.8250
- 5400 val perplexity 920.5868
- 5400 train 6.577909 (lr=2.5339e-05) (hash(x)=136048145)
- 5500 val loss 6.8119
- 5500 val perplexity 908.5839
- 5500 train 6.763752 (lr=2.4623e-05) (hash(x)=140313123)
- 5600 val loss 6.8040
- 5600 val perplexity 901.4698
- 5600 train 6.834018 (lr=2.3909e-05) (hash(x)=156997993)
- 5700 val loss 6.8008
- 5700 val perplexity 898.6047
- 5700 train 6.793005 (lr=2.3199e-05) (hash(x)=143513880)
- 5800 val loss 6.7919
- 5800 val perplexity 890.6416
- 5800 train 6.947620 (lr=2.2493e-05) (hash(x)=153736628)
- 5900 val loss 6.7866
- 5900 val perplexity 885.8635
- 5900 train 6.986406 (lr=2.1793e-05) (hash(x)=166385417)
- 6000 val loss 6.7786
- 6000 val perplexity 878.8109
- 6000 train 6.990710 (lr=2.1098e-05) (hash(x)=153216517)
- 6100 val loss 6.7820
- 6100 val perplexity 881.8101
- 6100 train 6.654541 (lr=2.0410e-05) (hash(x)=145387545)
- 6200 val loss 6.7711
- 6200 val perplexity 872.3029
- 6200 train 6.877204 (lr=1.9729e-05) (hash(x)=156306460)
- 6300 val loss 6.7570
- 6300 val perplexity 860.0606
- 6300 train 6.650858 (lr=1.9056e-05) (hash(x)=148610447)
- 6400 val loss 6.7497
- 6400 val perplexity 853.8201
- 6400 train 6.964346 (lr=1.8392e-05) (hash(x)=158343535)
- 6500 val loss 6.7405
- 6500 val perplexity 845.9461
- 6500 train 6.874510 (lr=1.7738e-05) (hash(x)=157742557)
- 6600 val loss 6.7373
- 6600 val perplexity 843.3032
- 6600 train 6.671496 (lr=1.7093e-05) (hash(x)=150457308)
- 6700 val loss 6.7313
- 6700 val perplexity 838.2286
- 6700 train 6.613233 (lr=1.6459e-05) (hash(x)=142426205)
- 6800 val loss 6.7322
- 6800 val perplexity 838.9755
- 6800 train 6.559487 (lr=1.5836e-05) (hash(x)=151522525)
- 6900 val loss 6.7270
- 6900 val perplexity 834.6203
- 6900 train 6.653696 (lr=1.5225e-05) (hash(x)=152029630)
- 7000 val loss 6.7093
- 7000 val perplexity 819.9699
- 7000 train 6.566632 (lr=1.4627e-05) (hash(x)=142978954)
- 7100 val loss 6.7057
- 7100 val perplexity 817.0883
- 7100 train 6.590717 (lr=1.4043e-05) (hash(x)=148064359)
- 7200 val loss 6.6974
- 7200 val perplexity 810.2933
- 7200 train 6.625768 (lr=1.3471e-05) (hash(x)=155771845)
- 7300 val loss 6.6932
- 7300 val perplexity 806.9149
- 7300 train 6.487107 (lr=1.2915e-05) (hash(x)=141639844)
- 7400 val loss 6.6899
- 7400 val perplexity 804.2786
- 7400 train 6.724742 (lr=1.2373e-05) (hash(x)=153466021)
- 7500 val loss 6.6810
- 7500 val perplexity 797.0795
- 7500 train 6.677974 (lr=1.1847e-05) (hash(x)=148843587)
- 7600 val loss 6.6728
- 7600 val perplexity 790.5776
- 7600 train 6.796443 (lr=1.1337e-05) (hash(x)=152053933)
- 7700 val loss 6.6661
- 7700 val perplexity 785.3145
- 7700 train 6.455796 (lr=1.0844e-05) (hash(x)=141564003)
- 7800 val loss 6.6610
- 7800 val perplexity 781.3198
- 7800 train 6.543863 (lr=1.0367e-05) (hash(x)=148617843)
- 7900 val loss 6.6558
- 7900 val perplexity 777.2846
- 7900 train 6.609339 (lr=9.9088e-06) (hash(x)=146417668)
- 8000 val loss 6.6535
- 8000 val perplexity 775.4665
- 8000 train 6.471217 (lr=9.4682e-06) (hash(x)=150165488)
- 8100 val loss 6.6469
- 8100 val perplexity 770.3602
- 8100 train 6.616197 (lr=9.0461e-06) (hash(x)=144818855)
- 8200 val loss 6.6399
- 8200 val perplexity 765.0420
- 8200 train 6.674468 (lr=8.6430e-06) (hash(x)=143074954)
- 8300 val loss 6.6346
- 8300 val perplexity 760.9838
- 8300 train 6.531285 (lr=8.2593e-06) (hash(x)=145172494)
- 8400 val loss 6.6310
- 8400 val perplexity 758.2520
- 8400 train 6.647119 (lr=7.8953e-06) (hash(x)=152284393)
- 8500 val loss 6.6275
- 8500 val perplexity 755.5671
- 8500 train 6.479924 (lr=7.5515e-06) (hash(x)=147044705)
- 8600 val loss 6.6249
- 8600 val perplexity 753.5939
- 8600 train 6.523197 (lr=7.2282e-06) (hash(x)=147762608)
- 8700 val loss 6.6208
- 8700 val perplexity 750.5560
- 8700 train 6.600430 (lr=6.9257e-06) (hash(x)=141994567)
- 8800 val loss 6.6149
- 8800 val perplexity 746.1359
- 8800 train 6.652915 (lr=6.6444e-06) (hash(x)=152552896)
- 8900 val loss 6.6110
- 8900 val perplexity 743.2493
- 8900 train 6.450344 (lr=6.3845e-06) (hash(x)=139714454)
- 9000 val loss 6.6083
- 9000 val perplexity 741.2206
- 9000 train 6.626579 (lr=6.1462e-06) (hash(x)=152274419)
- 9100 val loss 6.6061
- 9100 val perplexity 739.6128
- 9100 train 6.728067 (lr=5.9300e-06) (hash(x)=160536885)
- 9200 val loss 6.6026
- 9200 val perplexity 737.0013
- 9200 train 6.420851 (lr=5.7359e-06) (hash(x)=138386899)
- 9300 val loss 6.5990
- 9300 val perplexity 734.3779
- 9300 train 6.613641 (lr=5.5641e-06) (hash(x)=149957457)
- 9400 val loss 6.5940
- 9400 val perplexity 730.6964
- 9400 train 6.703440 (lr=5.4149e-06) (hash(x)=161671548)
- 9500 val loss 6.5922
- 9500 val perplexity 729.3575
- 9500 train 6.763606 (lr=5.2884e-06) (hash(x)=171857400)
- 9600 val loss 6.5893
- 9600 val perplexity 727.2561
- 9600 train 6.280600 (lr=5.1847e-06) (hash(x)=135378471)
- 9700 val loss 6.5856
- 9700 val perplexity 724.6049
- 9700 train 6.495109 (lr=5.1040e-06) (hash(x)=149514591)
- 9800 val loss 6.5828
- 9800 val perplexity 722.5372
- 9800 train 6.551695 (lr=5.0462e-06) (hash(x)=152679072)
- 9900 val loss 6.5791
- 9900 val perplexity 719.8748
- 9900 train 6.495956 (lr=5.0116e-06) (hash(x)=150415193)
- 9999 val loss 6.5778
- 9999 val perplexity 718.9753
+ 0 train 11.308768 (lr=1.5000e-07) (hash(x)=145079536)
+ 100 val loss 9.9810
+ 100 val perplexity 21610.8398
+ 100 train 9.920817 (lr=1.5150e-05) (hash(x)=154745873)
+ 200 val loss 9.0327
+ 200 val perplexity 8372.2393
+ 200 train 9.070545 (lr=3.0000e-05) (hash(x)=155800595)
+ 300 val loss 7.9078
+ 300 val perplexity 2718.5388
+ 300 train 7.873547 (lr=2.9993e-05) (hash(x)=148595389)
+ 400 val loss 7.6379
+ 400 val perplexity 2075.3203
+ 400 train 7.546945 (lr=2.9972e-05) (hash(x)=145606733)
+ 500 val loss 7.5460
+ 500 val perplexity 1893.0764
+ 500 train 7.381058 (lr=2.9938e-05) (hash(x)=138221231)
+ 600 val loss 7.4750
+ 600 val perplexity 1763.4865
+ 600 train 7.483062 (lr=2.9889e-05) (hash(x)=150367139)
+ 700 val loss 7.4357
+ 700 val perplexity 1695.4303
+ 700 train 7.589180 (lr=2.9827e-05) (hash(x)=155579314)
+ 800 val loss 7.3914
+ 800 val perplexity 1621.9232
+ 800 train 7.467270 (lr=2.9751e-05) (hash(x)=155747374)
+ 900 val loss 7.3559
+ 900 val perplexity 1565.4408
+ 900 train 7.563746 (lr=2.9662e-05) (hash(x)=159334575)
+ 1000 val loss 7.3269
+ 1000 val perplexity 1520.6451
+ 1000 train 7.201617 (lr=2.9558e-05) (hash(x)=140604760)
+ 1100 val loss 7.3036
+ 1100 val perplexity 1485.6582
+ 1100 train 7.141465 (lr=2.9442e-05) (hash(x)=146208052)
+ 1200 val loss 7.2811
+ 1200 val perplexity 1452.6318
+ 1200 train 7.186431 (lr=2.9312e-05) (hash(x)=148404734)
+ 1300 val loss 7.2576
+ 1300 val perplexity 1418.9017
+ 1300 train 7.193643 (lr=2.9169e-05) (hash(x)=155681970)
+ 1400 val loss 7.2337
+ 1400 val perplexity 1385.4006
+ 1400 train 7.187166 (lr=2.9013e-05) (hash(x)=148115934)
+ 1500 val loss 7.2029
+ 1500 val perplexity 1343.2867
+ 1500 train 7.354657 (lr=2.8845e-05) (hash(x)=157074034)
+ 1600 val loss 7.1849
+ 1600 val perplexity 1319.3363
+ 1600 train 6.942118 (lr=2.8663e-05) (hash(x)=137464699)
+ 1700 val loss 7.1570
+ 1700 val perplexity 1283.0671
+ 1700 train 7.334529 (lr=2.8469e-05) (hash(x)=166955614)
+ 1800 val loss 7.1327
+ 1800 val perplexity 1252.2875
+ 1800 train 6.962445 (lr=2.8263e-05) (hash(x)=143886042)
+ 1900 val loss 7.1017
+ 1900 val perplexity 1214.0776
+ 1900 train 7.764103 (lr=2.8044e-05) (hash(x)=193610391)
+ 2000 val loss 7.0833
+ 2000 val perplexity 1191.8937
+ 2000 train 7.255627 (lr=2.7814e-05) (hash(x)=163799796)
+ 2100 val loss 7.0436
+ 2100 val perplexity 1145.5090
+ 2100 train 7.025841 (lr=2.7572e-05) (hash(x)=154107339)
+ 2200 val loss 7.0151
+ 2200 val perplexity 1113.3079
+ 2200 train 7.011511 (lr=2.7319e-05) (hash(x)=144592844)
+ 2300 val loss 6.9857
+ 2300 val perplexity 1081.1161
+ 2300 train 7.070469 (lr=2.7055e-05) (hash(x)=154535861)
+ 2400 val loss 6.9636
+ 2400 val perplexity 1057.4553
+ 2400 train 7.703825 (lr=2.6780e-05) (hash(x)=204706354)
+ 2500 val loss 6.9441
+ 2500 val perplexity 1036.9701
+ 2500 train 6.739681 (lr=2.6494e-05) (hash(x)=143783202)
+ 2600 val loss 6.9068
+ 2600 val perplexity 999.0374
+ 2600 train 6.801236 (lr=2.6198e-05) (hash(x)=150862210)
+ 2700 val loss 6.8760
+ 2700 val perplexity 968.7800
+ 2700 train 6.937062 (lr=2.5892e-05) (hash(x)=150753426)
+ 2800 val loss 6.8560
+ 2800 val perplexity 949.5924
+ 2800 train 6.852540 (lr=2.5576e-05) (hash(x)=147766811)
+ 2900 val loss 6.8234
+ 2900 val perplexity 919.1029
+ 2900 train 6.759596 (lr=2.5251e-05) (hash(x)=151177814)
+ 3000 val loss 6.8052
+ 3000 val perplexity 902.5270
+ 3000 train 6.647370 (lr=2.4917e-05) (hash(x)=135925327)
+ 3100 val loss 6.7907
+ 3100 val perplexity 889.5784
+ 3100 train 6.448046 (lr=2.4574e-05) (hash(x)=124842353)
+ 3200 val loss 6.7714
+ 3200 val perplexity 872.5413
+ 3200 train 6.888403 (lr=2.4224e-05) (hash(x)=160440642)
+ 3300 val loss 6.7501
+ 3300 val perplexity 854.1573
+ 3300 train 6.648814 (lr=2.3865e-05) (hash(x)=143017131)
+ 3400 val loss 6.7283
+ 3400 val perplexity 835.7366
+ 3400 train 7.011107 (lr=2.3498e-05) (hash(x)=148143474)
+ 3500 val loss 6.7111
+ 3500 val perplexity 821.4731
+ 3500 train 6.533078 (lr=2.3125e-05) (hash(x)=148732639)
+ 3600 val loss 6.6867
+ 3600 val perplexity 801.6344
+ 3600 train 6.676949 (lr=2.2744e-05) (hash(x)=146333803)
+ 3700 val loss 6.6650
+ 3700 val perplexity 784.4567
+ 3700 train 6.462936 (lr=2.2357e-05) (hash(x)=143611445)
+ 3800 val loss 6.6509
+ 3800 val perplexity 773.4550
+ 3800 train 6.446225 (lr=2.1965e-05) (hash(x)=141272115)
+ 3900 val loss 6.6264
+ 3900 val perplexity 754.7770
+ 3900 train 6.550550 (lr=2.1566e-05) (hash(x)=149161848)
+ 4000 val loss 6.6091
+ 4000 val perplexity 741.8365
+ 4000 train 6.691909 (lr=2.1162e-05) (hash(x)=152036955)
+ 4100 val loss 6.5838
+ 4100 val perplexity 723.3132
+ 4100 train 6.641014 (lr=2.0754e-05) (hash(x)=153260968)
+ 4200 val loss 6.5629
+ 4200 val perplexity 708.3047
+ 4200 train 6.442538 (lr=2.0341e-05) (hash(x)=147629478)
+ 4300 val loss 6.5478
+ 4300 val perplexity 697.7399
+ 4300 train 6.550487 (lr=1.9924e-05) (hash(x)=161701080)
+ 4400 val loss 6.5317
+ 4400 val perplexity 686.5733
+ 4400 train 6.570666 (lr=1.9504e-05) (hash(x)=155290524)
+ 4500 val loss 6.5223
+ 4500 val perplexity 680.1618
+ 4500 train 6.317447 (lr=1.9081e-05) (hash(x)=144726593)
+ 4600 val loss 6.5042
+ 4600 val perplexity 667.9535
+ 4600 train 6.444550 (lr=1.8655e-05) (hash(x)=149517202)
+ 4700 val loss 6.4828
+ 4700 val perplexity 653.8280
+ 4700 train 6.483470 (lr=1.8226e-05) (hash(x)=139403008)
+ 4800 val loss 6.4693
+ 4800 val perplexity 645.0618
+ 4800 train 6.548851 (lr=1.7796e-05) (hash(x)=153387238)
+ 4900 val loss 6.4597
+ 4900 val perplexity 638.8582
+ 4900 train 6.207753 (lr=1.7365e-05) (hash(x)=139881569)
+ 5000 val loss 6.4438
+ 5000 val perplexity 628.7875
+ 5000 train 6.428659 (lr=1.6933e-05) (hash(x)=152602580)
+ 5100 val loss 6.4318
+ 5100 val perplexity 621.3099
+ 5100 train 6.479546 (lr=1.6500e-05) (hash(x)=150598460)
+ 5200 val loss 6.4224
+ 5200 val perplexity 615.5011
+ 5200 train 6.313307 (lr=1.6067e-05) (hash(x)=147660496)
+ 5300 val loss 6.4107
+ 5300 val perplexity 608.3204
+ 5300 train 6.151624 (lr=1.5635e-05) (hash(x)=147693848)
+ 5400 val loss 6.4053
+ 5400 val perplexity 605.0364
+ 5400 train 6.148688 (lr=1.5204e-05) (hash(x)=136048145)
+ 5500 val loss 6.3898
+ 5500 val perplexity 595.7108
+ 5500 train 6.384548 (lr=1.4774e-05) (hash(x)=140313123)
+ 5600 val loss 6.3793
+ 5600 val perplexity 589.5198
+ 5600 train 6.400584 (lr=1.4345e-05) (hash(x)=156997993)
+ 5700 val loss 6.3661
+ 5700 val perplexity 581.7758
+ 5700 train 6.370698 (lr=1.3919e-05) (hash(x)=143513880)
+ 5800 val loss 6.3591
+ 5800 val perplexity 577.7076
+ 5800 train 6.495447 (lr=1.3496e-05) (hash(x)=153736628)
+ 5900 val loss 6.3509
+ 5900 val perplexity 573.0050
+ 5900 train 6.522499 (lr=1.3076e-05) (hash(x)=166385417)
+ 6000 val loss 6.3421
+ 6000 val perplexity 567.9657
+ 6000 train 6.563224 (lr=1.2659e-05) (hash(x)=153216517)
+ 6100 val loss 6.3358
+ 6100 val perplexity 564.4168
+ 6100 train 6.212199 (lr=1.2246e-05) (hash(x)=145387545)
+ 6200 val loss 6.3286
+ 6200 val perplexity 560.3442
+ 6200 train 6.448741 (lr=1.1838e-05) (hash(x)=156306460)
+ 6300 val loss 6.3156
+ 6300 val perplexity 553.1222
+ 6300 train 6.230059 (lr=1.1434e-05) (hash(x)=148610447)
+ 6400 val loss 6.3071
+ 6400 val perplexity 548.4486
+ 6400 train 6.526285 (lr=1.1035e-05) (hash(x)=158343535)
+ 6500 val loss 6.3001
+ 6500 val perplexity 544.6276
+ 6500 train 6.452993 (lr=1.0643e-05) (hash(x)=157742557)
+ 6600 val loss 6.2955
+ 6600 val perplexity 542.1091
+ 6600 train 6.213193 (lr=1.0256e-05) (hash(x)=150457308)
+ 6700 val loss 6.2904
+ 6700 val perplexity 539.3461
+ 6700 train 6.166604 (lr=9.8753e-06) (hash(x)=142426205)
+ 6800 val loss 6.2867
+ 6800 val perplexity 537.3521
+ 6800 train 6.090304 (lr=9.5017e-06) (hash(x)=151522525)
+ 6900 val loss 6.2809
+ 6900 val perplexity 534.2869
+ 6900 train 6.224025 (lr=9.1353e-06) (hash(x)=152029630)
+ 7000 val loss 6.2692
+ 7000 val perplexity 528.0710
+ 7000 train 6.124932 (lr=8.7764e-06) (hash(x)=142978954)
+ 7100 val loss 6.2643
+ 7100 val perplexity 525.4924
+ 7100 train 6.144693 (lr=8.4255e-06) (hash(x)=148064359)
+ 7200 val loss 6.2599
+ 7200 val perplexity 523.1679
+ 7200 train 6.171076 (lr=8.0829e-06) (hash(x)=155771845)
+ 7300 val loss 6.2554
+ 7300 val perplexity 520.8312
+ 7300 train 6.050687 (lr=7.7489e-06) (hash(x)=141639844)
+ 7400 val loss 6.2522
+ 7400 val perplexity 519.1713
+ 7400 train 6.302989 (lr=7.4239e-06) (hash(x)=153466021)
+ 7500 val loss 6.2444
+ 7500 val perplexity 515.1309
+ 7500 train 6.255723 (lr=7.1083e-06) (hash(x)=148843587)
+ 7600 val loss 6.2382
+ 7600 val perplexity 511.9514
+ 7600 train 6.392756 (lr=6.8023e-06) (hash(x)=152053933)
+ 7700 val loss 6.2335
+ 7700 val perplexity 509.5445
+ 7700 train 6.063930 (lr=6.5062e-06) (hash(x)=141564003)
+ 7800 val loss 6.2306
+ 7800 val perplexity 508.0515
+ 7800 train 6.104179 (lr=6.2205e-06) (hash(x)=148617843)
+ 7900 val loss 6.2282
+ 7900 val perplexity 506.8590
+ 7900 train 6.183061 (lr=5.9453e-06) (hash(x)=146417668)
+ 8000 val loss 6.2244
+ 8000 val perplexity 504.9313
+ 8000 train 6.031130 (lr=5.6809e-06) (hash(x)=150165488)
+ 8100 val loss 6.2213
+ 8100 val perplexity 503.3661
+ 8100 train 6.198036 (lr=5.4277e-06) (hash(x)=144818855)
+ 8200 val loss 6.2150
+ 8200 val perplexity 500.2049
+ 8200 train 6.276789 (lr=5.1858e-06) (hash(x)=143074954)
+ 8300 val loss 6.2112
+ 8300 val perplexity 498.3220
+ 8300 train 6.106345 (lr=4.9556e-06) (hash(x)=145172494)
+ 8400 val loss 6.2099
+ 8400 val perplexity 497.6450
+ 8400 train 6.234631 (lr=4.7372e-06) (hash(x)=152284393)
+ 8500 val loss 6.2073
+ 8500 val perplexity 496.3461
+ 8500 train 6.067934 (lr=4.5309e-06) (hash(x)=147044705)
+ 8600 val loss 6.2057
+ 8600 val perplexity 495.5492
+ 8600 train 6.101382 (lr=4.3369e-06) (hash(x)=147762608)
+ 8700 val loss 6.2016
+ 8700 val perplexity 493.5237
+ 8700 train 6.190577 (lr=4.1554e-06) (hash(x)=141994567)
+ 8800 val loss 6.1973
+ 8800 val perplexity 491.4120
+ 8800 train 6.262373 (lr=3.9866e-06) (hash(x)=152552896)
+ 8900 val loss 6.1948
+ 8900 val perplexity 490.2008
+ 8900 train 6.026112 (lr=3.8307e-06) (hash(x)=139714454)
+ 9000 val loss 6.1927
+ 9000 val perplexity 489.1827
+ 9000 train 6.210496 (lr=3.6877e-06) (hash(x)=152274419)
+ 9100 val loss 6.1915
+ 9100 val perplexity 488.5645
+ 9100 train 6.273561 (lr=3.5580e-06) (hash(x)=160536885)
+ 9200 val loss 6.1912
+ 9200 val perplexity 488.4222
+ 9200 train 6.001785 (lr=3.4415e-06) (hash(x)=138386899)
+ 9300 val loss 6.1875
+ 9300 val perplexity 486.6331
+ 9300 train 6.231057 (lr=3.3385e-06) (hash(x)=149957457)
+ 9400 val loss 6.1838
+ 9400 val perplexity 484.8300
+ 9400 train 6.253336 (lr=3.2490e-06) (hash(x)=161671548)
+ 9500 val loss 6.1822
+ 9500 val perplexity 484.0442
+ 9500 train 6.334615 (lr=3.1730e-06) (hash(x)=171857400)
+ 9600 val loss 6.1806
+ 9600 val perplexity 483.2801
+ 9600 train 5.907129 (lr=3.1108e-06) (hash(x)=135378471)
+ 9700 val loss 6.1796
+ 9700 val perplexity 482.8083
+ 9700 train 6.100302 (lr=3.0624e-06) (hash(x)=149514591)
+ 9800 val loss 6.1767
+ 9800 val perplexity 481.3884
+ 9800 train 6.151616 (lr=3.0277e-06) (hash(x)=152679072)
+ 9900 val loss 6.1744
+ 9900 val perplexity 480.2796
+ 9900 train 6.098384 (lr=3.0069e-06) (hash(x)=150415193)
+ 9999 val loss 6.1729
+ 9999 val perplexity 479.5658
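Two things worth noting about this log diff: the hash(x) values are identical at every logged step in both runs, so both saw the same batch order and the learning-rate schedule is the only change; and the 3e-05 run ends at val loss 6.1729 (perplexity 479.5658) versus 6.5778 (718.9753) for the 5e-05 run. The logged perplexity is simply exp(val loss); a quick standard-library check using the final validation line above:

# Sketch: confirm the logged perplexity is exp(loss) for the last val line.
import math

val_loss = 6.1729       # "9999 val loss 6.1729"
reported = 479.5658     # "9999 val perplexity 479.5658"
print(math.exp(val_loss))  # ~479.57, matches within the rounding of the loss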
attention_kindselective_n_heads4_seed1341/model_02500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:3557c87b2413b94fed8b96ed812999f9868b61e4f2a7a9cbb31ca4b3a4563ef7
+ oid sha256:d4dd19d55eccbca4cc0d4abab192709a4e04037b423dcf01f739eec9d2889807
  size 92843394
attention_kindselective_n_heads4_seed1341/model_05000.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:4717e9428293324538fe11c4035e1af70fc80012ac5cc343345eb0c3cd74955b
+ oid sha256:897c2fca90a8a86414cec7ef9f9dba951412bdc1d1b3736048c368ad5d6d206d
  size 92843394
attention_kindselective_n_heads4_seed1341/model_07500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:6065a27f3da257eb70a5e40b6bf856e43b0690a33b48fb12e7d7b1486d802451
+ oid sha256:53d429564946189dea5d288e8a03b4bfd8eb188459cbc1d8e724ccb87f538f77
  size 92843394
attention_kindselective_n_heads4_seed1341/model_09999.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:cacc3beef5f4e7df5562f8d07ce0d93f09abf208d2886d95484d215ee99ff5e7
+ oid sha256:7002c239e79b53ed1ff51816d5a0819c177cd02442ad4f91edd01037ed04dc90
  size 92843394
attention_kindselective_n_heads4_seed1341/optimizer_02500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:3dd6f5bb7dc2ee7b890bfc54be53036b840f8205cf0a2b6debc029cf6efb7832
+ oid sha256:31d88e061c99a7f36e0d25bb9be9cf00fb2ca8080cc9da28f9c5e50fa85dfaa4
  size 179406214
attention_kindselective_n_heads4_seed1341/optimizer_05000.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:23a1ed593311582bb436a72dc87c2fc4e5616cd6d81d5fa1b4bf033d549abb21
+ oid sha256:45a23b5438972806ac3ff4f774bbdaca4f8dd88b515c93a9b89cd11d0b848d99
  size 179406214
attention_kindselective_n_heads4_seed1341/optimizer_07500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:691b726748981018f4600a85a10fab0ae1f4fe2c389eff1cd5c0b4a14d0742e6
+ oid sha256:cd8888fc9d1ae8d4db6810cdff83bfd3f1d60b67f0b6a4ebc53db99906b4ba9c
  size 179406214
attention_kindselective_n_heads4_seed1341/optimizer_09999.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:c8b4f4ddfbbadd0ad5e5c183a2d4f46d64d7d0053e64ddbc412aac4e560214c4
+ oid sha256:26c6af83136a26760446afc9ba36e45eaa02cd5ef16fbb40c5a070b2857403be
  size 179406214
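All eight checkpoint and optimizer files are Git LFS pointers; in each case only the sha256 oid changed while the size stayed the same, i.e. the new run fully replaced the file contents. A sketch for verifying a downloaded file against its pointer (the local path is a hypothetical placeholder):

# Sketch: hash a downloaded checkpoint and compare it to the sha256 oid
# from its Git LFS pointer. The local path is a hypothetical placeholder.
import hashlib

expected_oid = "7002c239e79b53ed1ff51816d5a0819c177cd02442ad4f91edd01037ed04dc90"
path = "model_09999.pt"  # hypothetical local copy of the uploaded file

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        h.update(chunk)
assert h.hexdigest() == expected_oid, "bytes do not match the LFS pointer"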