andrew-healey committed
Commit 48fe885 · verified · Parent(s): 0d78d22

Upload folder using huggingface_hub

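This commit only touches files under `attention_kindselective_n_heads4_seed1338/`. To work against exactly this snapshot rather than whatever gets uploaded later, the folder can be pinned to the commit hash with `huggingface_hub`. A minimal sketch; the repo id is a placeholder, since this page does not show it:

```python
# Sketch: fetch the changed folder pinned to commit 48fe885.
# REPO_ID is hypothetical -- substitute the repo this commit belongs to.
from huggingface_hub import snapshot_download

local_dir = snapshot_download(
    repo_id="REPO_ID",  # placeholder; not shown on this page
    revision="48fe885",  # the commit hash above
    allow_patterns=["attention_kindselective_n_heads4_seed1338/*"],
)
print(local_dir)
```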
attention_kindselective_n_heads4_seed1338/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_3/attention_kindselective_n_heads4_seed1338", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_3", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1338, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.00015, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "1.5e-4_30720_4_1338", "n_embd": 256}
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_4/attention_kindselective_n_heads4_seed1338", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_4", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1338, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.00015, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "1.5e-4_30720_4_1338", "n_embd": 256}
attention_kindselective_n_heads4_seed1338/log2.txt CHANGED
@@ -2,302 +2,302 @@ max_steps: 10000
  0 val loss 11.2663
  0 val perplexity 78147.0000
  0 train 11.263341 (lr=7.5000e-07) (hash(x)=150327452)
- 100 val loss 8.6550
- 100 val perplexity 5739.0356
- 100 train 8.665512 (lr=7.5750e-05) (hash(x)=166441190)
- 200 val loss 7.5622
- 200 val perplexity 1924.0106
- 200 train 7.762118 (lr=1.5000e-04) (hash(x)=166780046)
- 300 val loss 7.4673
- 300 val perplexity 1749.8013
- 300 train 7.422103 (lr=1.4997e-04) (hash(x)=159835303)
- 400 val loss 7.4181
- 400 val perplexity 1665.9224
- 400 train 7.179411 (lr=1.4986e-04) (hash(x)=155040610)
- 500 val loss 7.3887
- 500 val perplexity 1617.5394
- 500 train 7.086969 (lr=1.4969e-04) (hash(x)=130190460)
- 600 val loss 7.3440
- 600 val perplexity 1546.8597
- 600 train 7.368630 (lr=1.4945e-04) (hash(x)=155504036)
- 700 val loss 7.3203
- 700 val perplexity 1510.6406
- 700 train 7.165936 (lr=1.4913e-04) (hash(x)=137347213)
- 800 val loss 7.2788
- 800 val perplexity 1449.2776
- 800 train 7.137177 (lr=1.4876e-04) (hash(x)=143823248)
- 900 val loss 7.2363
- 900 val perplexity 1388.9182
- 900 train 7.323705 (lr=1.4831e-04) (hash(x)=156260416)
- 1000 val loss 7.2580
- 1000 val perplexity 1419.4784
- 1000 train 7.215821 (lr=1.4779e-04) (hash(x)=143734685)
- 1100 val loss 7.3114
- 1100 val perplexity 1497.2897
- 1100 train 7.257675 (lr=1.4721e-04) (hash(x)=160013925)
- 1200 val loss 7.3346
- 1200 val perplexity 1532.4198
- 1200 train 7.167082 (lr=1.4656e-04) (hash(x)=150678249)
- 1300 val loss 7.2587
- 1300 val perplexity 1420.4479
- 1300 train 7.033679 (lr=1.4585e-04) (hash(x)=149073315)
- 1400 val loss 7.2140
- 1400 val perplexity 1358.2969
- 1400 train 7.497233 (lr=1.4507e-04) (hash(x)=175802021)
- 1500 val loss 7.1960
- 1500 val perplexity 1334.0533
- 1500 train 7.436237 (lr=1.4422e-04) (hash(x)=171034639)
- 1600 val loss 7.1795
- 1600 val perplexity 1312.1996
- 1600 train 7.375320 (lr=1.4332e-04) (hash(x)=158681215)
- 1700 val loss 7.1637
- 1700 val perplexity 1291.6211
- 1700 train 7.105666 (lr=1.4235e-04) (hash(x)=152116061)
- 1800 val loss 7.1537
- 1800 val perplexity 1278.7939
- 1800 train 7.099223 (lr=1.4131e-04) (hash(x)=146108145)
- 1900 val loss 7.1225
- 1900 val perplexity 1239.5602
- 1900 train 6.953999 (lr=1.4022e-04) (hash(x)=147598108)
- 2000 val loss 7.1305
- 2000 val perplexity 1249.5562
- 2000 train 6.993223 (lr=1.3907e-04) (hash(x)=154996086)
- 2100 val loss 7.1265
- 2100 val perplexity 1244.4935
- 2100 train 6.919074 (lr=1.3786e-04) (hash(x)=153396183)
- 2200 val loss 7.0872
- 2200 val perplexity 1196.5553
- 2200 train 7.082876 (lr=1.3660e-04) (hash(x)=153885445)
- 2300 val loss 7.0497
- 2300 val perplexity 1152.4731
- 2300 train 7.037508 (lr=1.3527e-04) (hash(x)=159666385)
- 2400 val loss 7.0609
- 2400 val perplexity 1165.5144
- 2400 train 7.040409 (lr=1.3390e-04) (hash(x)=142353087)
- 2500 val loss 7.0770
- 2500 val perplexity 1184.3530
- 2500 train 7.086487 (lr=1.3247e-04) (hash(x)=146491718)
- 2600 val loss 7.0654
- 2600 val perplexity 1170.7280
- 2600 train 7.002256 (lr=1.3099e-04) (hash(x)=150750353)
- 2700 val loss 7.0348
- 2700 val perplexity 1135.4803
- 2700 train 6.780224 (lr=1.2946e-04) (hash(x)=129849193)
- 2800 val loss 7.0484
- 2800 val perplexity 1151.0122
- 2800 train 6.927810 (lr=1.2788e-04) (hash(x)=152767913)
- 2900 val loss 7.0414
- 2900 val perplexity 1143.0150
- 2900 train 6.818239 (lr=1.2626e-04) (hash(x)=146531140)
- 3000 val loss 7.0160
- 3000 val perplexity 1114.3568
- 3000 train 7.076974 (lr=1.2459e-04) (hash(x)=151562048)
- 3100 val loss 6.9971
- 3100 val perplexity 1093.4324
- 3100 train 6.987829 (lr=1.2287e-04) (hash(x)=146001424)
- 3200 val loss 6.9538
- 3200 val perplexity 1047.0964
- 3200 train 7.006474 (lr=1.2112e-04) (hash(x)=166486165)
- 3300 val loss 6.9583
- 3300 val perplexity 1051.8140
- 3300 train 6.897943 (lr=1.1932e-04) (hash(x)=150866680)
- 3400 val loss 6.9559
- 3400 val perplexity 1049.2837
- 3400 train 6.872110 (lr=1.1749e-04) (hash(x)=143900419)
- 3500 val loss 6.9829
- 3500 val perplexity 1078.0233
- 3500 train 6.830639 (lr=1.1562e-04) (hash(x)=148845794)
- 3600 val loss 6.9487
- 3600 val perplexity 1041.7463
- 3600 train 6.816872 (lr=1.1372e-04) (hash(x)=145667796)
- 3700 val loss 6.9561
- 3700 val perplexity 1049.5679
- 3700 train 6.973476 (lr=1.1179e-04) (hash(x)=163563851)
- 3800 val loss 6.9661
- 3800 val perplexity 1060.0791
- 3800 train 6.935971 (lr=1.0982e-04) (hash(x)=147488689)
- 3900 val loss 6.9352
- 3900 val perplexity 1027.8176
- 3900 train 6.920702 (lr=1.0783e-04) (hash(x)=148186608)
- 4000 val loss 6.8950
- 4000 val perplexity 987.3676
- 4000 train 6.763925 (lr=1.0581e-04) (hash(x)=142970187)
- 4100 val loss 6.9069
- 4100 val perplexity 999.1064
- 4100 train 6.910237 (lr=1.0377e-04) (hash(x)=141584883)
- 4200 val loss 6.8708
- 4200 val perplexity 963.6940
- 4200 train 6.701746 (lr=1.0171e-04) (hash(x)=145664585)
- 4300 val loss 6.8682
- 4300 val perplexity 961.1713
- 4300 train 6.726182 (lr=9.9622e-05) (hash(x)=143736499)
- 4400 val loss 6.8514
- 4400 val perplexity 945.1613
- 4400 train 6.674716 (lr=9.7520e-05) (hash(x)=151883322)
- 4500 val loss 6.8442
- 4500 val perplexity 938.4389
- 4500 train 6.743720 (lr=9.5403e-05) (hash(x)=153904871)
- 4600 val loss 6.8544
- 4600 val perplexity 947.9967
- 4600 train 6.910264 (lr=9.3273e-05) (hash(x)=154893521)
- 4700 val loss 6.8130
- 4700 val perplexity 909.5671
- 4700 train 6.868094 (lr=9.1132e-05) (hash(x)=152323949)
- 4800 val loss 6.7850
- 4800 val perplexity 884.5167
- 4800 train 6.726544 (lr=8.8982e-05) (hash(x)=154104619)
- 4900 val loss 6.7750
- 4900 val perplexity 875.6627
- 4900 train 6.839969 (lr=8.6825e-05) (hash(x)=146311426)
- 5000 val loss 6.7802
- 5000 val perplexity 880.2397
- 5000 train 6.811939 (lr=8.4663e-05) (hash(x)=156741847)
- 5100 val loss 6.8186
- 5100 val perplexity 914.6631
- 5100 train 6.625038 (lr=8.2500e-05) (hash(x)=142086346)
- 5200 val loss 6.8095
- 5200 val perplexity 906.4241
- 5200 train 6.609668 (lr=8.0337e-05) (hash(x)=150265428)
- 5300 val loss 6.8070
- 5300 val perplexity 904.1855
- 5300 train 6.721405 (lr=7.8175e-05) (hash(x)=151339108)
- 5400 val loss 6.7747
- 5400 val perplexity 875.4039
- 5400 train 6.778082 (lr=7.6018e-05) (hash(x)=154654372)
- 5500 val loss 6.7680
- 5500 val perplexity 869.5363
- 5500 train 6.757663 (lr=7.3868e-05) (hash(x)=150575051)
- 5600 val loss 6.7493
- 5600 val perplexity 853.4575
- 5600 train 6.652130 (lr=7.1727e-05) (hash(x)=140396423)
- 5700 val loss 6.7432
- 5700 val perplexity 848.2428
- 5700 train 6.634809 (lr=6.9597e-05) (hash(x)=144678758)
- 5800 val loss 6.7379
- 5800 val perplexity 843.7819
- 5800 train 6.839241 (lr=6.7480e-05) (hash(x)=151992743)
- 5900 val loss 6.7451
- 5900 val perplexity 849.8788
- 5900 train 6.541656 (lr=6.5378e-05) (hash(x)=144396927)
- 6000 val loss 6.7338
- 6000 val perplexity 840.3497
- 6000 train 6.688858 (lr=6.3294e-05) (hash(x)=165478625)
- 6100 val loss 6.7536
- 6100 val perplexity 857.1795
- 6100 train 6.538942 (lr=6.1230e-05) (hash(x)=147088621)
- 6200 val loss 6.7280
- 6200 val perplexity 835.4413
- 6200 train 6.533285 (lr=5.9188e-05) (hash(x)=140794994)
- 6300 val loss 6.7068
- 6300 val perplexity 817.9677
- 6300 train 6.634907 (lr=5.7169e-05) (hash(x)=134780906)
- 6400 val loss 6.6841
- 6400 val perplexity 799.6194
- 6400 train 6.686557 (lr=5.5177e-05) (hash(x)=149023655)
- 6500 val loss 6.6806
- 6500 val perplexity 796.8002
- 6500 train 6.581760 (lr=5.3213e-05) (hash(x)=147497796)
- 6600 val loss 6.6870
- 6600 val perplexity 801.9154
- 6600 train 6.556812 (lr=5.1279e-05) (hash(x)=152902689)
- 6700 val loss 6.6708
- 6700 val perplexity 789.0045
- 6700 train 6.713040 (lr=4.9377e-05) (hash(x)=153846046)
- 6800 val loss 6.6602
- 6800 val perplexity 780.6896
- 6800 train 6.773557 (lr=4.7509e-05) (hash(x)=158512738)
- 6900 val loss 6.6533
- 6900 val perplexity 775.3393
- 6900 train 7.021071 (lr=4.5676e-05) (hash(x)=156849968)
- 7000 val loss 6.6413
- 7000 val perplexity 766.1014
- 7000 train 6.475348 (lr=4.3882e-05) (hash(x)=142395855)
- 7100 val loss 6.6401
- 7100 val perplexity 765.1912
- 7100 train 6.519809 (lr=4.2128e-05) (hash(x)=147114884)
- 7200 val loss 6.6253
- 7200 val perplexity 753.9130
- 7200 train 6.653028 (lr=4.0414e-05) (hash(x)=156979839)
- 7300 val loss 6.6312
- 7300 val perplexity 758.3539
- 7300 train 6.406144 (lr=3.8745e-05) (hash(x)=145584373)
- 7400 val loss 6.6381
- 7400 val perplexity 763.6647
- 7400 train 6.395841 (lr=3.7120e-05) (hash(x)=141508204)
- 7500 val loss 6.6150
- 7500 val perplexity 746.1946
- 7500 train 6.663067 (lr=3.5541e-05) (hash(x)=148803965)
- 7600 val loss 6.6029
- 7600 val perplexity 737.2593
- 7600 train 6.661316 (lr=3.4011e-05) (hash(x)=151019676)
- 7700 val loss 6.6007
- 7700 val perplexity 735.5807
- 7700 train 6.594604 (lr=3.2531e-05) (hash(x)=143155750)
- 7800 val loss 6.5994
- 7800 val perplexity 734.6207
- 7800 train 6.602177 (lr=3.1102e-05) (hash(x)=152569653)
- 7900 val loss 6.5959
- 7900 val perplexity 732.0568
- 7900 train 6.470634 (lr=2.9726e-05) (hash(x)=143519455)
- 8000 val loss 6.5947
- 8000 val perplexity 731.1951
- 8000 train 6.702043 (lr=2.8405e-05) (hash(x)=161180944)
- 8100 val loss 6.5933
- 8100 val perplexity 730.1704
- 8100 train 6.609785 (lr=2.7138e-05) (hash(x)=154107345)
- 8200 val loss 6.5931
- 8200 val perplexity 730.0322
- 8200 train 6.603320 (lr=2.5929e-05) (hash(x)=152486517)
- 8300 val loss 6.5839
- 8300 val perplexity 723.3425
- 8300 train 6.612710 (lr=2.4778e-05) (hash(x)=156167749)
- 8400 val loss 6.5769
- 8400 val perplexity 718.2834
- 8400 train 6.650872 (lr=2.3686e-05) (hash(x)=149155006)
- 8500 val loss 6.5718
- 8500 val perplexity 714.6675
- 8500 train 6.545753 (lr=2.2655e-05) (hash(x)=147844390)
- 8600 val loss 6.5630
- 8600 val perplexity 708.4192
- 8600 train 6.785943 (lr=2.1685e-05) (hash(x)=165753320)
- 8700 val loss 6.5601
- 8700 val perplexity 706.3683
- 8700 train 6.360015 (lr=2.0777e-05) (hash(x)=146079979)
- 8800 val loss 6.5567
- 8800 val perplexity 703.9554
- 8800 train 6.748188 (lr=1.9933e-05) (hash(x)=172259509)
- 8900 val loss 6.5590
- 8900 val perplexity 705.5823
- 8900 train 6.276247 (lr=1.9153e-05) (hash(x)=145148314)
- 9000 val loss 6.5564
- 9000 val perplexity 703.7161
- 9000 train 6.482366 (lr=1.8439e-05) (hash(x)=144250633)
- 9100 val loss 6.5477
- 9100 val perplexity 697.6514
- 9100 train 6.653626 (lr=1.7790e-05) (hash(x)=157219797)
- 9200 val loss 6.5406
- 9200 val perplexity 692.6686
- 9200 train 6.510061 (lr=1.7208e-05) (hash(x)=142743778)
- 9300 val loss 6.5375
- 9300 val perplexity 690.5378
- 9300 train 6.438917 (lr=1.6692e-05) (hash(x)=139669771)
- 9400 val loss 6.5340
- 9400 val perplexity 688.1193
- 9400 train 6.500862 (lr=1.6245e-05) (hash(x)=145916843)
- 9500 val loss 6.5310
- 9500 val perplexity 686.0958
- 9500 train 6.452840 (lr=1.5865e-05) (hash(x)=150196125)
- 9600 val loss 6.5286
- 9600 val perplexity 684.4724
- 9600 train 6.808760 (lr=1.5554e-05) (hash(x)=160041419)
- 9700 val loss 6.5288
- 9700 val perplexity 684.5671
- 9700 train 6.348433 (lr=1.5312e-05) (hash(x)=139931627)
- 9800 val loss 6.5260
- 9800 val perplexity 682.6550
- 9800 train 6.475728 (lr=1.5139e-05) (hash(x)=150370792)
- 9900 val loss 6.5226
- 9900 val perplexity 680.3587
- 9900 train 6.393685 (lr=1.5035e-05) (hash(x)=153014886)
- 9999 val loss 6.5211
- 9999 val perplexity 679.3424
+ 100 val loss 8.7777
+ 100 val perplexity 6488.2373
+ 100 train 8.785193 (lr=7.5750e-05) (hash(x)=166441190)
+ 200 val loss 7.5621
+ 200 val perplexity 1923.9125
+ 200 train 7.771656 (lr=1.5000e-04) (hash(x)=166780046)
+ 300 val loss 7.4551
+ 300 val perplexity 1728.7188
+ 300 train 7.411583 (lr=1.4997e-04) (hash(x)=159835303)
+ 400 val loss 7.4287
+ 400 val perplexity 1683.5997
+ 400 train 7.185664 (lr=1.4986e-04) (hash(x)=155040610)
+ 500 val loss 7.3802
+ 500 val perplexity 1603.9310
+ 500 train 7.082130 (lr=1.4969e-04) (hash(x)=130190460)
+ 600 val loss 7.2594
+ 600 val perplexity 1421.4446
+ 600 train 7.258858 (lr=1.4945e-04) (hash(x)=155504036)
+ 700 val loss 7.2145
+ 700 val perplexity 1359.0205
+ 700 train 7.076564 (lr=1.4913e-04) (hash(x)=137347213)
+ 800 val loss 7.1688
+ 800 val perplexity 1298.3121
+ 800 train 7.029332 (lr=1.4876e-04) (hash(x)=143823248)
+ 900 val loss 7.1819
+ 900 val perplexity 1315.4233
+ 900 train 7.241729 (lr=1.4831e-04) (hash(x)=156260416)
+ 1000 val loss 7.1672
+ 1000 val perplexity 1296.2437
+ 1000 train 7.118083 (lr=1.4779e-04) (hash(x)=143734685)
+ 1100 val loss 7.2244
+ 1100 val perplexity 1372.5431
+ 1100 train 7.164720 (lr=1.4721e-04) (hash(x)=160013925)
+ 1200 val loss 7.2210
+ 1200 val perplexity 1367.8552
+ 1200 train 6.965759 (lr=1.4656e-04) (hash(x)=150678249)
+ 1300 val loss 7.3149
+ 1300 val perplexity 1502.4619
+ 1300 train 7.109991 (lr=1.4585e-04) (hash(x)=149073315)
+ 1400 val loss 7.1382
+ 1400 val perplexity 1259.1836
+ 1400 train 7.414626 (lr=1.4507e-04) (hash(x)=175802021)
+ 1500 val loss 7.1499
+ 1500 val perplexity 1273.9269
+ 1500 train 7.380340 (lr=1.4422e-04) (hash(x)=171034639)
+ 1600 val loss 7.1045
+ 1600 val perplexity 1217.4829
+ 1600 train 7.302282 (lr=1.4332e-04) (hash(x)=158681215)
+ 1700 val loss 6.9913
+ 1700 val perplexity 1087.1879
+ 1700 train 6.945822 (lr=1.4235e-04) (hash(x)=152116061)
+ 1800 val loss 6.9835
+ 1800 val perplexity 1078.7386
+ 1800 train 6.930097 (lr=1.4131e-04) (hash(x)=146108145)
+ 1900 val loss 6.9365
+ 1900 val perplexity 1029.1526
+ 1900 train 6.772396 (lr=1.4022e-04) (hash(x)=147598108)
+ 2000 val loss 6.9423
+ 2000 val perplexity 1035.1649
+ 2000 train 6.790772 (lr=1.3907e-04) (hash(x)=154996086)
+ 2100 val loss 6.9431
+ 2100 val perplexity 1035.9935
+ 2100 train 6.721818 (lr=1.3786e-04) (hash(x)=153396183)
+ 2200 val loss 6.8870
+ 2200 val perplexity 979.4998
+ 2200 train 6.875124 (lr=1.3660e-04) (hash(x)=153885445)
+ 2300 val loss 6.8602
+ 2300 val perplexity 953.6027
+ 2300 train 6.850685 (lr=1.3527e-04) (hash(x)=159666385)
+ 2400 val loss 6.8501
+ 2400 val perplexity 943.9668
+ 2400 train 6.843330 (lr=1.3390e-04) (hash(x)=142353087)
+ 2500 val loss 6.8303
+ 2500 val perplexity 925.4475
+ 2500 train 6.832170 (lr=1.3247e-04) (hash(x)=146491718)
+ 2600 val loss 6.8423
+ 2600 val perplexity 936.5989
+ 2600 train 6.770065 (lr=1.3099e-04) (hash(x)=150750353)
+ 2700 val loss 6.8501
+ 2700 val perplexity 943.9425
+ 2700 train 6.593210 (lr=1.2946e-04) (hash(x)=129849193)
+ 2800 val loss 6.8524
+ 2800 val perplexity 946.1226
+ 2800 train 6.707880 (lr=1.2788e-04) (hash(x)=152767913)
+ 2900 val loss 6.8688
+ 2900 val perplexity 961.8150
+ 2900 train 6.658890 (lr=1.2626e-04) (hash(x)=146531140)
+ 3000 val loss 6.8300
+ 3000 val perplexity 925.2194
+ 3000 train 6.891155 (lr=1.2459e-04) (hash(x)=151562048)
+ 3100 val loss 6.7475
+ 3100 val perplexity 851.8889
+ 3100 train 6.743584 (lr=1.2287e-04) (hash(x)=146001424)
+ 3200 val loss 6.7215
+ 3200 val perplexity 830.0739
+ 3200 train 6.776397 (lr=1.2112e-04) (hash(x)=166486165)
+ 3300 val loss 6.7267
+ 3300 val perplexity 834.3564
+ 3300 train 6.665500 (lr=1.1932e-04) (hash(x)=150866680)
+ 3400 val loss 6.7039
+ 3400 val perplexity 815.5745
+ 3400 train 6.621673 (lr=1.1749e-04) (hash(x)=143900419)
+ 3500 val loss 6.6849
+ 3500 val perplexity 800.2004
+ 3500 train 6.524900 (lr=1.1562e-04) (hash(x)=148845794)
+ 3600 val loss 6.6943
+ 3600 val perplexity 807.8208
+ 3600 train 6.528556 (lr=1.1372e-04) (hash(x)=145667796)
+ 3700 val loss 6.7007
+ 3700 val perplexity 813.0059
+ 3700 train 6.712806 (lr=1.1179e-04) (hash(x)=163563851)
+ 3800 val loss 6.6819
+ 3800 val perplexity 797.8279
+ 3800 train 6.630010 (lr=1.0982e-04) (hash(x)=147488689)
+ 3900 val loss 6.6543
+ 3900 val perplexity 776.1143
+ 3900 train 6.648211 (lr=1.0783e-04) (hash(x)=148186608)
+ 4000 val loss 6.6528
+ 4000 val perplexity 774.9475
+ 4000 train 6.522067 (lr=1.0581e-04) (hash(x)=142970187)
+ 4100 val loss 6.6204
+ 4100 val perplexity 750.2690
+ 4100 train 6.626215 (lr=1.0377e-04) (hash(x)=141584883)
+ 4200 val loss 6.6039
+ 4200 val perplexity 737.9395
+ 4200 train 6.426653 (lr=1.0171e-04) (hash(x)=145664585)
+ 4300 val loss 6.6064
+ 4300 val perplexity 739.8495
+ 4300 train 6.458395 (lr=9.9622e-05) (hash(x)=143736499)
+ 4400 val loss 6.6145
+ 4400 val perplexity 745.8353
+ 4400 train 6.438884 (lr=9.7520e-05) (hash(x)=151883322)
+ 4500 val loss 6.6172
+ 4500 val perplexity 747.8121
+ 4500 train 6.509636 (lr=9.5403e-05) (hash(x)=153904871)
+ 4600 val loss 6.5999
+ 4600 val perplexity 735.0285
+ 4600 train 6.651087 (lr=9.3273e-05) (hash(x)=154893521)
+ 4700 val loss 6.5870
+ 4700 val perplexity 725.5793
+ 4700 train 6.638263 (lr=9.1132e-05) (hash(x)=152323949)
+ 4800 val loss 6.5637
+ 4800 val perplexity 708.8798
+ 4800 train 6.509049 (lr=8.8982e-05) (hash(x)=154104619)
+ 4900 val loss 6.5480
+ 4900 val perplexity 697.8463
+ 4900 train 6.612286 (lr=8.6825e-05) (hash(x)=146311426)
+ 5000 val loss 6.5250
+ 5000 val perplexity 682.0062
+ 5000 train 6.535800 (lr=8.4663e-05) (hash(x)=156741847)
+ 5100 val loss 6.5168
+ 5100 val perplexity 676.3936
+ 5100 train 6.322477 (lr=8.2500e-05) (hash(x)=142086346)
+ 5200 val loss 6.5240
+ 5200 val perplexity 681.3028
+ 5200 train 6.301110 (lr=8.0337e-05) (hash(x)=150265428)
+ 5300 val loss 6.5186
+ 5300 val perplexity 677.6261
+ 5300 train 6.414671 (lr=7.8175e-05) (hash(x)=151339108)
+ 5400 val loss 6.5056
+ 5400 val perplexity 668.8740
+ 5400 train 6.499859 (lr=7.6018e-05) (hash(x)=154654372)
+ 5500 val loss 6.4750
+ 5500 val perplexity 648.7399
+ 5500 train 6.470890 (lr=7.3868e-05) (hash(x)=150575051)
+ 5600 val loss 6.4751
+ 5600 val perplexity 648.7730
+ 5600 train 6.368444 (lr=7.1727e-05) (hash(x)=140396423)
+ 5700 val loss 6.4550
+ 5700 val perplexity 635.8600
+ 5700 train 6.341667 (lr=6.9597e-05) (hash(x)=144678758)
+ 5800 val loss 6.4481
+ 5800 val perplexity 631.4761
+ 5800 train 6.545341 (lr=6.7480e-05) (hash(x)=151992743)
+ 5900 val loss 6.4590
+ 5900 val perplexity 638.4401
+ 5900 train 6.260499 (lr=6.5378e-05) (hash(x)=144396927)
+ 6000 val loss 6.4424
+ 6000 val perplexity 627.9254
+ 6000 train 6.415303 (lr=6.3294e-05) (hash(x)=165478625)
+ 6100 val loss 6.4487
+ 6100 val perplexity 631.9029
+ 6100 train 6.227667 (lr=6.1230e-05) (hash(x)=147088621)
+ 6200 val loss 6.4394
+ 6200 val perplexity 626.0315
+ 6200 train 6.227491 (lr=5.9188e-05) (hash(x)=140794994)
+ 6300 val loss 6.4250
+ 6300 val perplexity 617.0739
+ 6300 train 6.354124 (lr=5.7169e-05) (hash(x)=134780906)
+ 6400 val loss 6.4077
+ 6400 val perplexity 606.5021
+ 6400 train 6.402531 (lr=5.5177e-05) (hash(x)=149023655)
+ 6500 val loss 6.4014
+ 6500 val perplexity 602.7130
+ 6500 train 6.293524 (lr=5.3213e-05) (hash(x)=147497796)
+ 6600 val loss 6.3992
+ 6600 val perplexity 601.3729
+ 6600 train 6.260563 (lr=5.1279e-05) (hash(x)=152902689)
+ 6700 val loss 6.4055
+ 6700 val perplexity 605.1715
+ 6700 train 6.442645 (lr=4.9377e-05) (hash(x)=153846046)
+ 6800 val loss 6.3892
+ 6800 val perplexity 595.3674
+ 6800 train 6.513471 (lr=4.7509e-05) (hash(x)=158512738)
+ 6900 val loss 6.3770
+ 6900 val perplexity 588.1899
+ 6900 train 6.826787 (lr=4.5676e-05) (hash(x)=156849968)
+ 7000 val loss 6.3729
+ 7000 val perplexity 585.7264
+ 7000 train 6.216920 (lr=4.3882e-05) (hash(x)=142395855)
+ 7100 val loss 6.3687
+ 7100 val perplexity 583.2773
+ 7100 train 6.247492 (lr=4.2128e-05) (hash(x)=147114884)
+ 7200 val loss 6.3600
+ 7200 val perplexity 578.2189
+ 7200 train 6.391733 (lr=4.0414e-05) (hash(x)=156979839)
+ 7300 val loss 6.3694
+ 7300 val perplexity 583.7050
+ 7300 train 6.152255 (lr=3.8745e-05) (hash(x)=145584373)
+ 7400 val loss 6.3727
+ 7400 val perplexity 585.6144
+ 7400 train 6.115252 (lr=3.7120e-05) (hash(x)=141508204)
+ 7500 val loss 6.3690
+ 7500 val perplexity 583.4774
+ 7500 train 6.418751 (lr=3.5541e-05) (hash(x)=148803965)
+ 7600 val loss 6.3510
+ 7600 val perplexity 573.0728
+ 7600 train 6.423841 (lr=3.4011e-05) (hash(x)=151019676)
+ 7700 val loss 6.3513
+ 7700 val perplexity 573.2105
+ 7700 train 6.371610 (lr=3.2531e-05) (hash(x)=143155750)
+ 7800 val loss 6.3405
+ 7800 val perplexity 567.0922
+ 7800 train 6.346223 (lr=3.1102e-05) (hash(x)=152569653)
+ 7900 val loss 6.3456
+ 7900 val perplexity 570.0016
+ 7900 train 6.216721 (lr=2.9726e-05) (hash(x)=143519455)
+ 8000 val loss 6.3375
+ 8000 val perplexity 565.3734
+ 8000 train 6.459105 (lr=2.8405e-05) (hash(x)=161180944)
+ 8100 val loss 6.3372
+ 8100 val perplexity 565.2216
+ 8100 train 6.351504 (lr=2.7138e-05) (hash(x)=154107345)
+ 8200 val loss 6.3397
+ 8200 val perplexity 566.6518
+ 8200 train 6.360279 (lr=2.5929e-05) (hash(x)=152486517)
+ 8300 val loss 6.3294
+ 8300 val perplexity 560.8088
+ 8300 train 6.357333 (lr=2.4778e-05) (hash(x)=156167749)
+ 8400 val loss 6.3236
+ 8400 val perplexity 557.5955
+ 8400 train 6.391531 (lr=2.3686e-05) (hash(x)=149155006)
+ 8500 val loss 6.3202
+ 8500 val perplexity 555.6780
+ 8500 train 6.275575 (lr=2.2655e-05) (hash(x)=147844390)
+ 8600 val loss 6.3152
+ 8600 val perplexity 552.9109
+ 8600 train 6.559919 (lr=2.1685e-05) (hash(x)=165753320)
+ 8700 val loss 6.3136
+ 8700 val perplexity 552.0432
+ 8700 train 6.108851 (lr=2.0777e-05) (hash(x)=146079979)
+ 8800 val loss 6.3150
+ 8800 val perplexity 552.8068
+ 8800 train 6.508952 (lr=1.9933e-05) (hash(x)=172259509)
+ 8900 val loss 6.3184
+ 8900 val perplexity 554.6667
+ 8900 train 6.030042 (lr=1.9153e-05) (hash(x)=145148314)
+ 9000 val loss 6.3184
+ 9000 val perplexity 554.6789
+ 9000 train 6.243028 (lr=1.8439e-05) (hash(x)=144250633)
+ 9100 val loss 6.3110
+ 9100 val perplexity 550.6165
+ 9100 train 6.421357 (lr=1.7790e-05) (hash(x)=157219797)
+ 9200 val loss 6.2984
+ 9200 val perplexity 543.7205
+ 9200 train 6.287928 (lr=1.7208e-05) (hash(x)=142743778)
+ 9300 val loss 6.2968
+ 9300 val perplexity 542.8534
+ 9300 train 6.204850 (lr=1.6692e-05) (hash(x)=139669771)
+ 9400 val loss 6.2941
+ 9400 val perplexity 541.3662
+ 9400 train 6.284011 (lr=1.6245e-05) (hash(x)=145916843)
+ 9500 val loss 6.2922
+ 9500 val perplexity 540.3427
+ 9500 train 6.226115 (lr=1.5865e-05) (hash(x)=150196125)
+ 9600 val loss 6.2924
+ 9600 val perplexity 540.4338
+ 9600 train 6.589438 (lr=1.5554e-05) (hash(x)=160041419)
+ 9700 val loss 6.2922
+ 9700 val perplexity 540.3143
+ 9700 train 6.096705 (lr=1.5312e-05) (hash(x)=139931627)
+ 9800 val loss 6.2895
+ 9800 val perplexity 538.9012
+ 9800 train 6.239325 (lr=1.5139e-05) (hash(x)=150370792)
+ 9900 val loss 6.2896
+ 9900 val perplexity 538.9210
+ 9900 train 6.155778 (lr=1.5035e-05) (hash(x)=153014886)
+ 9999 val loss 6.2886
+ 9999 val perplexity 538.4057
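In both runs the logged `val perplexity` is exp of the `val loss` (for example, exp(6.2886) ≈ 538.41 at step 9999), so the headline change here is the final validation loss dropping from 6.5211 to 6.2886 under identical hyperparameters and seed. A small sketch that parses the log format above and checks that relationship; the regexes are keyed to the exact line layout shown:

```python
# Sketch: check that each logged "val perplexity" equals exp("val loss").
import math
import re

LOSS = re.compile(r"^\s*(\d+) val loss ([\d.]+)")
PPL = re.compile(r"^\s*(\d+) val perplexity ([\d.]+)")

def check_log(text: str) -> None:
    losses, ppls = {}, {}
    for line in text.splitlines():
        if m := LOSS.match(line):
            losses[int(m[1])] = float(m[2])
        elif m := PPL.match(line):
            ppls[int(m[1])] = float(m[2])
    for step, loss in losses.items():
        assert math.isclose(math.exp(loss), ppls[step], rel_tol=1e-3), step

check_log("9999 val loss 6.2886\n9999 val perplexity 538.4057")
```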
attention_kindselective_n_heads4_seed1338/model_02500.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:d791b9df5a73ce7373c8c3633d7ef61a92efffb082c40753771a5af9686616b6
+ oid sha256:d95e1fb93ae7eb339b0abe0762d19c1ada616bfdb4620397f8ab04ccf7f18393
 size 92843394
attention_kindselective_n_heads4_seed1338/model_05000.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:ec252cc9736a9564ca93925735d67637349118511176e491ebd99ebc5ab280e2
+ oid sha256:a6a87e17518a8ee6ac9c9e67f765ee942a503fcfcafd73a884926483d93e544a
 size 92843394
attention_kindselective_n_heads4_seed1338/model_07500.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:e2c3687223cc276736023709fb477bf883ed750f52a3733196fbe90fb0c3d493
+ oid sha256:fe708e276b43c181ed8d4484ae990f2df21dc02afeedda5ba49462fbfbc68fcf
 size 92843394
attention_kindselective_n_heads4_seed1338/model_09999.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:d22cc128cf4ed809ae94894629d9b6cb11500983fdc2887db59483fc59d84406
+ oid sha256:e9f0b09e98b5430893e91ea690dfb350667d0a5c5947ea2696e639d62776ecc9
 size 92843394
attention_kindselective_n_heads4_seed1338/optimizer_02500.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:4957bc0a1124b62de2ad847f9bab34b5be4f4bf18c8d7e6e908770af55cbbff8
+ oid sha256:9009f01dc7ff0e740d4cb3f16c47753a63d0b67ba1638f0d6d86fe8958087103
 size 179406214
attention_kindselective_n_heads4_seed1338/optimizer_05000.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:b5ec9251ee7644d9c14036dded38a955e9e61897405fc03587dff8d4614fa33b
+ oid sha256:993dd71322a0ba12777ebdfaea3bd660675b164837b455e94ea08d218041e7fe
 size 179406214
attention_kindselective_n_heads4_seed1338/optimizer_07500.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:71e112b5d7a75337aac523f9377464968f888fd2fb95b1c001e11144064dad91
+ oid sha256:b6baabd14ddfef800a950a6f88b02d5fca5ca757bc537ce6d93bba62b4d80eda
 size 179406214
attention_kindselective_n_heads4_seed1338/optimizer_09999.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:d7af355bdcbdda09d35c074c218779f17c5de3a922e738ba2fb1ee7c2802edb5
+ oid sha256:267f47f0cd7666df01048f45ff2935cc98edb5bc13e50682d842989caa528a21
 size 179406214
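Each checkpoint entry above is a Git LFS pointer, not the tensor data itself: only the `oid sha256:` line changes, while the sizes stay constant at 92,843,394 bytes per model file and 179,406,214 bytes per optimizer file. After downloading the real artifact, it can be verified against the pointer's oid. A minimal sketch; the local path is illustrative:

```python
# Sketch: verify a downloaded checkpoint against its LFS pointer oid.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk_size):
            h.update(block)
    return h.hexdigest()

# oid taken from the model_09999.pt pointer in this commit
expected = "e9f0b09e98b5430893e91ea690dfb350667d0a5c5947ea2696e639d62776ecc9"
actual = sha256_of("attention_kindselective_n_heads4_seed1338/model_09999.pt")
assert actual == expected, f"checksum mismatch: {actual}"
```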