andrew-healey committed
Commit ecf19af · verified · 1 Parent(s): 58697ab

Upload folder using huggingface_hub
attention_kindselective_n_heads4_seed1338/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_6/attention_kindselective_n_heads4_seed1338", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_6", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1338, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 40, "total_batch_size": 10240, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.00015, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "15e-5_10240_4_1338", "n_embd": 256}
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_7/attention_kindselective_n_heads4_seed1338", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 50000, "warmup_steps": 200, "group": "wider_is_better_7", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1338, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 40, "total_batch_size": 10240, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 7e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "7e-5_10240_4_1338", "n_embd": 256}
attention_kindselective_n_heads4_seed1338/dataloader_49999.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:47776cddb8021172f048a950b83f25b692cb340214b800ce3837c15ceb58907c
+ size 964
attention_kindselective_n_heads4_seed1338/log2.txt CHANGED
@@ -1,606 +1,84 @@
1
- max_steps: 10000
2
- max_steps: 10000
3
  0 val loss 11.2672
4
  0 val perplexity 78216.1172
5
- 0 val loss 11.2672
6
- 0 val perplexity 78216.1172
7
- 0 train 11.243532 (lr=5.0000e-07) (hash(x)=50671684)
8
- 0 train 11.243532 (lr=7.5000e-07) (hash(x)=50671684)
9
- 100 val loss 9.4644
10
- 100 val perplexity 12893.0068
11
- 100 train 9.423753 (lr=5.0500e-05) (hash(x)=52740221)
12
- 100 val loss 8.9130
13
- 100 val perplexity 7428.2593
14
- 100 train 8.871554 (lr=7.5750e-05) (hash(x)=52740221)
15
- 200 val loss 8.0121
16
- 200 val perplexity 3017.3508
17
- 200 train 8.020210 (lr=1.0000e-04) (hash(x)=49034180)
18
- 200 val loss 7.6449
19
- 200 val perplexity 2089.9563
20
- 200 train 7.647759 (lr=1.5000e-04) (hash(x)=49034180)
21
- 300 val loss 7.6529
22
- 300 val perplexity 2106.7397
23
- 300 train 7.897432 (lr=9.9977e-05) (hash(x)=63180688)
24
- 300 val loss 7.5541
25
- 300 val perplexity 1908.5491
26
- 300 train 7.872751 (lr=1.4997e-04) (hash(x)=63180688)
27
- 400 val loss 7.5721
28
- 400 val perplexity 1943.1393
29
- 400 train 7.605824 (lr=9.9908e-05) (hash(x)=50373500)
30
- 400 val loss 7.4946
31
- 400 val perplexity 1798.3453
32
- 400 train 7.527841 (lr=1.4986e-04) (hash(x)=50373500)
33
- 500 val loss 7.4846
34
- 500 val perplexity 1780.4001
35
- 500 train 7.357911 (lr=9.9792e-05) (hash(x)=44547422)
36
- 500 val loss 7.5244
37
- 500 val perplexity 1852.7662
38
- 500 train 7.396894 (lr=1.4969e-04) (hash(x)=44547422)
39
- 600 val loss 7.4334
40
- 600 val perplexity 1691.5486
41
- 600 train 7.283990 (lr=9.9631e-05) (hash(x)=47184699)
42
- 600 val loss 7.5122
43
- 600 val perplexity 1830.2747
44
- 600 train 7.370455 (lr=1.4945e-04) (hash(x)=47184699)
45
- 700 val loss 7.4044
46
- 700 val perplexity 1643.2063
47
- 700 train 7.288874 (lr=9.9423e-05) (hash(x)=51374582)
48
- 700 val loss 7.4825
49
- 700 val perplexity 1776.7271
50
- 700 train 7.374347 (lr=1.4913e-04) (hash(x)=51374582)
51
- 800 val loss 7.3361
52
- 800 val perplexity 1534.7445
53
- 800 train 7.053239 (lr=9.9170e-05) (hash(x)=46264805)
54
- 800 val loss 7.4709
55
- 800 val perplexity 1756.2185
56
- 800 train 7.196687 (lr=1.4876e-04) (hash(x)=46264805)
57
- 900 val loss 7.3176
58
- 900 val perplexity 1506.6115
59
- 900 train 7.489451 (lr=9.8872e-05) (hash(x)=61178712)
60
- 900 val loss 7.4714
61
- 900 val perplexity 1757.0553
62
- 900 train 7.673396 (lr=1.4831e-04) (hash(x)=61178712)
63
- 1000 val loss 7.3403
64
- 1000 val perplexity 1541.1753
65
- 1000 train 7.262061 (lr=9.8528e-05) (hash(x)=50886520)
66
- 1000 val loss 7.3873
67
- 1000 val perplexity 1615.3920
68
- 1000 train 7.324912 (lr=1.4779e-04) (hash(x)=50886520)
69
- 1100 val loss 7.3250
70
- 1100 val perplexity 1517.7859
71
- 1100 train 7.088538 (lr=9.8140e-05) (hash(x)=48600099)
72
- 1100 val loss 7.3256
73
- 1100 val perplexity 1518.6517
74
- 1200 val loss 7.3049
75
- 1200 val perplexity 1487.5367
76
- 1100 train 7.074596 (lr=1.4721e-04) (hash(x)=48600099)
77
- 1200 train 7.037484 (lr=9.7708e-05) (hash(x)=50146792)
78
- 1300 val loss 7.3053
79
- 1300 val perplexity 1488.1028
80
- 1300 train 7.189948 (lr=9.7231e-05) (hash(x)=52617313)
81
- 1200 val loss 7.3250
82
- 1200 val perplexity 1517.8387
83
- 1200 train 7.013913 (lr=1.4656e-04) (hash(x)=50146792)
84
- 1400 val loss 7.2776
85
- 1400 val perplexity 1447.5337
86
- 1400 train 6.951150 (lr=9.6711e-05) (hash(x)=49794446)
87
- 1300 val loss 7.3213
88
- 1300 val perplexity 1512.1924
89
- 1300 train 7.189167 (lr=1.4585e-04) (hash(x)=52617313)
90
- 1500 val loss 7.2571
91
- 1500 val perplexity 1418.1956
92
- 1500 train 6.886528 (lr=9.6149e-05) (hash(x)=50766317)
93
- 1400 val loss 7.2495
94
- 1400 val perplexity 1407.3571
95
- 1400 train 6.927425 (lr=1.4507e-04) (hash(x)=49794446)
96
- 1600 val loss 7.2494
97
- 1600 val perplexity 1407.2947
98
- 1600 train 7.111308 (lr=9.5544e-05) (hash(x)=55551175)
99
- 1500 val loss 7.2380
100
- 1500 val perplexity 1391.2421
101
- 1500 train 6.864151 (lr=1.4422e-04) (hash(x)=50766317)
102
- 1700 val loss 7.2126
103
- 1700 val perplexity 1356.4755
104
- 1700 train 7.403630 (lr=9.4897e-05) (hash(x)=56717172)
105
- 1600 val loss 7.2205
106
- 1600 val perplexity 1367.1372
107
- 1600 train 7.083136 (lr=1.4332e-04) (hash(x)=55551175)
108
- 1800 val loss 7.1766
109
- 1800 val perplexity 1308.4944
110
- 1800 train 7.384186 (lr=9.4209e-05) (hash(x)=55376447)
111
- 1700 val loss 7.1596
112
- 1700 val perplexity 1286.4058
113
- 1700 train 7.339043 (lr=1.4235e-04) (hash(x)=56717172)
114
- 1900 val loss 7.1801
115
- 1900 val perplexity 1313.0984
116
- 1900 train 6.906479 (lr=9.3481e-05) (hash(x)=43810837)
117
- 1800 val loss 7.1396
118
- 1800 val perplexity 1260.8654
119
- 1800 train 7.333357 (lr=1.4131e-04) (hash(x)=55376447)
120
- 2000 val loss 7.1425
121
- 2000 val perplexity 1264.5587
122
- 2000 train 7.176294 (lr=9.2714e-05) (hash(x)=50881655)
123
- 1900 val loss 7.1254
124
- 1900 val perplexity 1243.1460
125
- 1900 train 6.832570 (lr=1.4022e-04) (hash(x)=43810837)
126
- 2100 val loss 7.2296
127
- 2100 val perplexity 1379.7310
128
- 2100 train 7.120895 (lr=9.1908e-05) (hash(x)=49386015)
129
- 2000 val loss 7.1011
130
- 2000 val perplexity 1213.3536
131
- 2000 train 7.124122 (lr=1.3907e-04) (hash(x)=50881655)
132
- 2200 val loss 7.1386
133
- 2200 val perplexity 1259.6010
134
- 2200 train 7.047742 (lr=9.1064e-05) (hash(x)=48572079)
135
- 2100 val loss 7.0934
136
- 2100 val perplexity 1203.9359
137
- 2100 train 6.971863 (lr=1.3786e-04) (hash(x)=49386015)
138
- 2300 val loss 7.0981
139
- 2300 val perplexity 1209.6259
140
- 2300 train 7.165452 (lr=9.0182e-05) (hash(x)=54950719)
141
- 2200 val loss 7.0903
142
- 2200 val perplexity 1200.2914
143
- 2200 train 7.014431 (lr=1.3660e-04) (hash(x)=48572079)
144
- 2400 val loss 7.0712
145
- 2400 val perplexity 1177.5944
146
- 2400 train 6.723047 (lr=8.9265e-05) (hash(x)=42190240)
147
- 2300 val loss 7.0517
148
- 2300 val perplexity 1154.8149
149
- 2300 train 7.107642 (lr=1.3527e-04) (hash(x)=54950719)
150
- 2500 val loss 7.0506
151
- 2500 val perplexity 1153.5056
152
- 2500 train 7.140409 (lr=8.8313e-05) (hash(x)=45223539)
153
- 2400 val loss 7.0396
154
- 2400 val perplexity 1140.9332
155
- 2400 train 6.697920 (lr=1.3390e-04) (hash(x)=42190240)
156
- 2600 val loss 7.0575
157
- 2600 val perplexity 1161.4955
158
- 2600 train 7.087414 (lr=8.7326e-05) (hash(x)=54037353)
159
- 2500 val loss 7.0079
160
- 2500 val perplexity 1105.2921
161
- 2500 train 7.105545 (lr=1.3247e-04) (hash(x)=45223539)
162
- 2700 val loss 7.0724
163
- 2700 val perplexity 1178.9181
164
- 2700 train 7.474761 (lr=8.6306e-05) (hash(x)=59131616)
165
- 2600 val loss 7.0085
166
- 2600 val perplexity 1105.9528
167
- 2600 train 7.022472 (lr=1.3099e-04) (hash(x)=54037353)
168
- 2800 val loss 7.0502
169
- 2800 val perplexity 1153.1416
170
- 2800 train 6.891716 (lr=8.5254e-05) (hash(x)=45882743)
171
- 2900 val loss 7.0373
172
- 2900 val perplexity 1138.2781
173
- 2900 train 6.647887 (lr=8.4170e-05) (hash(x)=43758910)
174
- 2700 val loss 6.9875
175
- 2700 val perplexity 1083.0179
176
- 2700 train 7.446748 (lr=1.2946e-04) (hash(x)=59131616)
177
- 3000 val loss 7.0306
178
- 3000 val perplexity 1130.6954
179
- 3000 train 6.926464 (lr=8.3057e-05) (hash(x)=47965974)
180
- 2800 val loss 6.9796
181
- 2800 val perplexity 1074.4767
182
- 2800 train 6.807279 (lr=1.2788e-04) (hash(x)=45882743)
183
- 3100 val loss 7.0047
184
- 3100 val perplexity 1101.8275
185
- 3100 train 6.867185 (lr=8.1915e-05) (hash(x)=48205243)
186
- 2900 val loss 6.9719
187
- 2900 val perplexity 1066.2426
188
- 2900 train 6.585280 (lr=1.2626e-04) (hash(x)=43758910)
189
- 3200 val loss 6.9690
190
- 3200 val perplexity 1063.1376
191
- 3200 train 7.034950 (lr=8.0745e-05) (hash(x)=54511383)
192
- 3000 val loss 6.9817
193
- 3000 val perplexity 1076.7251
194
- 3000 train 6.878994 (lr=1.2459e-04) (hash(x)=47965974)
195
- 3300 val loss 6.9480
196
- 3300 val perplexity 1041.0303
197
- 3300 train 6.908898 (lr=7.9549e-05) (hash(x)=54428388)
198
- 3100 val loss 7.0116
199
- 3100 val perplexity 1109.4271
200
- 3100 train 6.902014 (lr=1.2287e-04) (hash(x)=48205243)
201
- 3400 val loss 6.9392
202
- 3400 val perplexity 1031.9812
203
- 3400 train 6.903240 (lr=7.8328e-05) (hash(x)=48115990)
204
- 3200 val loss 6.9755
205
- 3200 val perplexity 1070.0830
206
- 3200 train 7.060159 (lr=1.2112e-04) (hash(x)=54511383)
207
- 3500 val loss 6.9174
208
- 3500 val perplexity 1009.7245
209
- 3500 train 6.518302 (lr=7.7082e-05) (hash(x)=41137345)
210
- 3300 val loss 6.9934
211
- 3300 val perplexity 1089.4271
212
- 3300 train 6.936038 (lr=1.1932e-04) (hash(x)=54428388)
213
- 3600 val loss 6.8992
214
- 3600 val perplexity 991.4840
215
- 3600 train 6.770505 (lr=7.5814e-05) (hash(x)=55186224)
216
- 3400 val loss 6.9826
217
- 3400 val perplexity 1077.6775
218
- 3400 train 6.970102 (lr=1.1749e-04) (hash(x)=48115990)
219
- 3700 val loss 6.8897
220
- 3700 val perplexity 982.1020
221
- 3700 train 6.766921 (lr=7.4525e-05) (hash(x)=54990049)
222
- 3500 val loss 6.9695
223
- 3500 val perplexity 1063.6649
224
- 3500 train 6.588171 (lr=1.1562e-04) (hash(x)=41137345)
225
- 3800 val loss 6.8763
226
- 3800 val perplexity 969.0707
227
- 3800 train 6.612755 (lr=7.3215e-05) (hash(x)=46288812)
228
- 3600 val loss 6.9800
229
- 3600 val perplexity 1074.9384
230
- 3600 train 6.839913 (lr=1.1372e-04) (hash(x)=55186224)
231
- 3900 val loss 6.8501
232
- 3900 val perplexity 943.9573
233
- 3900 train 6.493022 (lr=7.1887e-05) (hash(x)=45829773)
234
- 3700 val loss 7.0054
235
- 3700 val perplexity 1102.6222
236
- 3700 train 6.871941 (lr=1.1179e-04) (hash(x)=54990049)
237
- 4000 val loss 6.8168
238
- 4000 val perplexity 913.0935
239
- 4000 train 6.589319 (lr=7.0541e-05) (hash(x)=52499943)
240
- 3800 val loss 6.9941
241
- 3800 val perplexity 1090.1292
242
- 3800 train 6.732395 (lr=1.0982e-04) (hash(x)=46288812)
243
- 4100 val loss 6.7927
244
- 4100 val perplexity 891.2980
245
- 4100 train 6.689274 (lr=6.9180e-05) (hash(x)=48563796)
246
- 3900 val loss 6.9787
247
- 3900 val perplexity 1073.5215
248
- 3900 train 6.650596 (lr=1.0783e-04) (hash(x)=45829773)
249
- 4200 val loss 6.7404
250
- 4200 val perplexity 845.9372
251
- 4200 train 6.706513 (lr=6.7804e-05) (hash(x)=49165143)
252
- 4000 val loss 7.0015
253
- 4000 val perplexity 1098.2679
254
- 4000 train 6.807262 (lr=1.0581e-04) (hash(x)=52499943)
255
- 4300 val loss 6.7189
256
- 4300 val perplexity 827.8753
257
- 4300 train 6.787599 (lr=6.6414e-05) (hash(x)=50973176)
258
- 4100 val loss 6.9248
259
- 4100 val perplexity 1017.2004
260
- 4100 train 6.818427 (lr=1.0377e-04) (hash(x)=48563796)
261
- 4400 val loss 6.7172
262
- 4400 val perplexity 826.5346
263
- 4400 train 6.744534 (lr=6.5013e-05) (hash(x)=55275124)
264
- 4200 val loss 6.9132
265
- 4200 val perplexity 1005.4734
266
- 4200 train 6.895554 (lr=1.0171e-04) (hash(x)=49165143)
267
- 4500 val loss 6.6856
268
- 4500 val perplexity 800.7920
269
- 4500 train 7.036499 (lr=6.3602e-05) (hash(x)=58646505)
270
- 4300 val loss 6.8964
271
- 4300 val perplexity 988.7141
272
- 4300 train 6.942973 (lr=9.9622e-05) (hash(x)=50973176)
273
- 4600 val loss 6.6902
274
- 4600 val perplexity 804.4627
275
- 4600 train 6.504189 (lr=6.2182e-05) (hash(x)=42554666)
276
- 4700 val loss 6.6663
277
- 4700 val perplexity 785.4816
278
- 4700 train 6.585249 (lr=6.0754e-05) (hash(x)=47846764)
279
- 4400 val loss 6.8641
280
- 4400 val perplexity 957.2898
281
- 4400 train 6.903096 (lr=9.7520e-05) (hash(x)=55275124)
282
- 4800 val loss 6.6564
283
- 4800 val perplexity 777.7417
284
- 4800 train 7.171761 (lr=5.9321e-05) (hash(x)=58239019)
285
- 4500 val loss 6.8455
286
- 4500 val perplexity 939.6260
287
- 4500 train 7.169474 (lr=9.5403e-05) (hash(x)=58646505)
288
- 4900 val loss 6.6314
289
- 4900 val perplexity 758.5380
290
- 4900 train 6.680739 (lr=5.7883e-05) (hash(x)=50711220)
291
- 4600 val loss 6.8403
292
- 4600 val perplexity 934.8008
293
- 4600 train 6.658933 (lr=9.3273e-05) (hash(x)=42554666)
294
- 5000 val loss 6.6318
295
- 5000 val perplexity 758.8361
296
- 5000 train 6.556371 (lr=5.6442e-05) (hash(x)=45994194)
297
- 4700 val loss 6.8375
298
- 4700 val perplexity 932.1772
299
- 4700 train 6.749435 (lr=9.1132e-05) (hash(x)=47846764)
300
- 5100 val loss 6.6022
301
- 5100 val perplexity 736.6913
302
- 5100 train 6.473241 (lr=5.5000e-05) (hash(x)=48659050)
303
- 4800 val loss 6.8225
304
- 4800 val perplexity 918.2614
305
- 4800 train 7.336039 (lr=8.8982e-05) (hash(x)=58239019)
306
- 5200 val loss 6.5936
307
- 5200 val perplexity 730.3978
308
- 5200 train 6.494736 (lr=5.3558e-05) (hash(x)=49369682)
309
- 4900 val loss 6.8213
310
- 4900 val perplexity 917.2037
311
- 4900 train 6.874603 (lr=8.6825e-05) (hash(x)=50711220)
312
- 5300 val loss 6.5739
313
- 5300 val perplexity 716.1760
314
- 5300 train 6.977914 (lr=5.2117e-05) (hash(x)=57787700)
315
- 5000 val loss 6.8285
316
- 5000 val perplexity 923.7590
317
- 5000 train 6.757581 (lr=8.4663e-05) (hash(x)=45994194)
318
- 5400 val loss 6.5788
319
- 5400 val perplexity 719.6705
320
- 5400 train 6.496595 (lr=5.0679e-05) (hash(x)=49365400)
321
- 5100 val loss 6.8643
322
- 5100 val perplexity 957.4678
323
- 5100 train 6.740224 (lr=8.2500e-05) (hash(x)=48659050)
324
- 5500 val loss 6.5617
325
- 5500 val perplexity 707.4382
326
- 5500 train 6.527363 (lr=4.9246e-05) (hash(x)=48720412)
327
- 5200 val loss 6.8571
328
- 5200 val perplexity 950.6530
329
- 5200 train 6.783351 (lr=8.0337e-05) (hash(x)=49369682)
330
- 5600 val loss 6.5543
331
- 5600 val perplexity 702.2325
332
- 5600 train 6.896903 (lr=4.7818e-05) (hash(x)=55784800)
333
- 5300 val loss 6.8308
334
- 5300 val perplexity 925.9326
335
- 5300 train 7.230634 (lr=7.8175e-05) (hash(x)=57787700)
336
- 5700 val loss 6.5544
337
- 5700 val perplexity 702.3484
338
- 5700 train 6.351742 (lr=4.6398e-05) (hash(x)=50073634)
339
- 5400 val loss 6.8443
340
- 5400 val perplexity 938.5306
341
- 5400 train 6.757688 (lr=7.6018e-05) (hash(x)=49365400)
342
- 5800 val loss 6.5458
343
- 5800 val perplexity 696.3289
344
- 5800 train 6.357050 (lr=4.4987e-05) (hash(x)=50170324)
345
- 5500 val loss 6.8375
346
- 5500 val perplexity 932.1928
347
- 5500 train 6.792514 (lr=7.3868e-05) (hash(x)=48720412)
348
- 5900 val loss 6.5448
349
- 5900 val perplexity 695.6356
350
- 5900 train 6.281956 (lr=4.3586e-05) (hash(x)=48410268)
351
- 5600 val loss 6.8354
352
- 5600 val perplexity 930.1561
353
- 5600 train 7.154342 (lr=7.1727e-05) (hash(x)=55784800)
354
- 6000 val loss 6.5445
355
- 6000 val perplexity 695.3796
356
- 6000 train 6.403130 (lr=4.2196e-05) (hash(x)=49527342)
357
- 5700 val loss 6.8305
358
- 5700 val perplexity 925.6902
359
- 5700 train 6.690002 (lr=6.9597e-05) (hash(x)=50073634)
360
- 6100 val loss 6.5424
361
- 6100 val perplexity 693.9520
362
- 6100 train 6.311454 (lr=4.0820e-05) (hash(x)=49550294)
363
- 5800 val loss 6.8101
364
- 5800 val perplexity 906.9468
365
- 5800 train 6.646985 (lr=6.7480e-05) (hash(x)=50170324)
366
- 6200 val loss 6.5349
367
- 6200 val perplexity 688.7952
368
- 6200 train 6.060361 (lr=3.9459e-05) (hash(x)=42126106)
369
- 5900 val loss 6.8290
370
- 5900 val perplexity 924.2956
371
- 5900 train 6.594857 (lr=6.5378e-05) (hash(x)=48410268)
372
- 6300 val loss 6.5397
373
- 6300 val perplexity 692.1000
374
- 6300 train 6.248365 (lr=3.8113e-05) (hash(x)=49608772)
375
- 6000 val loss 6.8361
376
- 6000 val perplexity 930.8496
377
- 6000 train 6.714923 (lr=6.3294e-05) (hash(x)=49527342)
378
- 6400 val loss 6.5177
379
- 6400 val perplexity 677.0044
380
- 6400 train 6.047335 (lr=3.6785e-05) (hash(x)=52324417)
381
- 6100 val loss 6.8411
382
- 6100 val perplexity 935.5361
383
- 6100 train 6.655815 (lr=6.1230e-05) (hash(x)=49550294)
384
- 6500 val loss 6.4825
385
- 6500 val perplexity 653.5927
386
- 6500 train 6.518034 (lr=3.5475e-05) (hash(x)=46207215)
387
- 6200 val loss 6.8357
388
- 6200 val perplexity 930.4697
389
- 6200 train 6.424112 (lr=5.9188e-05) (hash(x)=42126106)
390
- 6600 val loss 6.4708
391
- 6600 val perplexity 645.9698
392
- 6600 train 6.417377 (lr=3.4186e-05) (hash(x)=49027014)
393
- 6300 val loss 6.8234
394
- 6300 val perplexity 919.1419
395
- 6700 val loss 6.4602
396
- 6700 val perplexity 639.2180
397
- 6300 train 6.580616 (lr=5.7169e-05) (hash(x)=49608772)
398
- 6700 train 6.447961 (lr=3.2918e-05) (hash(x)=46232513)
399
- 6800 val loss 6.4634
400
- 6800 val perplexity 641.2296
401
- 6800 train 6.388715 (lr=3.1672e-05) (hash(x)=47348403)
402
- 6400 val loss 6.8215
403
- 6400 val perplexity 917.3555
404
- 6400 train 6.404756 (lr=5.5177e-05) (hash(x)=52324417)
405
- 6900 val loss 6.4629
406
- 6900 val perplexity 640.9477
407
- 6900 train 6.463593 (lr=3.0451e-05) (hash(x)=49806647)
408
- 6500 val loss 6.7813
409
- 6500 val perplexity 881.2090
410
- 6500 train 6.812519 (lr=5.3213e-05) (hash(x)=46207215)
411
- 7000 val loss 6.4393
412
- 7000 val perplexity 625.9420
413
- 7000 train 6.481827 (lr=2.9255e-05) (hash(x)=50893018)
414
- 7100 val loss 6.4346
415
- 7100 val perplexity 623.0591
416
- 7100 train 6.455360 (lr=2.8085e-05) (hash(x)=49157639)
417
- 6600 val loss 6.7839
418
- 6600 val perplexity 883.4759
419
- 6600 train 6.716589 (lr=5.1279e-05) (hash(x)=49027014)
420
- 7200 val loss 6.4335
421
- 7200 val perplexity 622.3497
422
- 7200 train 6.423996 (lr=2.6943e-05) (hash(x)=47014759)
423
- 7300 val loss 6.4264
424
- 7300 val perplexity 617.9357
425
- 6700 val loss 6.7723
426
- 6700 val perplexity 873.2922
427
- 7300 train 6.418630 (lr=2.5830e-05) (hash(x)=47325591)
428
- 6700 train 6.704530 (lr=4.9377e-05) (hash(x)=46232513)
429
- 7400 val loss 6.4176
430
- 7400 val perplexity 612.5477
431
- 7400 train 6.297763 (lr=2.4746e-05) (hash(x)=49184604)
432
- 6800 val loss 6.7646
433
- 6800 val perplexity 866.5962
434
- 6800 train 6.692985 (lr=4.7509e-05) (hash(x)=47348403)
435
- 7500 val loss 6.4129
436
- 7500 val perplexity 609.6670
437
- 7500 train 6.624906 (lr=2.3694e-05) (hash(x)=55053584)
438
- 7600 val loss 6.4065
439
- 7600 val perplexity 605.7946
440
- 7600 train 6.347634 (lr=2.2674e-05) (hash(x)=48693923)
441
- 6900 val loss 6.7554
442
- 6900 val perplexity 858.6456
443
- 6900 train 6.736991 (lr=4.5676e-05) (hash(x)=49806647)
444
- 7700 val loss 6.4069
445
- 7700 val perplexity 606.0300
446
- 7700 train 5.973474 (lr=2.1687e-05) (hash(x)=40952882)
447
- 7000 val loss 6.7292
448
- 7000 val perplexity 836.4849
449
- 7000 train 6.770545 (lr=4.3882e-05) (hash(x)=50893018)
450
- 7800 val loss 6.4051
451
- 7800 val perplexity 604.9369
452
- 7800 train 6.497974 (lr=2.0735e-05) (hash(x)=52487845)
453
- 7900 val loss 6.4003
454
- 7900 val perplexity 602.0388
455
- 7900 train 6.478120 (lr=1.9818e-05) (hash(x)=50221547)
456
- 7100 val loss 6.7273
457
- 7100 val perplexity 834.8870
458
- 7100 train 6.753430 (lr=4.2128e-05) (hash(x)=49157639)
459
- 8000 val loss 6.3975
460
- 8000 val perplexity 600.3358
461
- 8000 train 6.573148 (lr=1.8936e-05) (hash(x)=62294204)
462
- 7200 val loss 6.7275
463
- 7200 val perplexity 835.0944
464
- 7200 train 6.709560 (lr=4.0414e-05) (hash(x)=47014759)
465
- 8100 val loss 6.3919
466
- 8100 val perplexity 596.9896
467
- 8100 train 6.095082 (lr=1.8092e-05) (hash(x)=44401967)
468
- 8200 val loss 6.4002
469
- 8200 val perplexity 601.9611
470
- 8200 train 6.315521 (lr=1.7286e-05) (hash(x)=52769095)
471
- 7300 val loss 6.7547
472
- 7300 val perplexity 858.0505
473
- 7300 train 6.756430 (lr=3.8745e-05) (hash(x)=47325591)
474
- 8300 val loss 6.4044
475
- 8300 val perplexity 604.5234
476
- 8300 train 6.341102 (lr=1.6519e-05) (hash(x)=56829883)
477
- 7400 val loss 6.7174
478
- 7400 val perplexity 826.7045
479
- 7400 train 6.603754 (lr=3.7120e-05) (hash(x)=49184604)
480
- 8400 val loss 6.4014
481
- 8400 val perplexity 602.6667
482
- 8400 train 6.319430 (lr=1.5791e-05) (hash(x)=52147375)
483
- 8500 val loss 6.4058
484
- 8500 val perplexity 605.3235
485
- 8500 train 6.661501 (lr=1.5103e-05) (hash(x)=60197820)
486
- 7500 val loss 6.7173
487
- 7500 val perplexity 826.5941
488
- 7500 train 6.937334 (lr=3.5541e-05) (hash(x)=55053584)
489
- 8600 val loss 6.3932
490
- 8600 val perplexity 597.7507
491
- 8600 train 6.123180 (lr=1.4456e-05) (hash(x)=49377068)
492
- 7600 val loss 6.7204
493
- 7600 val perplexity 829.1573
494
- 7600 train 6.676544 (lr=3.4011e-05) (hash(x)=48693923)
495
- 8700 val loss 6.4008
496
- 8700 val perplexity 602.3013
497
- 8700 train 6.362261 (lr=1.3851e-05) (hash(x)=51092724)
498
- 8800 val loss 6.3950
499
- 8800 val perplexity 598.8679
500
- 8800 train 6.373779 (lr=1.3289e-05) (hash(x)=48642928)
501
- 7700 val loss 6.7132
502
- 7700 val perplexity 823.2086
503
- 7700 train 6.307161 (lr=3.2531e-05) (hash(x)=40952882)
504
- 8900 val loss 6.3938
505
- 8900 val perplexity 598.1085
506
- 8900 train 6.588235 (lr=1.2769e-05) (hash(x)=55342246)
507
- 7800 val loss 6.7107
508
- 7800 val perplexity 821.1057
509
- 7800 train 6.792049 (lr=3.1102e-05) (hash(x)=52487845)
510
- 9000 val loss 6.3691
511
- 9000 val perplexity 583.5227
512
- 9000 train 6.369802 (lr=1.2292e-05) (hash(x)=48093368)
513
- 9100 val loss 6.3650
514
- 9100 val perplexity 581.1680
515
- 9100 train 6.428511 (lr=1.1860e-05) (hash(x)=48578183)
516
- 7900 val loss 6.7038
517
- 7900 val perplexity 815.5079
518
- 7900 train 6.772099 (lr=2.9726e-05) (hash(x)=50221547)
519
- 9200 val loss 6.3610
520
- 9200 val perplexity 578.8246
521
- 9200 train 6.526727 (lr=1.1472e-05) (hash(x)=50794720)
522
- 8000 val loss 6.7079
523
- 8000 val perplexity 818.8376
524
- 8000 train 6.906245 (lr=2.8405e-05) (hash(x)=62294204)
525
- 9300 val loss 6.3576
526
- 9300 val perplexity 576.8474
527
- 9300 train 6.110257 (lr=1.1128e-05) (hash(x)=46513190)
528
- 8100 val loss 6.7063
529
- 8100 val perplexity 817.5142
530
- 8100 train 6.437887 (lr=2.7138e-05) (hash(x)=44401967)
531
- 9400 val loss 6.3550
532
- 9400 val perplexity 575.3516
533
- 9400 train 6.034645 (lr=1.0830e-05) (hash(x)=43808238)
534
- 8200 val loss 6.7039
535
- 8200 val perplexity 815.5768
536
- 8200 train 6.657659 (lr=2.5929e-05) (hash(x)=52769095)
537
- 9500 val loss 6.3542
538
- 9500 val perplexity 574.9025
539
- 9500 train 6.153276 (lr=1.0577e-05) (hash(x)=45021888)
540
- 8300 val loss 6.7199
541
- 8300 val perplexity 828.7142
542
- 8300 train 6.664728 (lr=2.4778e-05) (hash(x)=56829883)
543
- 9600 val loss 6.3513
544
- 9600 val perplexity 573.2155
545
- 9600 train 6.448984 (lr=1.0369e-05) (hash(x)=56525570)
546
- 8400 val loss 6.7256
547
- 8400 val perplexity 833.4963
548
- 8400 train 6.668602 (lr=2.3686e-05) (hash(x)=52147375)
549
- 9700 val loss 6.3490
550
- 9700 val perplexity 571.9229
551
- 9700 train 6.538293 (lr=1.0208e-05) (hash(x)=52585913)
552
- 8500 val loss 6.7119
553
- 8500 val perplexity 822.1240
554
- 8500 train 6.907236 (lr=2.2655e-05) (hash(x)=60197820)
555
- 9800 val loss 6.3465
556
- 9800 val perplexity 570.4843
557
- 9800 train 6.530194 (lr=1.0092e-05) (hash(x)=52344698)
558
- 8600 val loss 6.7095
559
- 8600 val perplexity 820.1893
560
- 8600 train 6.462970 (lr=2.1685e-05) (hash(x)=49377068)
561
- 9900 val loss 6.3456
562
- 9900 val perplexity 569.9951
563
- 9900 train 6.335598 (lr=1.0023e-05) (hash(x)=51740945)
564
- 8700 val loss 6.7079
565
- 8700 val perplexity 818.8337
566
- 8700 train 6.667594 (lr=2.0777e-05) (hash(x)=51092724)
567
- 9999 val loss 6.3418
568
- 9999 val perplexity 567.8141
569
- 8800 val loss 6.7131
570
- 8800 val perplexity 823.0983
571
- 8800 train 6.702538 (lr=1.9933e-05) (hash(x)=48642928)
572
- 8900 val loss 6.6863
573
- 8900 val perplexity 801.3810
574
- 8900 train 6.856863 (lr=1.9153e-05) (hash(x)=55342246)
575
- 9000 val loss 6.6785
576
- 9000 val perplexity 795.0923
577
- 9000 train 6.670873 (lr=1.8439e-05) (hash(x)=48093368)
578
- 9100 val loss 6.6747
579
- 9100 val perplexity 792.0711
580
- 9100 train 6.731931 (lr=1.7790e-05) (hash(x)=48578183)
581
- 9200 val loss 6.6710
582
- 9200 val perplexity 789.1569
583
- 9200 train 6.815366 (lr=1.7208e-05) (hash(x)=50794720)
584
- 9300 val loss 6.6665
585
- 9300 val perplexity 785.6486
586
- 9300 train 6.457387 (lr=1.6692e-05) (hash(x)=46513190)
587
- 9400 val loss 6.6622
588
- 9400 val perplexity 782.2387
589
- 9400 train 6.348344 (lr=1.6245e-05) (hash(x)=43808238)
590
- 9500 val loss 6.6600
591
- 9500 val perplexity 780.5348
592
- 9500 train 6.504676 (lr=1.5865e-05) (hash(x)=45021888)
593
- 9600 val loss 6.6551
594
- 9600 val perplexity 776.7688
595
- 9600 train 6.715017 (lr=1.5554e-05) (hash(x)=56525570)
596
- 9700 val loss 6.6502
597
- 9700 val perplexity 772.9388
598
- 9700 train 6.868919 (lr=1.5312e-05) (hash(x)=52585913)
599
- 9800 val loss 6.6466
600
- 9800 val perplexity 770.1832
601
- 9800 train 6.803319 (lr=1.5139e-05) (hash(x)=52344698)
602
- 9900 val loss 6.6428
603
- 9900 val perplexity 767.2672
604
- 9900 train 6.650891 (lr=1.5035e-05) (hash(x)=51740945)
605
- 9999 val loss 6.6431
606
- 9999 val perplexity 767.4703
1
+ max_steps: 50000
2
+ 48900 val loss 5.6637
3
+ 48900 val perplexity 288.2159
4
+ 48900 train 5.430394 (lr=5.0542e-06) (hash(x)=46623158)
5
+ 2300 val loss 7.1191
6
+ 2300 val perplexity 1235.3901
7
+ 2300 train 7.172195 (lr=9.9606e-05) (hash(x)=54950719)
8
  0 val loss 11.2672
9
  0 val perplexity 78216.1172
10
+ 49000 val loss 5.6643
11
+ 49000 val perplexity 288.3737
12
+ 49000 train 5.511695 (lr=5.0448e-06) (hash(x)=48558395)
13
+ 49100 val loss 5.6627
14
+ 49100 val perplexity 287.9186
15
+ 49100 train 5.495073 (lr=5.0363e-06) (hash(x)=48791085)
16
+ 2400 val loss 7.1211
17
+ 2400 val perplexity 1237.8207
18
+ 2400 train 6.775694 (lr=9.9567e-05) (hash(x)=42190240)
19
+ 49200 val loss 5.6630
20
+ 49200 val perplexity 288.0055
21
+ 49200 train 6.383331 (lr=5.0286e-06) (hash(x)=58625942)
22
+ 0 train 11.243532 (lr=3.5000e-07) (hash(x)=50671684)
23
+ 2500 val loss 7.1119
24
+ 2500 val perplexity 1226.4176
25
+ 49300 val loss 5.6599
26
+ 49300 val perplexity 287.1288
27
+ 49300 train 5.632152 (lr=5.0219e-06) (hash(x)=52680896)
28
+ 2500 train 7.199808 (lr=9.9527e-05) (hash(x)=45223539)
29
+ 49400 val loss 5.6605
30
+ 49400 val perplexity 287.2808
31
+ 49400 train 5.787863 (lr=5.0161e-06) (hash(x)=59381598)
32
+ 100 val loss 9.6845
33
+ 100 val perplexity 16067.0596
34
+ 100 train 9.643931 (lr=3.5350e-05) (hash(x)=52740221)
35
+ 2600 val loss 7.1137
36
+ 2600 val perplexity 1228.6565
37
+ 2600 train 7.151042 (lr=9.9485e-05) (hash(x)=54037353)
38
+ 49500 val loss 5.6608
39
+ 49500 val perplexity 287.3849
40
+ 49500 train 5.961763 (lr=5.0112e-06) (hash(x)=51678773)
41
+ 49600 val loss 5.6607
42
+ 49600 val perplexity 287.3404
43
+ 49600 train 5.436832 (lr=5.0072e-06) (hash(x)=49092923)
44
+ 200 val loss 8.3575
45
+ 200 val perplexity 4261.9453
46
+ 200 train 8.375558 (lr=7.0000e-05) (hash(x)=49034180)
47
+ 2700 val loss 7.1290
48
+ 2700 val perplexity 1247.5837
49
+ 2700 train 7.617805 (lr=9.9442e-05) (hash(x)=59131616)
50
+ 49700 val loss 5.6630
51
+ 49700 val perplexity 288.0005
52
+ 49700 train 5.880443 (lr=5.0040e-06) (hash(x)=55550116)
53
+ 300 val loss 7.7565
54
+ 300 val perplexity 2336.7234
55
+ 300 train 7.972932 (lr=6.9999e-05) (hash(x)=63180688)
56
+ 49800 val loss 5.6618
57
+ 49800 val perplexity 287.6587
58
+ 49800 train 5.536071 (lr=5.0018e-06) (hash(x)=48422352)
59
+ 2800 val loss 7.1325
60
+ 2800 val perplexity 1252.0618
61
+ 2800 train 6.939841 (lr=9.9396e-05) (hash(x)=45882743)
62
+ 49900 val loss 5.6632
63
+ 49900 val perplexity 288.0570
64
+ 49900 train 5.760822 (lr=5.0004e-06) (hash(x)=52576880)
65
+ 400 val loss 7.5845
66
+ 400 val perplexity 1967.3807
67
+ 400 train 7.613533 (lr=6.9997e-05) (hash(x)=50373500)
68
+ 2900 val loss 7.1002
69
+ 2900 val perplexity 1212.2438
70
+ 2900 train 6.733154 (lr=9.9349e-05) (hash(x)=43758910)
71
+ 49999 val loss 5.6624
72
+ 49999 val perplexity 287.8370
73
+ 500 val loss 7.5309
74
+ 500 val perplexity 1864.8755
75
+ 500 train 7.425209 (lr=6.9994e-05) (hash(x)=44547422)
76
+ 3000 val loss 7.1177
77
+ 3000 val perplexity 1233.6171
78
+ 3000 train 7.030602 (lr=9.9300e-05) (hash(x)=47965974)
79
+ 600 val loss 7.5026
80
+ 600 val perplexity 1812.7593
81
+ 600 train 7.359186 (lr=6.9990e-05) (hash(x)=47184699)
82
+ 3100 val loss 7.1450
83
+ 3100 val perplexity 1267.7646
84
+ 3100 train 7.054578 (lr=9.9249e-05) (hash(x)=48205243)
attention_kindselective_n_heads4_seed1338/model_02500.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:c6c4c19a72387426aac984a63189433411f8e4e9389ecf6b26a622c2b2749876
+ oid sha256:3c6648672e2ce46ef2c72f27a7e35646a9f331dc839e0c3de86954ef8d7d8291
 size 92843394
attention_kindselective_n_heads4_seed1338/model_49999.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a666cbfe8b8d81c87e55ca0a40ddd37c3e00a2d57171edc5c86e7430b2847152
+ size 92843394
attention_kindselective_n_heads4_seed1338/optimizer_02500.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:41bf0981f8b6d4089814e0fb051998079849d5d6fd2217075c015126b4025e5f
+ oid sha256:b5ca883a2c1f5af0edfbb3484142a9889ad2c189ad3100e539e5ec66ad649251
 size 179406214
attention_kindselective_n_heads4_seed1338/optimizer_49999.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:56eb99f3c98d6a875ebbab8c90747767f406ce6b433b92d93d2426f24586efcd
+ size 179406214