andrew-healey committed
Commit 075d9e5 · verified · 1 Parent(s): 3f9635c

Upload folder using huggingface_hub

attention_kindselective_n_heads4_seed1340/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_4/attention_kindselective_n_heads4_seed1340", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_4", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1340, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.00015, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "1.5e-4_30720_4_1340", "n_embd": 256}
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_4/attention_kindselective_n_heads4_seed1340", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_4", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1340, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "0.5e-4_30720_4_1340", "n_embd": 256}
attention_kindselective_n_heads4_seed1340/log2.txt CHANGED
@@ -1,303 +1,303 @@
  max_steps: 10000
  0 val loss 11.2714
  0 val perplexity 78545.6094
- 0 train 11.276529 (lr=7.5000e-07) (hash(x)=164406924)
- 100 val loss 9.0381
- 100 val perplexity 8417.9463
- 100 train 9.196967 (lr=7.5750e-05) (hash(x)=177407419)
- 200 val loss 7.6693
- 200 val perplexity 2141.5881
- 200 train 7.579660 (lr=1.5000e-04) (hash(x)=144903932)
- 300 val loss 7.5340
- 300 val perplexity 1870.6281
- 300 train 7.896007 (lr=1.4997e-04) (hash(x)=173839165)
- 400 val loss 7.4474
- 400 val perplexity 1715.3689
- 400 train 7.538729 (lr=1.4986e-04) (hash(x)=167734596)
- 500 val loss 7.3606
- 500 val perplexity 1572.7972
- 500 train 7.367005 (lr=1.4969e-04) (hash(x)=153224076)
- 600 val loss 7.3408
- 600 val perplexity 1541.9294
- 600 train 7.236241 (lr=1.4945e-04) (hash(x)=149619098)
- 700 val loss 7.3572
- 700 val perplexity 1567.4814
- 700 train 7.288768 (lr=1.4913e-04) (hash(x)=146539909)
- 800 val loss 7.2903
- 800 val perplexity 1466.0669
- 800 train 7.251941 (lr=1.4876e-04) (hash(x)=153710890)
- 900 val loss 7.2480
- 900 val perplexity 1405.2601
- 900 train 7.202537 (lr=1.4831e-04) (hash(x)=155873620)
- 1000 val loss 7.1965
- 1000 val perplexity 1334.7125
- 1000 train 7.122793 (lr=1.4779e-04) (hash(x)=145450636)
- 1100 val loss 7.1624
- 1100 val perplexity 1289.9513
- 1100 train 7.217214 (lr=1.4721e-04) (hash(x)=154123388)
- 1200 val loss 7.1348
- 1200 val perplexity 1254.8914
- 1200 train 7.028561 (lr=1.4656e-04) (hash(x)=145249251)
- 1300 val loss 7.1244
- 1300 val perplexity 1241.8899
- 1300 train 7.036378 (lr=1.4585e-04) (hash(x)=148937127)
- 1400 val loss 7.1494
- 1400 val perplexity 1273.3578
- 1400 train 7.247538 (lr=1.4507e-04) (hash(x)=150475545)
- 1500 val loss 7.1809
- 1500 val perplexity 1314.0698
- 1500 train 7.120584 (lr=1.4422e-04) (hash(x)=154653428)
- 1600 val loss 7.2193
- 1600 val perplexity 1365.5410
- 1600 train 7.176501 (lr=1.4332e-04) (hash(x)=144483776)
- 1700 val loss 7.0776
- 1700 val perplexity 1185.1469
- 1700 train 7.226075 (lr=1.4235e-04) (hash(x)=157395496)
- 1800 val loss 7.0167
- 1800 val perplexity 1115.0973
- 1800 train 7.032615 (lr=1.4131e-04) (hash(x)=157916369)
- 1900 val loss 6.9944
- 1900 val perplexity 1090.5461
- 1900 train 7.183726 (lr=1.4022e-04) (hash(x)=166073923)
- 2000 val loss 6.9464
- 2000 val perplexity 1039.3651
- 2000 train 6.996574 (lr=1.3907e-04) (hash(x)=154856891)
- 2100 val loss 6.9176
- 2100 val perplexity 1009.8993
- 2100 train 6.904967 (lr=1.3786e-04) (hash(x)=151925203)
- 2200 val loss 6.8919
- 2200 val perplexity 984.2449
- 2200 train 6.614451 (lr=1.3660e-04) (hash(x)=136191502)
- 2300 val loss 6.8251
- 2300 val perplexity 920.6675
- 2300 train 6.939867 (lr=1.3527e-04) (hash(x)=153273362)
- 2400 val loss 6.8751
- 2400 val perplexity 967.8990
- 2400 train 6.812264 (lr=1.3390e-04) (hash(x)=148021541)
- 2500 val loss 6.8260
- 2500 val perplexity 921.5288
- 2500 train 6.754780 (lr=1.3247e-04) (hash(x)=141356608)
- 2600 val loss 6.8221
- 2600 val perplexity 917.9173
- 2600 train 6.776220 (lr=1.3099e-04) (hash(x)=146005217)
- 2700 val loss 6.8013
- 2700 val perplexity 898.9861
- 2700 train 6.667194 (lr=1.2946e-04) (hash(x)=144511718)
- 2800 val loss 6.7671
- 2800 val perplexity 868.7567
- 2800 train 6.655071 (lr=1.2788e-04) (hash(x)=146019502)
- 2900 val loss 6.7840
- 2900 val perplexity 883.6208
- 2900 train 6.688586 (lr=1.2626e-04) (hash(x)=146496200)
- 3000 val loss 6.7810
- 3000 val perplexity 880.9838
- 3000 train 6.711241 (lr=1.2459e-04) (hash(x)=150127281)
- 3100 val loss 6.7099
- 3100 val perplexity 820.5069
- 3100 train 6.685182 (lr=1.2287e-04) (hash(x)=142022255)
- 3200 val loss 6.6986
- 3200 val perplexity 811.2672
- 3200 train 6.748562 (lr=1.2112e-04) (hash(x)=154120875)
- 3300 val loss 6.6998
- 3300 val perplexity 812.2066
- 3300 train 6.751196 (lr=1.1932e-04) (hash(x)=153999717)
- 3400 val loss 6.6799
- 3400 val perplexity 796.2400
- 3400 train 6.459930 (lr=1.1749e-04) (hash(x)=139694097)
- 3500 val loss 6.6282
- 3500 val perplexity 756.0915
- 3500 train 6.809334 (lr=1.1562e-04) (hash(x)=162992732)
- 3600 val loss 6.6049
- 3600 val perplexity 738.7014
- 3600 train 6.586004 (lr=1.1372e-04) (hash(x)=147574101)
- 3700 val loss 6.5722
- 3700 val perplexity 714.9374
- 3700 train 6.681458 (lr=1.1179e-04) (hash(x)=157763099)
- 3800 val loss 6.5505
- 3800 val perplexity 699.6288
- 3800 train 6.692687 (lr=1.0982e-04) (hash(x)=170800034)
- 3900 val loss 6.5508
- 3900 val perplexity 699.7797
- 3900 train 6.629467 (lr=1.0783e-04) (hash(x)=164984528)
- 4000 val loss 6.5329
- 4000 val perplexity 687.3677
- 4000 train 6.404568 (lr=1.0581e-04) (hash(x)=141743323)
- 4100 val loss 6.5432
- 4100 val perplexity 694.5349
- 4100 train 6.561630 (lr=1.0377e-04) (hash(x)=153392872)
- 4200 val loss 6.5313
- 4200 val perplexity 686.2911
- 4200 train 6.403113 (lr=1.0171e-04) (hash(x)=149074933)
- 4300 val loss 6.5191
- 4300 val perplexity 677.9529
- 4300 train 6.874164 (lr=9.9622e-05) (hash(x)=167823423)
- 4400 val loss 6.4876
- 4400 val perplexity 656.9694
- 4400 train 6.258415 (lr=9.7520e-05) (hash(x)=141203114)
- 4500 val loss 6.5010
- 4500 val perplexity 665.8226
- 4500 train 6.505226 (lr=9.5403e-05) (hash(x)=146284780)
- 4600 val loss 6.4811
- 4600 val perplexity 652.6643
- 4600 train 6.300009 (lr=9.3273e-05) (hash(x)=141126464)
- 4700 val loss 6.4968
- 4700 val perplexity 663.0125
- 4700 train 6.525679 (lr=9.1132e-05) (hash(x)=154751926)
- 4800 val loss 6.4780
- 4800 val perplexity 650.6808
- 4800 train 6.554049 (lr=8.8982e-05) (hash(x)=154793198)
- 4900 val loss 6.4792
- 4900 val perplexity 651.4274
- 4900 train 6.231535 (lr=8.6825e-05) (hash(x)=139406392)
- 5000 val loss 6.4978
- 5000 val perplexity 663.7080
- 5000 train 6.241301 (lr=8.4663e-05) (hash(x)=153548741)
- 5100 val loss 6.4781
- 5100 val perplexity 650.7271
- 5100 train 6.534034 (lr=8.2500e-05) (hash(x)=160488568)
- 5200 val loss 6.4697
- 5200 val perplexity 645.2586
- 5200 train 6.485910 (lr=8.0337e-05) (hash(x)=149645053)
- 5300 val loss 6.4504
- 5300 val perplexity 632.9360
- 5300 train 6.529132 (lr=7.8175e-05) (hash(x)=155820556)
- 5400 val loss 6.4430
- 5400 val perplexity 628.2618
- 5400 train 6.396791 (lr=7.6018e-05) (hash(x)=147538134)
- 5500 val loss 6.4343
- 5500 val perplexity 622.8408
- 5500 train 6.642034 (lr=7.3868e-05) (hash(x)=166889307)
- 5600 val loss 6.4312
- 5600 val perplexity 620.8967
- 5600 train 6.160884 (lr=7.1727e-05) (hash(x)=139516699)
- 5700 val loss 6.4238
- 5700 val perplexity 616.3513
- 5700 train 6.132991 (lr=6.9597e-05) (hash(x)=140453511)
- 5800 val loss 6.4369
- 5800 val perplexity 624.4707
- 5800 train 6.383884 (lr=6.7480e-05) (hash(x)=162964847)
- 5900 val loss 6.4048
- 5900 val perplexity 604.7402
- 5900 train 6.415184 (lr=6.5378e-05) (hash(x)=150606634)
- 6000 val loss 6.3960
- 6000 val perplexity 599.4601
- 6000 train 6.499843 (lr=6.3294e-05) (hash(x)=149890857)
- 6100 val loss 6.3879
- 6100 val perplexity 594.5801
- 6100 train 6.491496 (lr=6.1230e-05) (hash(x)=173884145)
- 6200 val loss 6.3796
- 6200 val perplexity 589.6749
- 6200 train 6.443524 (lr=5.9188e-05) (hash(x)=151987098)
- 6300 val loss 6.3793
- 6300 val perplexity 589.4962
- 6300 train 6.343131 (lr=5.7169e-05) (hash(x)=148853562)
- 6400 val loss 6.3634
- 6400 val perplexity 580.2146
- 6400 train 6.131594 (lr=5.5177e-05) (hash(x)=141530101)
- 6500 val loss 6.3555
- 6500 val perplexity 575.6595
- 6500 train 6.214462 (lr=5.3213e-05) (hash(x)=142297809)
- 6600 val loss 6.3444
- 6600 val perplexity 569.2677
- 6600 train 6.239904 (lr=5.1279e-05) (hash(x)=142447782)
- 6700 val loss 6.3507
- 6700 val perplexity 572.8687
- 6700 train 6.295353 (lr=4.9377e-05) (hash(x)=147004686)
- 6800 val loss 6.3404
- 6800 val perplexity 567.0021
- 6800 train 6.077054 (lr=4.7509e-05) (hash(x)=133438702)
- 6900 val loss 6.3437
- 6900 val perplexity 568.9139
- 6900 train 6.377135 (lr=4.5676e-05) (hash(x)=157085143)
- 7000 val loss 6.3345
- 7000 val perplexity 563.6675
- 7000 train 6.220019 (lr=4.3882e-05) (hash(x)=139437666)
- 7100 val loss 6.3297
- 7100 val perplexity 560.9677
- 7100 train 6.403553 (lr=4.2128e-05) (hash(x)=159792986)
- 7200 val loss 6.3186
- 7200 val perplexity 554.7760
- 7200 train 6.285450 (lr=4.0414e-05) (hash(x)=144930687)
- 7300 val loss 6.3137
- 7300 val perplexity 552.0679
- 7300 train 6.387995 (lr=3.8745e-05) (hash(x)=156242690)
- 7400 val loss 6.3059
- 7400 val perplexity 547.7939
- 7400 train 6.166557 (lr=3.7120e-05) (hash(x)=148183719)
- 7500 val loss 6.3071
- 7500 val perplexity 548.4332
- 7500 train 6.323625 (lr=3.5541e-05) (hash(x)=152494758)
- 7600 val loss 6.3082
- 7600 val perplexity 549.0623
- 7600 train 6.021165 (lr=3.4011e-05) (hash(x)=142485027)
- 7700 val loss 6.2966
- 7700 val perplexity 542.7446
- 7700 train 6.145420 (lr=3.2531e-05) (hash(x)=147512165)
- 7800 val loss 6.2939
- 7800 val perplexity 541.2756
- 7800 train 6.209620 (lr=3.1102e-05) (hash(x)=160346994)
- 7900 val loss 6.2867
- 7900 val perplexity 537.3802
- 7900 train 6.138093 (lr=2.9726e-05) (hash(x)=144488254)
- 8000 val loss 6.2816
- 8000 val perplexity 534.6220
- 8000 train 6.089755 (lr=2.8405e-05) (hash(x)=147637019)
- 8100 val loss 6.2795
- 8100 val perplexity 533.5262
- 8100 train 6.136616 (lr=2.7138e-05) (hash(x)=147340534)
- 8200 val loss 6.2769
- 8200 val perplexity 532.1191
- 8200 train 6.305046 (lr=2.5929e-05) (hash(x)=151630665)
- 8300 val loss 6.2704
- 8300 val perplexity 528.6994
- 8300 train 6.352071 (lr=2.4778e-05) (hash(x)=149747064)
- 8400 val loss 6.2696
- 8400 val perplexity 528.2690
- 8400 train 6.457778 (lr=2.3686e-05) (hash(x)=154245770)
- 8500 val loss 6.2686
- 8500 val perplexity 527.7471
- 8500 train 6.158002 (lr=2.2655e-05) (hash(x)=152559100)
- 8600 val loss 6.2606
- 8600 val perplexity 523.5390
- 8600 train 6.811657 (lr=2.1685e-05) (hash(x)=181365926)
- 8700 val loss 6.2652
- 8700 val perplexity 525.9496
- 8700 train 6.102303 (lr=2.0777e-05) (hash(x)=154405991)
- 8800 val loss 6.2593
- 8800 val perplexity 522.8442
- 8800 train 6.197897 (lr=1.9933e-05) (hash(x)=153755904)
- 8900 val loss 6.2577
- 8900 val perplexity 522.0072
- 8900 train 6.199480 (lr=1.9153e-05) (hash(x)=152120568)
- 9000 val loss 6.2502
- 9000 val perplexity 518.1027
- 9000 train 6.055476 (lr=1.8439e-05) (hash(x)=142797279)
- 9100 val loss 6.2482
- 9100 val perplexity 517.1046
- 9100 train 6.107931 (lr=1.7790e-05) (hash(x)=143037503)
- 9200 val loss 6.2448
- 9200 val perplexity 515.3453
- 9200 train 6.117033 (lr=1.7208e-05) (hash(x)=113690273)
- 9300 val loss 6.2433
- 9300 val perplexity 514.5726
- 9300 train 6.173688 (lr=1.6692e-05) (hash(x)=158025077)
- 9400 val loss 6.2395
- 9400 val perplexity 512.5978
- 9400 train 6.295310 (lr=1.6245e-05) (hash(x)=158251718)
- 9500 val loss 6.2356
- 9500 val perplexity 510.6154
- 9500 train 6.264369 (lr=1.5865e-05) (hash(x)=154752610)
- 9600 val loss 6.2365
- 9600 val perplexity 511.0636
- 9600 train 6.110316 (lr=1.5554e-05) (hash(x)=146889093)
- 9700 val loss 6.2315
- 9700 val perplexity 508.5052
- 9700 train 6.236925 (lr=1.5312e-05) (hash(x)=156906516)
- 9800 val loss 6.2308
- 9800 val perplexity 508.1559
- 9800 train 6.083229 (lr=1.5139e-05) (hash(x)=153841927)
- 9900 val loss 6.2303
- 9900 val perplexity 507.8923
- 9900 train 6.427679 (lr=1.5035e-05) (hash(x)=163514334)
- 9999 val loss 6.2221
- 9999 val perplexity 503.7782
+ 0 train 11.276529 (lr=2.5000e-07) (hash(x)=164406924)
+ 100 val loss 9.8154
+ 100 val perplexity 18313.5645
+ 100 train 9.875526 (lr=2.5250e-05) (hash(x)=177407419)
+ 200 val loss 8.2338
+ 200 val perplexity 3766.0889
+ 200 train 8.183726 (lr=5.0000e-05) (hash(x)=144903932)
+ 300 val loss 7.6691
+ 300 val perplexity 2141.1450
+ 300 train 7.985724 (lr=4.9988e-05) (hash(x)=173839165)
+ 400 val loss 7.5500
+ 400 val perplexity 1900.7639
+ 400 train 7.632100 (lr=4.9954e-05) (hash(x)=167734596)
+ 500 val loss 7.4711
+ 500 val perplexity 1756.4982
+ 500 train 7.484628 (lr=4.9896e-05) (hash(x)=153224076)
+ 600 val loss 7.3888
+ 600 val perplexity 1617.7747
+ 600 train 7.289467 (lr=4.9815e-05) (hash(x)=149619098)
+ 700 val loss 7.3274
+ 700 val perplexity 1521.4415
+ 700 train 7.250896 (lr=4.9712e-05) (hash(x)=146539909)
+ 800 val loss 7.2574
+ 800 val perplexity 1418.5006
+ 800 train 7.213085 (lr=4.9585e-05) (hash(x)=153710890)
+ 900 val loss 7.1994
+ 900 val perplexity 1338.6649
+ 900 train 7.131366 (lr=4.9436e-05) (hash(x)=155873620)
+ 1000 val loss 7.1452
+ 1000 val perplexity 1267.9447
+ 1000 train 7.060977 (lr=4.9264e-05) (hash(x)=145450636)
+ 1100 val loss 7.0898
+ 1100 val perplexity 1199.6580
+ 1100 train 7.176812 (lr=4.9070e-05) (hash(x)=154123388)
+ 1200 val loss 7.0376
+ 1200 val perplexity 1138.6163
+ 1200 train 6.907458 (lr=4.8854e-05) (hash(x)=145249251)
+ 1300 val loss 6.9744
+ 1300 val perplexity 1068.9224
+ 1300 train 6.860754 (lr=4.8616e-05) (hash(x)=148937127)
+ 1400 val loss 6.9257
+ 1400 val perplexity 1018.1540
+ 1400 train 7.042790 (lr=4.8356e-05) (hash(x)=150475545)
+ 1500 val loss 6.8788
+ 1500 val perplexity 971.4904
+ 1500 train 6.801968 (lr=4.8074e-05) (hash(x)=154653428)
+ 1600 val loss 6.8310
+ 1600 val perplexity 926.0797
+ 1600 train 6.806830 (lr=4.7772e-05) (hash(x)=144483776)
+ 1700 val loss 6.7804
+ 1700 val perplexity 880.4584
+ 1700 train 6.921985 (lr=4.7448e-05) (hash(x)=157395496)
+ 1800 val loss 6.7448
+ 1800 val perplexity 849.6143
+ 1800 train 6.765642 (lr=4.7105e-05) (hash(x)=157916369)
+ 1900 val loss 6.7027
+ 1900 val perplexity 814.5706
+ 1900 train 6.902748 (lr=4.6741e-05) (hash(x)=166073923)
+ 2000 val loss 6.6750
+ 2000 val perplexity 792.3612
+ 2000 train 6.733498 (lr=4.6357e-05) (hash(x)=154856891)
+ 2100 val loss 6.6482
+ 2100 val perplexity 771.3788
+ 2100 train 6.638843 (lr=4.5954e-05) (hash(x)=151925203)
+ 2200 val loss 6.6066
+ 2200 val perplexity 739.9871
+ 2200 train 6.287358 (lr=4.5532e-05) (hash(x)=136191502)
+ 2300 val loss 6.5721
+ 2300 val perplexity 714.8580
+ 2300 train 6.767154 (lr=4.5091e-05) (hash(x)=153273362)
+ 2400 val loss 6.5409
+ 2400 val perplexity 692.8893
+ 2400 train 6.494888 (lr=4.4633e-05) (hash(x)=148021541)
+ 2500 val loss 6.5148
+ 2500 val perplexity 675.0336
+ 2500 train 6.460410 (lr=4.4156e-05) (hash(x)=141356608)
+ 2600 val loss 6.4847
+ 2600 val perplexity 655.0107
+ 2600 train 6.427143 (lr=4.3663e-05) (hash(x)=146005217)
+ 2700 val loss 6.4599
+ 2700 val perplexity 638.9852
+ 2700 train 6.306155 (lr=4.3153e-05) (hash(x)=144511718)
+ 2800 val loss 6.4346
+ 2800 val perplexity 623.0291
+ 2800 train 6.313002 (lr=4.2627e-05) (hash(x)=146019502)
+ 2900 val loss 6.4083
+ 2900 val perplexity 606.8544
+ 2900 train 6.329792 (lr=4.2085e-05) (hash(x)=146496200)
+ 3000 val loss 6.4059
+ 3000 val perplexity 605.3804
+ 3000 train 6.345868 (lr=4.1529e-05) (hash(x)=150127281)
+ 3100 val loss 6.3632
+ 3100 val perplexity 580.0782
+ 3100 train 6.357021 (lr=4.0957e-05) (hash(x)=142022255)
+ 3200 val loss 6.3417
+ 3200 val perplexity 567.7715
+ 3200 train 6.389840 (lr=4.0373e-05) (hash(x)=154120875)
+ 3300 val loss 6.3309
+ 3300 val perplexity 561.6668
+ 3300 train 6.418671 (lr=3.9775e-05) (hash(x)=153999717)
+ 3400 val loss 6.3119
+ 3400 val perplexity 551.1045
+ 3400 train 6.081605 (lr=3.9164e-05) (hash(x)=139694097)
+ 3500 val loss 6.2892
+ 3500 val perplexity 538.6971
+ 3500 train 6.477514 (lr=3.8541e-05) (hash(x)=162992732)
+ 3600 val loss 6.2635
+ 3600 val perplexity 525.0790
+ 3600 train 6.247020 (lr=3.7907e-05) (hash(x)=147574101)
+ 3700 val loss 6.2520
+ 3700 val perplexity 519.0330
+ 3700 train 6.364397 (lr=3.7262e-05) (hash(x)=157763099)
+ 3800 val loss 6.2303
+ 3800 val perplexity 507.9313
+ 3800 train 6.389091 (lr=3.6608e-05) (hash(x)=170800034)
+ 3900 val loss 6.2183
+ 3900 val perplexity 501.8706
+ 3900 train 6.302568 (lr=3.5944e-05) (hash(x)=164984528)
+ 4000 val loss 6.2018
+ 4000 val perplexity 493.6586
+ 4000 train 6.060319 (lr=3.5271e-05) (hash(x)=141743323)
+ 4100 val loss 6.2016
+ 4100 val perplexity 493.5134
+ 4100 train 6.226849 (lr=3.4590e-05) (hash(x)=153392872)
+ 4200 val loss 6.1929
+ 4200 val perplexity 489.2509
+ 4200 train 6.065385 (lr=3.3902e-05) (hash(x)=149074933)
+ 4300 val loss 6.1750
+ 4300 val perplexity 480.6028
+ 4300 train 6.553733 (lr=3.3207e-05) (hash(x)=167823423)
+ 4400 val loss 6.1533
+ 4400 val perplexity 470.2520
+ 4400 train 5.930851 (lr=3.2507e-05) (hash(x)=141203114)
+ 4500 val loss 6.1365
+ 4500 val perplexity 462.4289
+ 4500 train 6.135710 (lr=3.1801e-05) (hash(x)=146284780)
+ 4600 val loss 6.1188
+ 4600 val perplexity 454.3152
+ 4600 train 5.924687 (lr=3.1091e-05) (hash(x)=141126464)
+ 4700 val loss 6.1100
+ 4700 val perplexity 450.3566
+ 4700 train 6.132822 (lr=3.0377e-05) (hash(x)=154751926)
+ 4800 val loss 6.0998
+ 4800 val perplexity 445.7567
+ 4800 train 6.181572 (lr=2.9661e-05) (hash(x)=154793198)
+ 4900 val loss 6.0956
+ 4900 val perplexity 443.9148
+ 4900 train 5.804508 (lr=2.8942e-05) (hash(x)=139406392)
+ 5000 val loss 6.1043
+ 5000 val perplexity 447.7915
+ 5000 train 5.813259 (lr=2.8221e-05) (hash(x)=153548741)
+ 5100 val loss 6.0828
+ 5100 val perplexity 438.2686
+ 5100 train 6.164180 (lr=2.7500e-05) (hash(x)=160488568)
+ 5200 val loss 6.0660
+ 5200 val perplexity 430.9618
+ 5200 train 6.082884 (lr=2.6779e-05) (hash(x)=149645053)
+ 5300 val loss 6.0438
+ 5300 val perplexity 421.4874
+ 5300 train 6.135334 (lr=2.6058e-05) (hash(x)=155820556)
+ 5400 val loss 6.0364
+ 5400 val perplexity 418.3896
+ 5400 train 5.987836 (lr=2.5339e-05) (hash(x)=147538134)
+ 5500 val loss 6.0267
+ 5500 val perplexity 414.3346
+ 5500 train 6.239408 (lr=2.4623e-05) (hash(x)=166889307)
+ 5600 val loss 6.0201
+ 5600 val perplexity 411.6386
+ 5600 train 5.719359 (lr=2.3909e-05) (hash(x)=139516699)
+ 5700 val loss 6.0218
+ 5700 val perplexity 412.3290
+ 5700 train 5.710136 (lr=2.3199e-05) (hash(x)=140453511)
+ 5800 val loss 6.0138
+ 5800 val perplexity 409.0398
+ 5800 train 5.955937 (lr=2.2493e-05) (hash(x)=162964847)
+ 5900 val loss 6.0018
+ 5900 val perplexity 404.1522
+ 5900 train 6.014284 (lr=2.1793e-05) (hash(x)=150606634)
+ 6000 val loss 5.9860
+ 6000 val perplexity 397.8111
+ 6000 train 6.067945 (lr=2.1098e-05) (hash(x)=149890857)
+ 6100 val loss 5.9842
+ 6100 val perplexity 397.0892
+ 6100 train 6.078321 (lr=2.0410e-05) (hash(x)=173884145)
+ 6200 val loss 5.9740
+ 6200 val perplexity 393.0685
+ 6200 train 6.016133 (lr=1.9729e-05) (hash(x)=151987098)
+ 6300 val loss 5.9673
+ 6300 val perplexity 390.4381
+ 6300 train 5.915937 (lr=1.9056e-05) (hash(x)=148853562)
+ 6400 val loss 5.9593
+ 6400 val perplexity 387.3463
+ 6400 train 5.721579 (lr=1.8392e-05) (hash(x)=141530101)
+ 6500 val loss 5.9553
+ 6500 val perplexity 385.8022
+ 6500 train 5.821025 (lr=1.7738e-05) (hash(x)=142297809)
+ 6600 val loss 5.9475
+ 6600 val perplexity 382.7892
+ 6600 train 5.840808 (lr=1.7093e-05) (hash(x)=142447782)
+ 6700 val loss 5.9435
+ 6700 val perplexity 381.2514
+ 6700 train 5.940805 (lr=1.6459e-05) (hash(x)=147004686)
+ 6800 val loss 5.9352
+ 6800 val perplexity 378.1024
+ 6800 train 5.679741 (lr=1.5836e-05) (hash(x)=133438702)
+ 6900 val loss 5.9317
+ 6900 val perplexity 376.8032
+ 6900 train 5.951016 (lr=1.5225e-05) (hash(x)=157085143)
+ 7000 val loss 5.9286
+ 7000 val perplexity 375.6203
+ 7000 train 5.805225 (lr=1.4627e-05) (hash(x)=139437666)
+ 7100 val loss 5.9237
+ 7100 val perplexity 373.7811
+ 7100 train 5.940050 (lr=1.4043e-05) (hash(x)=159792986)
+ 7200 val loss 5.9126
+ 7200 val perplexity 369.6597
+ 7200 train 5.891582 (lr=1.3471e-05) (hash(x)=144930687)
+ 7300 val loss 5.9092
+ 7300 val perplexity 368.4073
+ 7300 train 5.956987 (lr=1.2915e-05) (hash(x)=156242690)
+ 7400 val loss 5.9077
+ 7400 val perplexity 367.8558
+ 7400 train 5.739932 (lr=1.2373e-05) (hash(x)=148183719)
+ 7500 val loss 5.9025
+ 7500 val perplexity 365.9624
+ 7500 train 5.946316 (lr=1.1847e-05) (hash(x)=152494758)
+ 7600 val loss 5.8989
+ 7600 val perplexity 364.6480
+ 7600 train 5.597721 (lr=1.1337e-05) (hash(x)=142485027)
+ 7700 val loss 5.9010
+ 7700 val perplexity 365.4037
+ 7700 train 5.767652 (lr=1.0844e-05) (hash(x)=147512165)
+ 7800 val loss 5.8856
+ 7800 val perplexity 359.8116
+ 7800 train 5.766011 (lr=1.0367e-05) (hash(x)=160346994)
+ 7900 val loss 5.8847
+ 7900 val perplexity 359.4823
+ 7900 train 5.756016 (lr=9.9088e-06) (hash(x)=144488254)
+ 8000 val loss 5.8796
+ 8000 val perplexity 357.6544
+ 8000 train 5.692674 (lr=9.4682e-06) (hash(x)=147637019)
+ 8100 val loss 5.8788
+ 8100 val perplexity 357.3631
+ 8100 train 5.741425 (lr=9.0461e-06) (hash(x)=147340534)
+ 8200 val loss 5.8760
+ 8200 val perplexity 356.3931
+ 8200 train 5.935407 (lr=8.6430e-06) (hash(x)=151630665)
+ 8300 val loss 5.8703
+ 8300 val perplexity 354.3390
+ 8300 train 5.975782 (lr=8.2593e-06) (hash(x)=149747064)
+ 8400 val loss 5.8661
+ 8400 val perplexity 352.8584
+ 8400 train 6.060540 (lr=7.8953e-06) (hash(x)=154245770)
+ 8500 val loss 5.8655
+ 8500 val perplexity 352.6601
+ 8500 train 5.755071 (lr=7.5515e-06) (hash(x)=152559100)
+ 8600 val loss 5.8608
+ 8600 val perplexity 351.0195
+ 8600 train 6.293483 (lr=7.2282e-06) (hash(x)=181365926)
+ 8700 val loss 5.8604
+ 8700 val perplexity 350.8503
+ 8700 train 5.698334 (lr=6.9257e-06) (hash(x)=154405991)
+ 8800 val loss 5.8571
+ 8800 val perplexity 349.6920
+ 8800 train 5.809381 (lr=6.6444e-06) (hash(x)=153755904)
+ 8900 val loss 5.8525
+ 8900 val perplexity 348.0933
+ 8900 train 5.771592 (lr=6.3845e-06) (hash(x)=152120568)
+ 9000 val loss 5.8484
+ 9000 val perplexity 346.6963
+ 9000 train 5.638720 (lr=6.1462e-06) (hash(x)=142797279)
+ 9100 val loss 5.8454
+ 9100 val perplexity 345.6317
+ 9100 train 5.712313 (lr=5.9300e-06) (hash(x)=143037503)
+ 9200 val loss 5.8462
+ 9200 val perplexity 345.9006
+ 9200 train 5.736977 (lr=5.7359e-06) (hash(x)=113690273)
+ 9300 val loss 5.8437
+ 9300 val perplexity 345.0645
+ 9300 train 5.782093 (lr=5.5641e-06) (hash(x)=158025077)
+ 9400 val loss 5.8422
+ 9400 val perplexity 344.5193
+ 9400 train 5.916287 (lr=5.4149e-06) (hash(x)=158251718)
+ 9500 val loss 5.8380
+ 9500 val perplexity 343.0774
+ 9500 train 5.881953 (lr=5.2884e-06) (hash(x)=154752610)
+ 9600 val loss 5.8383
+ 9600 val perplexity 343.1870
+ 9600 train 5.720537 (lr=5.1847e-06) (hash(x)=146889093)
+ 9700 val loss 5.8335
+ 9700 val perplexity 341.5587
+ 9700 train 5.841027 (lr=5.1040e-06) (hash(x)=156906516)
+ 9800 val loss 5.8335
+ 9800 val perplexity 341.5619
+ 9800 train 5.668470 (lr=5.0462e-06) (hash(x)=153841927)
+ 9900 val loss 5.8336
+ 9900 val perplexity 341.5966
+ 9900 train 6.047590 (lr=5.0116e-06) (hash(x)=163514334)
+ 9999 val loss 5.8284
+ 9999 val perplexity 339.8243
attention_kindselective_n_heads4_seed1340/model_02500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:dc8ab622ad0bced5af47c95b062cb4142a172205f16a19af25bfeb37ae7e6ead
+ oid sha256:abc5e719ab2d39dd15542a62fab76a12f211bdda8cb49d1f4cc978df6d5c511c
  size 92843394
attention_kindselective_n_heads4_seed1340/model_05000.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5101401041308ab5e3b94226fac88ca83468b023ae6cb4b47849d5c1a7d152b7
+ oid sha256:4adea175cfbc2aec7fa1818cc4955862e5783f4af8eed99bc8c0c0004a21b105
  size 92843394
attention_kindselective_n_heads4_seed1340/model_07500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:c0101f627a40dd410fab6f508a3a497c046dbbf1dbfe43ec81e6646b715a6e9b
+ oid sha256:cd26e0326526daf53111cc178c81ef9d8d0794fbec4f45b266907ee2a63100eb
  size 92843394
attention_kindselective_n_heads4_seed1340/model_09999.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:aa0f34e44744d28c8fc4936ee2d79509aae3e04975bced22d255115e2576cf62
+ oid sha256:4c18c0c2379102f3d5a0dc91c07f339598affa0059ba1b204ddcd7b59fcc2156
  size 92843394
attention_kindselective_n_heads4_seed1340/optimizer_02500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:9d98a3486500cda9b7a78aff66d543fb4e921587f58675349d8d1a02e4977349
+ oid sha256:73bd395e697f222dad88aa6e4932f1c2132ab82b4a763c51958033424856cf1d
  size 179406214
attention_kindselective_n_heads4_seed1340/optimizer_05000.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:9b0d77a77d8a5c48775bb7f2c0c19972e9a8fa816c04146058b96849cf820186
+ oid sha256:dceafff09f962434445411aaf6b406fbf74c4d91675fcc6b1a4b2ad2fc0187ef
  size 179406214
attention_kindselective_n_heads4_seed1340/optimizer_07500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:7b64cf3e05ef6defb6d5634f73857bb471e6418aeab79e0d84177c0e7193b147
+ oid sha256:68e4b2823d6671f41998ab15b43eb99146129f535fd8bdeb27769d668f347ff7
  size 179406214
attention_kindselective_n_heads4_seed1340/optimizer_09999.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:28f88b813cbb7cebd77efdbeb49842210d132f1331d2a9518f163562c7fa3f8b
+ oid sha256:8e1d34305e46c99a66f795b9ae398c282c929450872c4d4310042322dc6f4748
  size 179406214