Commit bf83b9e · verified · committed by andrew-healey
1 Parent(s): 03ec7c4

Upload folder using huggingface_hub

attention_kindselective_n_heads4_seed1339/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_9/attention_kindselective_n_heads4_seed1339", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_9", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1339, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "5e-5_61440_4_1339", "n_embd": 256}
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_9/attention_kindselective_n_heads4_seed1339", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_9", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1339, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 6e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "6e-5_61440_4_1339", "n_embd": 256}
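The only substantive change in args.json is the peak learning rate: max_lr goes from 5e-05 to 6e-05, and the derived key changes from "5e-5_61440_4_1339" to "6e-5_61440_4_1339" to match. As a hedged reconstruction (not the repo's actual training code), the logged lr values below are consistent with a linear warmup over warmup_steps followed by cosine decay to a floor; the get_lr helper and the 6e-06 floor (inferred from this run's logged tail, not present in args.json) are assumptions:

import math

# Hypothetical sketch of the schedule the logged lr values appear to follow.
# min_lr is inferred from the logged tail (~6.0e-06); it is not in args.json.
def get_lr(step, max_lr=6e-5, min_lr=6e-6, warmup_steps=500, max_steps=8750):
    if step < warmup_steps:
        # Linear warmup: reproduces lr=1.2000e-07 at step 0 and 1.2120e-05 at step 100.
        return max_lr * (step + 1) / warmup_steps
    # Cosine decay from max_lr to min_lr over the remaining steps.
    progress = (step - warmup_steps) / (max_steps - warmup_steps)
    coeff = 0.5 * (1.0 + math.cos(math.pi * progress))
    return min_lr + coeff * (max_lr - min_lr)

print(f"{get_lr(0):.4e}")     # 1.2000e-07, as logged
print(f"{get_lr(600):.4e}")   # 5.9980e-05, as logged
print(f"{get_lr(8700):.4e}")  # 6.0049e-06, as logged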
attention_kindselective_n_heads4_seed1339/log2.txt CHANGED
@@ -1,49 +1,267 @@
- 8000 train 5.839575 (lr=3.5468e-06) (hash(x)=156122973)
  max_steps: 8750
  0 val loss 11.2808
  0 val perplexity 79287.6797
- 8100 val loss 5.8084
- 8100 val perplexity 333.0969
- 8100 train 5.844770 (lr=3.4114e-06) (hash(x)=156153179)
- 0 train 11.289339 (lr=1.0000e-07) (hash(x)=150724848)
- 100 val loss 10.0102
- 100 val perplexity 22253.1465
- 100 train 10.025309 (lr=1.0100e-05) (hash(x)=149910534)
- 8200 val loss 5.8078
- 8200 val perplexity 332.8854
- 8200 train 5.844475 (lr=3.2950e-06) (hash(x)=146430698)
- 200 val loss 9.2383
- 200 val perplexity 10283.3877
- 200 train 9.265762 (lr=2.0100e-05) (hash(x)=148123706)
- 8300 val loss 5.8067
- 8300 val perplexity 332.5358
- 8300 train 5.676077 (lr=3.1977e-06) (hash(x)=143507257)
- 300 val loss 8.0321
- 300 val perplexity 3078.1367
- 300 train 7.902669 (lr=3.0100e-05) (hash(x)=146678221)
- 8400 val loss 5.8027
- 8400 val perplexity 331.1894
- 8400 train 5.896213 (lr=3.1197e-06) (hash(x)=166272643)
- 400 val loss 7.5622
- 400 val perplexity 1924.0914
- 400 train 7.451694 (lr=4.0100e-05) (hash(x)=151700982)
- 8500 val loss 5.8010
- 8500 val perplexity 330.6212
- 8500 train 5.702298 (lr=3.0611e-06) (hash(x)=143887848)
- 500 val loss 7.4032
- 500 val perplexity 1641.3018
- 500 train 7.482969 (lr=5.0000e-05) (hash(x)=156182087)
- 8600 val loss 5.8008
- 8600 val perplexity 330.5657
- 8600 train 5.850898 (lr=3.0220e-06) (hash(x)=156900341)
- 600 val loss 7.2893
- 600 val perplexity 1464.6086
- 600 train 7.262670 (lr=4.9984e-05) (hash(x)=149318660)
- 8700 val loss 5.7967
- 8700 val perplexity 329.2034
- 8700 train 6.024566 (lr=3.0024e-06) (hash(x)=146417632)
- 700 val loss 7.1942
- 700 val perplexity 1331.6840
- 700 train 7.235372 (lr=4.9935e-05) (hash(x)=150482428)
- 8749 val loss 5.7958
- 8749 val perplexity 328.9303
+ 0 train 11.289339 (lr=1.2000e-07) (hash(x)=150724848)
+ 100 val loss 9.9708
+ 100 val perplexity 21393.6426
+ 100 train 9.984427 (lr=1.2120e-05) (hash(x)=149910534)
+ 200 val loss 8.9782
+ 200 val perplexity 7928.1284
+ 200 train 9.005048 (lr=2.4120e-05) (hash(x)=148123706)
+ 300 val loss 7.7778
+ 300 val perplexity 2386.9246
+ 300 train 7.620437 (lr=3.6120e-05) (hash(x)=146678221)
+ 400 val loss 7.4544
+ 400 val perplexity 1727.4391
+ 400 train 7.335504 (lr=4.8120e-05) (hash(x)=151700982)
+ 500 val loss 7.2946
+ 500 val perplexity 1472.3665
+ 500 train 7.381680 (lr=6.0000e-05) (hash(x)=156182087)
+ 600 val loss 7.1667
+ 600 val perplexity 1295.6157
+ 600 train 7.135485 (lr=5.9980e-05) (hash(x)=149318660)
+ 700 val loss 7.0523
+ 700 val perplexity 1155.4626
+ 700 train 7.087841 (lr=5.9922e-05) (hash(x)=150482428)
+ 800 val loss 6.9016
+ 800 val perplexity 993.8256
+ 800 train 6.779600 (lr=5.9824e-05) (hash(x)=143268605)
+ 900 val loss 6.7801
+ 900 val perplexity 880.1960
+ 900 train 6.748869 (lr=5.9687e-05) (hash(x)=152322423)
+ 1000 val loss 6.6820
+ 1000 val perplexity 797.9325
+ 1000 train 6.548263 (lr=5.9512e-05) (hash(x)=147904298)
+ 1100 val loss 6.5889
+ 1100 val perplexity 726.9638
+ 1100 train 6.798097 (lr=5.9298e-05) (hash(x)=154343147)
+ 1200 val loss 6.4981
+ 1200 val perplexity 663.8964
+ 1200 train 6.485293 (lr=5.9046e-05) (hash(x)=141843115)
+ 1300 val loss 6.4315
+ 1300 val perplexity 621.1253
+ 1300 train 6.326127 (lr=5.8757e-05) (hash(x)=145279030)
+ 1400 val loss 6.4002
+ 1400 val perplexity 601.9705
+ 1400 train 6.344112 (lr=5.8430e-05) (hash(x)=152507639)
+ 1500 val loss 6.3592
+ 1500 val perplexity 577.7610
+ 1500 train 6.397813 (lr=5.8066e-05) (hash(x)=148473774)
+ 1600 val loss 6.2977
+ 1600 val perplexity 543.3112
+ 1600 train 6.342596 (lr=5.7666e-05) (hash(x)=151117002)
+ 1700 val loss 6.2554
+ 1700 val perplexity 520.8115
+ 1700 train 6.206963 (lr=5.7230e-05) (hash(x)=138011335)
+ 1800 val loss 6.2183
+ 1800 val perplexity 501.8493
+ 1800 train 6.337492 (lr=5.6759e-05) (hash(x)=171180926)
+ 1900 val loss 6.1788
+ 1900 val perplexity 482.4208
+ 1900 train 6.123930 (lr=5.6253e-05) (hash(x)=141769419)
+ 2000 val loss 6.1479
+ 2000 val perplexity 467.7460
+ 2000 train 6.107029 (lr=5.5714e-05) (hash(x)=151963443)
+ 2100 val loss 6.1301
+ 2100 val perplexity 459.4699
+ 2100 train 6.177304 (lr=5.5142e-05) (hash(x)=162947470)
+ 2200 val loss 6.0852
+ 2200 val perplexity 439.3070
+ 2200 train 6.288651 (lr=5.4537e-05) (hash(x)=154954810)
+ 2300 val loss 6.0757
+ 2300 val perplexity 435.1639
+ 2300 train 6.050261 (lr=5.3902e-05) (hash(x)=151878111)
+ 2400 val loss 6.0571
+ 2400 val perplexity 427.1534
+ 2400 train 5.953463 (lr=5.3236e-05) (hash(x)=158661057)
+ 2500 val loss 6.0207
+ 2500 val perplexity 411.8731
+ 2500 train 6.041787 (lr=5.2541e-05) (hash(x)=150925584)
+ 2600 val loss 5.9995
+ 2600 val perplexity 403.2292
+ 2600 train 5.913114 (lr=5.1817e-05) (hash(x)=144515755)
+ 2700 val loss 5.9761
+ 2700 val perplexity 393.9177
+ 2700 train 5.959197 (lr=5.1067e-05) (hash(x)=153109144)
+ 2800 val loss 5.9473
+ 2800 val perplexity 382.7029
+ 2800 train 5.855392 (lr=5.0290e-05) (hash(x)=151152897)
+ 2900 val loss 5.9254
+ 2900 val perplexity 374.4294
+ 2900 train 5.869029 (lr=4.9487e-05) (hash(x)=145800210)
+ 3000 val loss 5.9079
+ 3000 val perplexity 367.9445
+ 3000 train 5.716622 (lr=4.8662e-05) (hash(x)=141997485)
+ 3100 val loss 5.8772
+ 3100 val perplexity 356.8146
+ 3100 train 5.803132 (lr=4.7813e-05) (hash(x)=154049740)
+ 3200 val loss 5.8575
+ 3200 val perplexity 349.8481
+ 3200 train 5.783772 (lr=4.6943e-05) (hash(x)=150471842)
+ 3300 val loss 5.8503
+ 3300 val perplexity 347.3422
+ 3300 train 5.798738 (lr=4.6052e-05) (hash(x)=149048126)
+ 3400 val loss 5.8291
+ 3400 val perplexity 340.0508
+ 3400 train 5.949715 (lr=4.5143e-05) (hash(x)=161261339)
+ 3500 val loss 5.8089
+ 3500 val perplexity 333.2634
+ 3500 train 5.763816 (lr=4.4216e-05) (hash(x)=157495564)
+ 3600 val loss 5.7943
+ 3600 val perplexity 328.4197
+ 3600 train 5.711343 (lr=4.3273e-05) (hash(x)=144352932)
+ 3700 val loss 5.7753
+ 3700 val perplexity 322.2256
+ 3700 train 5.737307 (lr=4.2315e-05) (hash(x)=149389012)
+ 3800 val loss 5.7588
+ 3800 val perplexity 316.9792
+ 3800 train 5.704242 (lr=4.1343e-05) (hash(x)=146607620)
+ 3900 val loss 5.7450
+ 3900 val perplexity 312.6133
+ 3900 train 5.664580 (lr=4.0360e-05) (hash(x)=143754617)
+ 4000 val loss 5.7298
+ 4000 val perplexity 307.9217
+ 4000 train 5.700660 (lr=3.9365e-05) (hash(x)=156930722)
+ 4100 val loss 5.7148
+ 4100 val perplexity 303.3377
+ 4100 train 5.554720 (lr=3.8362e-05) (hash(x)=147500519)
+ 4200 val loss 5.7047
+ 4200 val perplexity 300.2655
+ 4200 train 5.543267 (lr=3.7351e-05) (hash(x)=143232237)
+ 4300 val loss 5.6875
+ 4300 val perplexity 295.1630
+ 4300 train 5.596540 (lr=3.6333e-05) (hash(x)=146811670)
+ 4400 val loss 5.6755
+ 4400 val perplexity 291.6416
+ 4400 train 5.703570 (lr=3.5311e-05) (hash(x)=158418746)
+ 4500 val loss 5.6685
+ 4500 val perplexity 289.5956
+ 4500 train 5.627564 (lr=3.4285e-05) (hash(x)=156695778)
+ 4600 val loss 5.6525
+ 4600 val perplexity 284.9904
+ 4600 train 5.663891 (lr=3.3257e-05) (hash(x)=147791497)
+ 4700 val loss 5.6407
+ 4700 val perplexity 281.6539
+ 4700 train 5.535069 (lr=3.2229e-05) (hash(x)=155533088)
+ 4800 val loss 5.6311
+ 4800 val perplexity 278.9583
+ 4800 train 5.483893 (lr=3.1202e-05) (hash(x)=138350044)
+ 4900 val loss 5.6160
+ 4900 val perplexity 274.7812
+ 4900 train 5.606287 (lr=3.0178e-05) (hash(x)=143735284)
+ 5000 val loss 5.6043
+ 5000 val perplexity 271.5802
+ 5000 train 5.561104 (lr=2.9157e-05) (hash(x)=154976463)
+ 5100 val loss 5.6031
+ 5100 val perplexity 271.2580
+ 5100 train 5.495253 (lr=2.8143e-05) (hash(x)=149894982)
+ 5200 val loss 5.5930
+ 5200 val perplexity 268.5366
+ 5200 train 5.358963 (lr=2.7135e-05) (hash(x)=159326689)
+ 5300 val loss 5.5795
+ 5300 val perplexity 264.9523
+ 5300 train 5.596076 (lr=2.6136e-05) (hash(x)=159484800)
+ 5400 val loss 5.5655
+ 5400 val perplexity 261.2596
+ 5400 train 5.476205 (lr=2.5147e-05) (hash(x)=140385615)
+ 5500 val loss 5.5596
+ 5500 val perplexity 259.7308
+ 5500 train 5.390671 (lr=2.4169e-05) (hash(x)=148498335)
+ 5600 val loss 5.5589
+ 5600 val perplexity 259.5334
+ 5600 train 5.302035 (lr=2.3204e-05) (hash(x)=151907614)
+ 5700 val loss 5.5491
+ 5700 val perplexity 257.0125
+ 5700 train 5.521362 (lr=2.2253e-05) (hash(x)=155192267)
+ 5800 val loss 5.5356
+ 5800 val perplexity 253.5545
+ 5800 train 5.586134 (lr=2.1318e-05) (hash(x)=153132158)
+ 5900 val loss 5.5336
+ 5900 val perplexity 253.0408
+ 5900 train 5.508742 (lr=2.0400e-05) (hash(x)=161446764)
+ 6000 val loss 5.5270
+ 6000 val perplexity 251.3763
+ 6000 train 5.310840 (lr=1.9500e-05) (hash(x)=151512446)
+ 6100 val loss 5.5166
+ 6100 val perplexity 248.7991
+ 6100 train 5.594731 (lr=1.8620e-05) (hash(x)=188094053)
+ 6200 val loss 5.5064
+ 6200 val perplexity 246.2617
+ 6200 train 5.382390 (lr=1.7760e-05) (hash(x)=149389789)
+ 6300 val loss 5.5008
+ 6300 val perplexity 244.8927
+ 6300 train 5.318002 (lr=1.6923e-05) (hash(x)=138212820)
+ 6400 val loss 5.4985
+ 6400 val perplexity 244.3206
+ 6400 train 5.281260 (lr=1.6108e-05) (hash(x)=146535423)
+ 6500 val loss 5.4905
+ 6500 val perplexity 242.3754
+ 6500 train 5.364082 (lr=1.5319e-05) (hash(x)=145950843)
+ 6600 val loss 5.4853
+ 6600 val perplexity 241.1134
+ 6600 train 5.395124 (lr=1.4555e-05) (hash(x)=141162902)
+ 6700 val loss 5.4824
+ 6700 val perplexity 240.4124
+ 6700 train 5.401274 (lr=1.3817e-05) (hash(x)=153018737)
+ 6800 val loss 5.4738
+ 6800 val perplexity 238.3549
+ 6800 train 5.385234 (lr=1.3108e-05) (hash(x)=155640155)
+ 6900 val loss 5.4705
+ 6900 val perplexity 237.5830
+ 6900 train 5.447811 (lr=1.2427e-05) (hash(x)=153722115)
+ 7000 val loss 5.4645
+ 7000 val perplexity 236.1570
+ 7000 train 5.380439 (lr=1.1777e-05) (hash(x)=146953450)
+ 7100 val loss 5.4627
+ 7100 val perplexity 235.7310
+ 7100 train 5.315409 (lr=1.1157e-05) (hash(x)=137663885)
+ 7200 val loss 5.4541
+ 7200 val perplexity 233.7043
+ 7200 train 5.542728 (lr=1.0568e-05) (hash(x)=146172950)
+ 7300 val loss 5.4505
+ 7300 val perplexity 232.8709
+ 7300 train 5.346293 (lr=1.0012e-05) (hash(x)=150018163)
+ 7400 val loss 5.4469
+ 7400 val perplexity 232.0418
+ 7400 train 5.477500 (lr=9.4899e-06) (hash(x)=145351166)
+ 7500 val loss 5.4470
+ 7500 val perplexity 232.0578
+ 7500 train 5.224993 (lr=9.0014e-06) (hash(x)=145292116)
+ 7600 val loss 5.4397
+ 7600 val perplexity 230.3639
+ 7600 train 5.361894 (lr=8.5478e-06) (hash(x)=150235132)
+ 7700 val loss 5.4359
+ 7700 val perplexity 229.5044
+ 7700 train 5.321193 (lr=8.1297e-06) (hash(x)=154543455)
+ 7800 val loss 5.4347
+ 7800 val perplexity 229.2233
+ 7800 train 5.339501 (lr=7.7476e-06) (hash(x)=142456852)
+ 7900 val loss 5.4337
+ 7900 val perplexity 229.0003
+ 7900 train 5.211949 (lr=7.4021e-06) (hash(x)=147363479)
+ 8000 val loss 5.4269
+ 8000 val perplexity 227.4454
+ 8000 train 5.460956 (lr=7.0937e-06) (hash(x)=156122973)
+ 8100 val loss 5.4243
+ 8100 val perplexity 226.8484
+ 8100 train 5.472881 (lr=6.8229e-06) (hash(x)=156153179)
+ 8200 val loss 5.4227
+ 8200 val perplexity 226.4853
+ 8200 train 5.486504 (lr=6.5900e-06) (hash(x)=146430698)
+ 8300 val loss 5.4212
+ 8300 val perplexity 226.1495
+ 8300 train 5.301962 (lr=6.3954e-06) (hash(x)=143507257)
+ 8400 val loss 5.4153
+ 8400 val perplexity 224.8256
+ 8400 train 5.358385 (lr=6.2395e-06) (hash(x)=166272643)
+ 8500 val loss 5.4145
+ 8500 val perplexity 224.6475
+ 8500 train 5.319081 (lr=6.1223e-06) (hash(x)=143887848)
+ 8600 val loss 5.4134
+ 8600 val perplexity 224.3872
+ 8600 train 5.440458 (lr=6.0440e-06) (hash(x)=156900341)
+ 8700 val loss 5.4085
+ 8700 val perplexity 223.2952
+ 8700 train 5.625781 (lr=6.0049e-06) (hash(x)=146417632)
+ 8749 val loss 5.4078
+ 8749 val perplexity 223.1394
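With the higher peak learning rate, the run converges noticeably further: final val loss 5.4078 (perplexity 223.1394) versus 5.7958 (perplexity 328.9303) in the replaced log. The logged perplexity is simply exp of the logged loss, which a two-line check reproduces (up to rounding of the printed loss):

import math

# "val perplexity" in the log is exp("val loss"); the small gap comes from the
# loss being printed to only four decimal places.
print(math.exp(5.4078))  # ~223.14, log says 223.1394
print(math.exp(5.7958))  # ~328.92, log says 328.9303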
attention_kindselective_n_heads4_seed1339/model_08749.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:e5e246a3164bacf50817c15a67bd6b599ff7acc11ae461b713148076d24a4299
+ oid sha256:670863be01d7591c43c5c574c64109eb1a0e950c101c5d0b4496599d38dd0bd9
  size 92843394
attention_kindselective_n_heads4_seed1339/optimizer_08749.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:4ee398ac385f1f29570d770fc19076827ace193d5ebf660ef5f349bc78d93979
+ oid sha256:92d1b73e300d1a0662b1eec7fad7b3088041dc4bda11603e2d39b3a02feba96e
  size 179406214
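The checkpoint and optimizer files are Git LFS pointers, so only the sha256 oid changes while the byte sizes (92843394 and 179406214) stay identical: the retrained weights replace the old ones file-for-file. As a minimal illustrative sketch of this pointer format (parse_lfs_pointer is a hypothetical helper, not part of this repo):

# Parse a Git LFS pointer file of the form shown in the diffs above.
def parse_lfs_pointer(text: str) -> dict:
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {
        "version": fields["version"],
        "oid": fields["oid"].removeprefix("sha256:"),  # hex digest of the real file
        "size": int(fields["size"]),                   # size of the real file, in bytes
    }

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:670863be01d7591c43c5c574c64109eb1a0e950c101c5d0b4496599d38dd0bd9
size 92843394"""
print(parse_lfs_pointer(pointer))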