andrew-healey committed
Commit 3e97235 · verified · Parent: 52d2247

Upload folder using huggingface_hub

trying_new_latent_masks/baseline_no_head_seed_1338/args.json ADDED
@@ -0,0 +1 @@
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "trying_new_latent_masks/baseline_no_head_seed_1338", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 12, "n_embd": 768, "head_dim": 64, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": null, "warmup_steps": null, "group": "trying_new_latent_masks", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1338, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none_with_no_head", "selection_head_linear_combo_scale": 1.0, "disable_selection_head_linear_combo_bias": false, "assert_latent_matches_no_head": false, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": null, "batch_size": 4, "total_batch_size": null, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "init_latent_masks_to_identity": false, "init_latent_masks_to_inverse": false, "latent_mask_scale": null, "latent_mask_sigmoid": false, "S_layernorm": false, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": false, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": null, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "baseline_no_head", "latent_mask_precision": "bfloat16"}
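For quick inspection, here is a minimal sketch (plain Python; the path mirrors the file added above and should be adjusted if the repo is checked out elsewhere) that loads this args.json and prints the fields that characterize the baseline run:

```python
import json

# Path of the config added in this commit (adjust to your local checkout).
ARGS_PATH = "trying_new_latent_masks/baseline_no_head_seed_1338/args.json"

with open(ARGS_PATH) as f:
    args = json.load(f)

# A few fields that define this baseline: selective attention, no extra
# selection head, 12 heads of dim 64, seed 1338, bfloat16 latent masks.
for key in ("key", "attention_kind", "add_a_head", "n_heads", "n_embd",
            "head_dim", "random_seed", "batch_size", "latent_mask_precision"):
    print(f"{key}: {args[key]}")
```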
trying_new_latent_masks/baseline_no_head_seed_1338/dataloader_02499.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:86bf2d12aa2d7d065a4e8fd1660928dd629c2417a5062dd8aecee75fb283886f
+ size 964
trying_new_latent_masks/baseline_no_head_seed_1338/log2.txt ADDED
@@ -0,0 +1,303 @@
+ max_steps: 2500
+ 0 val loss 10.9701
+ 0 val perplexity 58109.5391
+ 0 train 10.974605 (lr=8.3916e-07) (hash(x)=20472174)
+ 10 train 10.169651 (lr=9.2308e-06) (hash(x)=18772821)
+ 20 train 9.655478 (lr=1.7622e-05) (hash(x)=18169834)
+ 30 train 9.425161 (lr=2.6014e-05) (hash(x)=21772648)
+ 40 train 9.197157 (lr=3.4406e-05) (hash(x)=19737653)
+ 50 train 8.886475 (lr=4.2797e-05) (hash(x)=18397495)
+ 60 train 8.532780 (lr=5.1189e-05) (hash(x)=19313033)
+ 70 train 8.135405 (lr=5.9580e-05) (hash(x)=19235256)
+ 80 train 7.878307 (lr=6.7972e-05) (hash(x)=15263116)
+ 90 train 7.597981 (lr=7.6364e-05) (hash(x)=20660013)
+ 100 val loss 7.4542
+ 100 val perplexity 1727.1410
+ 100 train 7.413381 (lr=8.4755e-05) (hash(x)=19046599)
+ 110 train 7.304238 (lr=9.3147e-05) (hash(x)=24079550)
+ 120 train 7.071337 (lr=1.0154e-04) (hash(x)=26797340)
+ 130 train 7.097270 (lr=1.0993e-04) (hash(x)=20619718)
+ 140 train 6.898930 (lr=1.1832e-04) (hash(x)=18313305)
+ 150 train 6.850006 (lr=1.2671e-04) (hash(x)=20825391)
+ 160 train 6.621400 (lr=1.3510e-04) (hash(x)=20505694)
+ 170 train 6.514496 (lr=1.4350e-04) (hash(x)=24020301)
+ 180 train 6.566259 (lr=1.5189e-04) (hash(x)=23124134)
+ 190 train 6.418679 (lr=1.6028e-04) (hash(x)=17851881)
+ 200 val loss 6.3627
+ 200 val perplexity 579.7912
+ 200 train 6.345577 (lr=1.6867e-04) (hash(x)=21138724)
+ 210 train 6.175342 (lr=1.7706e-04) (hash(x)=16978198)
+ 220 train 6.108572 (lr=1.8545e-04) (hash(x)=20812935)
+ 230 train 6.183254 (lr=1.9385e-04) (hash(x)=20619471)
+ 240 train 6.116443 (lr=2.0224e-04) (hash(x)=17706413)
+ 250 train 5.932513 (lr=2.1063e-04) (hash(x)=21557598)
+ 260 train 5.869222 (lr=2.1902e-04) (hash(x)=18077648)
+ 270 train 5.981472 (lr=2.2741e-04) (hash(x)=17053957)
+ 280 train 5.913751 (lr=2.3580e-04) (hash(x)=20526478)
+ 290 train 5.743469 (lr=2.4420e-04) (hash(x)=22226244)
+ 300 val loss 5.7556
+ 300 val perplexity 315.9584
+ 300 train 5.667214 (lr=2.5259e-04) (hash(x)=19583531)
+ 310 train 5.521681 (lr=2.6098e-04) (hash(x)=18986313)
+ 320 train 5.744861 (lr=2.6937e-04) (hash(x)=23629157)
+ 330 train 5.659211 (lr=2.7776e-04) (hash(x)=21312406)
+ 340 train 5.474382 (lr=2.8615e-04) (hash(x)=19278398)
+ 350 train 5.485051 (lr=2.9455e-04) (hash(x)=19370729)
+ 360 train 5.325763 (lr=3.0294e-04) (hash(x)=20663223)
+ 370 train 5.501541 (lr=3.1133e-04) (hash(x)=22952038)
+ 380 train 5.418203 (lr=3.1972e-04) (hash(x)=21902455)
+ 390 train 5.427595 (lr=3.2811e-04) (hash(x)=21953350)
+ 400 val loss 5.3124
+ 400 val perplexity 202.8403
+ 400 train 5.377280 (lr=3.3650e-04) (hash(x)=22915018)
+ 410 train 5.292256 (lr=3.4490e-04) (hash(x)=18301453)
+ 420 train 5.221013 (lr=3.5329e-04) (hash(x)=17422968)
+ 430 train 5.082413 (lr=3.6168e-04) (hash(x)=20616951)
+ 440 train 5.255016 (lr=3.7007e-04) (hash(x)=19912399)
+ 450 train 5.084553 (lr=3.7846e-04) (hash(x)=21154839)
+ 460 train 5.050186 (lr=3.8685e-04) (hash(x)=18919228)
+ 470 train 5.000904 (lr=3.9524e-04) (hash(x)=20916162)
+ 480 train 4.897154 (lr=4.0364e-04) (hash(x)=19136877)
+ 490 train 4.998033 (lr=4.1203e-04) (hash(x)=22631452)
+ 500 val loss 4.8836
+ 500 val perplexity 132.0999
+ 500 train 4.905692 (lr=4.2042e-04) (hash(x)=21454149)
+ 510 train 4.761783 (lr=4.2881e-04) (hash(x)=18263355)
+ 520 train 4.711554 (lr=4.3720e-04) (hash(x)=21540112)
+ 530 train 4.724404 (lr=4.4559e-04) (hash(x)=20378460)
+ 540 train 4.777648 (lr=4.5399e-04) (hash(x)=25242015)
+ 550 train 4.688991 (lr=4.6238e-04) (hash(x)=18067565)
+ 560 train 4.577537 (lr=4.7077e-04) (hash(x)=18532926)
+ 570 train 4.488824 (lr=4.7916e-04) (hash(x)=17014430)
+ 580 train 4.519387 (lr=4.8755e-04) (hash(x)=21587296)
+ 590 train 4.598910 (lr=4.9594e-04) (hash(x)=19956044)
+ 600 val loss 4.5575
+ 600 val perplexity 95.3439
+ 600 train 4.604202 (lr=5.0434e-04) (hash(x)=20658639)
+ 610 train 4.512754 (lr=5.1273e-04) (hash(x)=18301842)
+ 620 train 4.476230 (lr=5.2112e-04) (hash(x)=18675952)
+ 630 train 4.580554 (lr=5.2951e-04) (hash(x)=17602704)
+ 640 train 4.506849 (lr=5.3790e-04) (hash(x)=21491802)
+ 650 train 4.488055 (lr=5.4629e-04) (hash(x)=20557423)
+ 660 train 4.369354 (lr=5.5469e-04) (hash(x)=21188117)
+ 670 train 4.481751 (lr=5.6308e-04) (hash(x)=20683583)
+ 680 train 4.423576 (lr=5.7147e-04) (hash(x)=24879052)
+ 690 train 4.301941 (lr=5.7986e-04) (hash(x)=19538285)
+ 700 val loss 4.3183
+ 700 val perplexity 75.0588
+ 700 train 4.332294 (lr=5.8825e-04) (hash(x)=18338040)
+ 710 train 4.331699 (lr=5.9664e-04) (hash(x)=21087427)
+ 720 train 4.266440 (lr=5.9999e-04) (hash(x)=20546196)
+ 730 train 4.293985 (lr=5.9991e-04) (hash(x)=19605436)
+ 740 train 4.268080 (lr=5.9974e-04) (hash(x)=20510264)
+ 750 train 4.311675 (lr=5.9949e-04) (hash(x)=19782550)
+ 760 train 4.207647 (lr=5.9915e-04) (hash(x)=20997594)
+ 770 train 4.099854 (lr=5.9874e-04) (hash(x)=20733511)
+ 780 train 4.175283 (lr=5.9824e-04) (hash(x)=17584884)
+ 790 train 4.109723 (lr=5.9765e-04) (hash(x)=20952808)
+ 800 val loss 4.1632
+ 800 val perplexity 64.2749
+ 800 train 4.062247 (lr=5.9698e-04) (hash(x)=16092551)
+ 810 train 4.097902 (lr=5.9623e-04) (hash(x)=16116155)
+ 820 train 4.168887 (lr=5.9540e-04) (hash(x)=22196607)
+ 830 train 4.049020 (lr=5.9449e-04) (hash(x)=21490054)
+ 840 train 3.964845 (lr=5.9349e-04) (hash(x)=19471853)
+ 850 train 4.099875 (lr=5.9241e-04) (hash(x)=40176264)
+ 860 train 4.094699 (lr=5.9126e-04) (hash(x)=19241841)
+ 870 train 3.953854 (lr=5.9002e-04) (hash(x)=19010654)
+ 880 train 4.045705 (lr=5.8869e-04) (hash(x)=21812805)
+ 890 train 4.139117 (lr=5.8729e-04) (hash(x)=23266756)
+ 900 val loss 4.0361
+ 900 val perplexity 56.6077
+ 900 train 4.008787 (lr=5.8581e-04) (hash(x)=17198172)
+ 910 train 3.904811 (lr=5.8425e-04) (hash(x)=17654181)
+ 920 train 4.011348 (lr=5.8262e-04) (hash(x)=18651087)
+ 930 train 3.963641 (lr=5.8090e-04) (hash(x)=18812778)
+ 940 train 3.876603 (lr=5.7911e-04) (hash(x)=19229059)
+ 950 train 3.991151 (lr=5.7723e-04) (hash(x)=16244689)
+ 960 train 3.936969 (lr=5.7529e-04) (hash(x)=14252464)
+ 970 train 3.855998 (lr=5.7326e-04) (hash(x)=20812699)
+ 980 train 4.033554 (lr=5.7116e-04) (hash(x)=19274156)
+ 990 train 3.900966 (lr=5.6899e-04) (hash(x)=23893376)
+ 1000 val loss 3.9334
+ 1000 val perplexity 51.0805
+ 1000 train 3.788527 (lr=5.6674e-04) (hash(x)=20084739)
+ 1010 train 3.871222 (lr=5.6442e-04) (hash(x)=20537558)
+ 1020 train 3.960896 (lr=5.6202e-04) (hash(x)=20432294)
+ 1030 train 3.872612 (lr=5.5956e-04) (hash(x)=22466028)
+ 1040 train 3.836304 (lr=5.5702e-04) (hash(x)=17894926)
+ 1050 train 3.940110 (lr=5.5441e-04) (hash(x)=21738864)
+ 1060 train 3.848487 (lr=5.5174e-04) (hash(x)=18724801)
+ 1070 train 3.816853 (lr=5.4899e-04) (hash(x)=21845067)
+ 1080 train 3.763618 (lr=5.4618e-04) (hash(x)=17269002)
+ 1090 train 3.891530 (lr=5.4330e-04) (hash(x)=20777124)
+ 1100 val loss 3.8508
+ 1100 val perplexity 47.0318
+ 1100 train 3.818132 (lr=5.4035e-04) (hash(x)=21658646)
+ 1110 train 3.793998 (lr=5.3734e-04) (hash(x)=25217492)
+ 1120 train 3.849333 (lr=5.3426e-04) (hash(x)=17597309)
+ 1130 train 3.840953 (lr=5.3113e-04) (hash(x)=15251388)
+ 1140 train 3.827075 (lr=5.2792e-04) (hash(x)=16526754)
+ 1150 train 3.784678 (lr=5.2466e-04) (hash(x)=16839845)
+ 1160 train 3.801742 (lr=5.2134e-04) (hash(x)=19271941)
+ 1170 train 3.784835 (lr=5.1796e-04) (hash(x)=20159915)
+ 1180 train 3.752326 (lr=5.1452e-04) (hash(x)=18768657)
+ 1190 train 3.797867 (lr=5.1102e-04) (hash(x)=19691298)
+ 1200 val loss 3.7837
+ 1200 val perplexity 43.9772
+ 1200 train 3.795585 (lr=5.0746e-04) (hash(x)=20619530)
+ 1210 train 3.733601 (lr=5.0386e-04) (hash(x)=20327868)
+ 1220 train 3.724357 (lr=5.0019e-04) (hash(x)=17297005)
+ 1230 train 3.744087 (lr=4.9648e-04) (hash(x)=22743384)
+ 1240 train 3.748625 (lr=4.9271e-04) (hash(x)=19764130)
+ 1250 train 3.652545 (lr=4.8889e-04) (hash(x)=19685029)
+ 1260 train 3.709536 (lr=4.8503e-04) (hash(x)=21621842)
+ 1270 train 3.714777 (lr=4.8111e-04) (hash(x)=21024799)
+ 1280 train 3.724952 (lr=4.7715e-04) (hash(x)=20459305)
+ 1290 train 3.697165 (lr=4.7315e-04) (hash(x)=21945208)
+ 1300 val loss 3.7369
+ 1300 val perplexity 41.9693
+ 1300 train 3.745376 (lr=4.6909e-04) (hash(x)=19421909)
+ 1310 train 3.672164 (lr=4.6500e-04) (hash(x)=22065431)
+ 1320 train 3.644414 (lr=4.6086e-04) (hash(x)=18310987)
+ 1330 train 3.739283 (lr=4.5669e-04) (hash(x)=18046899)
+ 1340 train 3.656600 (lr=4.5247e-04) (hash(x)=19877417)
+ 1350 train 3.799405 (lr=4.4822e-04) (hash(x)=21157293)
+ 1360 train 3.675133 (lr=4.4393e-04) (hash(x)=20260598)
+ 1370 train 3.678849 (lr=4.3960e-04) (hash(x)=20189167)
+ 1380 train 3.668259 (lr=4.3524e-04) (hash(x)=22193383)
+ 1390 train 3.663222 (lr=4.3085e-04) (hash(x)=16407046)
+ 1400 val loss 3.6874
+ 1400 val perplexity 39.9398
+ 1400 train 3.679003 (lr=4.2643e-04) (hash(x)=22770666)
+ 1410 train 3.698073 (lr=4.2197e-04) (hash(x)=22418421)
+ 1420 train 3.762456 (lr=4.1749e-04) (hash(x)=17883057)
+ 1430 train 3.597680 (lr=4.1298e-04) (hash(x)=18408864)
+ 1440 train 3.589924 (lr=4.0845e-04) (hash(x)=19699467)
+ 1450 train 3.649273 (lr=4.0389e-04) (hash(x)=21354581)
+ 1460 train 3.673139 (lr=3.9931e-04) (hash(x)=20422335)
+ 1470 train 3.577728 (lr=3.9470e-04) (hash(x)=18953370)
+ 1480 train 3.689185 (lr=3.9008e-04) (hash(x)=23442747)
+ 1490 train 3.735836 (lr=3.8544e-04) (hash(x)=20126187)
+ 1500 val loss 3.6476
+ 1500 val perplexity 38.3807
+ 1500 train 3.676198 (lr=3.8078e-04) (hash(x)=20342754)
+ 1510 train 3.635450 (lr=3.7610e-04) (hash(x)=19036313)
+ 1520 train 3.681353 (lr=3.7142e-04) (hash(x)=22164007)
+ 1530 train 3.652462 (lr=3.6671e-04) (hash(x)=24857054)
+ 1540 train 3.617547 (lr=3.6200e-04) (hash(x)=21281762)
+ 1550 train 3.578492 (lr=3.5728e-04) (hash(x)=19650272)
+ 1560 train 3.679856 (lr=3.5255e-04) (hash(x)=20137905)
+ 1570 train 3.514530 (lr=3.4781e-04) (hash(x)=14416059)
+ 1580 train 3.647124 (lr=3.4306e-04) (hash(x)=23684747)
+ 1590 train 3.493770 (lr=3.3831e-04) (hash(x)=20092448)
+ 1600 val loss 3.6138
+ 1600 val perplexity 37.1076
+ 1600 train 3.575259 (lr=3.3356e-04) (hash(x)=18725986)
+ 1610 train 3.560003 (lr=3.2881e-04) (hash(x)=27357245)
+ 1620 train 3.472562 (lr=3.2406e-04) (hash(x)=20033464)
+ 1630 train 3.701512 (lr=3.1931e-04) (hash(x)=20930109)
+ 1640 train 3.640582 (lr=3.1456e-04) (hash(x)=19398603)
+ 1650 train 3.680525 (lr=3.0982e-04) (hash(x)=20959448)
+ 1660 train 3.398083 (lr=3.0509e-04) (hash(x)=19692923)
+ 1670 train 3.656372 (lr=3.0036e-04) (hash(x)=23448284)
+ 1680 train 3.647425 (lr=2.9564e-04) (hash(x)=17968371)
+ 1690 train 3.573978 (lr=2.9093e-04) (hash(x)=15669180)
+ 1700 val loss 3.5803
+ 1700 val perplexity 35.8827
+ 1700 train 3.389013 (lr=2.8624e-04) (hash(x)=17816072)
+ 1710 train 3.589632 (lr=2.8156e-04) (hash(x)=19307466)
+ 1720 train 3.634946 (lr=2.7689e-04) (hash(x)=22978699)
+ 1730 train 3.600740 (lr=2.7224e-04) (hash(x)=23143048)
+ 1740 train 3.475498 (lr=2.6761e-04) (hash(x)=18722687)
+ 1750 train 3.400439 (lr=2.6299e-04) (hash(x)=18477212)
+ 1760 train 3.587195 (lr=2.5840e-04) (hash(x)=19884708)
+ 1770 train 3.600843 (lr=2.5383e-04) (hash(x)=20256662)
+ 1780 train 3.568640 (lr=2.4928e-04) (hash(x)=18622247)
+ 1790 train 3.526660 (lr=2.4476e-04) (hash(x)=21400648)
+ 1800 val loss 3.5568
+ 1800 val perplexity 35.0525
+ 1800 train 3.357833 (lr=2.4026e-04) (hash(x)=23156122)
+ 1810 train 3.600115 (lr=2.3580e-04) (hash(x)=13466715)
+ 1820 train 3.528891 (lr=2.3136e-04) (hash(x)=18810586)
+ 1830 train 3.528353 (lr=2.2695e-04) (hash(x)=20834847)
+ 1840 train 3.348410 (lr=2.2257e-04) (hash(x)=14702111)
+ 1850 train 3.587048 (lr=2.1823e-04) (hash(x)=18470456)
+ 1860 train 3.580447 (lr=2.1392e-04) (hash(x)=20701741)
+ 1870 train 3.511037 (lr=2.0965e-04) (hash(x)=19669016)
+ 1880 train 3.491338 (lr=2.0542e-04) (hash(x)=20381930)
+ 1890 train 3.268223 (lr=2.0122e-04) (hash(x)=21303953)
+ 1900 val loss 3.5316
+ 1900 val perplexity 34.1794
+ 1900 train 3.553535 (lr=1.9706e-04) (hash(x)=22119135)
+ 1910 train 3.604808 (lr=1.9295e-04) (hash(x)=19343834)
+ 1920 train 3.504541 (lr=1.8887e-04) (hash(x)=20131130)
+ 1930 train 3.351381 (lr=1.8485e-04) (hash(x)=21254128)
+ 1940 train 3.513910 (lr=1.8086e-04) (hash(x)=20816939)
+ 1950 train 3.556132 (lr=1.7692e-04) (hash(x)=19820308)
+ 1960 train 3.552554 (lr=1.7303e-04) (hash(x)=28009259)
+ 1970 train 3.457393 (lr=1.6919e-04) (hash(x)=19370262)
+ 1980 train 3.276536 (lr=1.6540e-04) (hash(x)=19401165)
+ 1990 train 3.558918 (lr=1.6166e-04) (hash(x)=18628676)
+ 2000 val loss 3.5061
+ 2000 val perplexity 33.3184
+ 2000 train 3.553443 (lr=1.5797e-04) (hash(x)=20151188)
+ 2010 train 3.557988 (lr=1.5433e-04) (hash(x)=18053856)
+ 2020 train 3.549010 (lr=1.5075e-04) (hash(x)=20481771)
+ 2030 train 3.578179 (lr=1.4723e-04) (hash(x)=18607449)
+ 2040 train 3.461038 (lr=1.4376e-04) (hash(x)=21897936)
+ 2050 train 3.489151 (lr=1.4035e-04) (hash(x)=19648528)
+ 2060 train 3.514096 (lr=1.3699e-04) (hash(x)=21541784)
+ 2070 train 3.534430 (lr=1.3370e-04) (hash(x)=19466104)
+ 2080 train 3.503854 (lr=1.3047e-04) (hash(x)=19540614)
+ 2090 train 3.484404 (lr=1.2730e-04) (hash(x)=18783653)
+ 2100 val loss 3.4932
+ 2100 val perplexity 32.8902
+ 2100 train 3.422311 (lr=1.2419e-04) (hash(x)=30861019)
+ 2110 train 3.447717 (lr=1.2115e-04) (hash(x)=20187147)
+ 2120 train 3.546875 (lr=1.1817e-04) (hash(x)=17350218)
+ 2130 train 3.583870 (lr=1.1525e-04) (hash(x)=21668918)
+ 2140 train 3.446914 (lr=1.1241e-04) (hash(x)=16953068)
+ 2150 train 3.561490 (lr=1.0963e-04) (hash(x)=22435234)
+ 2160 train 3.453270 (lr=1.0692e-04) (hash(x)=14277862)
+ 2170 train 3.526937 (lr=1.0427e-04) (hash(x)=21051545)
+ 2180 train 3.502241 (lr=1.0170e-04) (hash(x)=16728289)
+ 2190 train 3.506988 (lr=9.9199e-05) (hash(x)=18519101)
+ 2200 val loss 3.4720
+ 2200 val perplexity 32.1996
+ 2200 train 3.506023 (lr=9.6769e-05) (hash(x)=19727249)
+ 2210 train 3.355397 (lr=9.4412e-05) (hash(x)=18890853)
+ 2220 train 3.514431 (lr=9.2127e-05) (hash(x)=20660564)
+ 2230 train 3.526658 (lr=8.9916e-05) (hash(x)=22774110)
+ 2240 train 3.496122 (lr=8.7779e-05) (hash(x)=19389926)
+ 2250 train 3.528989 (lr=8.5717e-05) (hash(x)=17041691)
+ 2260 train 3.408443 (lr=8.3731e-05) (hash(x)=20807441)
+ 2270 train 3.539665 (lr=8.1821e-05) (hash(x)=22538437)
+ 2280 train 3.559744 (lr=7.9988e-05) (hash(x)=21551546)
+ 2290 train 3.435174 (lr=7.8232e-05) (hash(x)=24167699)
+ 2300 val loss 3.4593
+ 2300 val perplexity 31.7936
+ 2300 train 3.343737 (lr=7.6555e-05) (hash(x)=16362157)
+ 2310 train 3.318605 (lr=7.4956e-05) (hash(x)=23722796)
+ 2320 train 3.546045 (lr=7.3436e-05) (hash(x)=18346470)
+ 2330 train 3.496224 (lr=7.1995e-05) (hash(x)=17096354)
+ 2340 train 3.496301 (lr=7.0635e-05) (hash(x)=23151918)
+ 2350 train 3.312477 (lr=6.9354e-05) (hash(x)=17604704)
+ 2360 train 3.452374 (lr=6.8155e-05) (hash(x)=20390213)
+ 2370 train 3.500260 (lr=6.7036e-05) (hash(x)=24875449)
+ 2380 train 3.518620 (lr=6.5999e-05) (hash(x)=21185640)
+ 2390 train 3.455839 (lr=6.5044e-05) (hash(x)=14839939)
+ 2400 val loss 3.4475
+ 2400 val perplexity 31.4208
+ 2400 train 3.522248 (lr=6.4171e-05) (hash(x)=17498688)
+ 2410 train 3.389596 (lr=6.3380e-05) (hash(x)=20544025)
+ 2420 train 3.397803 (lr=6.2672e-05) (hash(x)=21093275)
+ 2430 train 3.382004 (lr=6.2046e-05) (hash(x)=19270991)
+ 2440 train 3.508012 (lr=6.1504e-05) (hash(x)=17536544)
+ 2450 train 3.442179 (lr=6.1045e-05) (hash(x)=16978919)
+ 2460 train 3.440809 (lr=6.0669e-05) (hash(x)=18893211)
+ 2470 train 3.420211 (lr=6.0376e-05) (hash(x)=17072230)
+ 2480 train 3.340027 (lr=6.0167e-05) (hash(x)=21601992)
+ 2490 train 3.473424 (lr=6.0042e-05) (hash(x)=23385916)
+ 2499 val loss 3.4416
+ 2499 val perplexity 31.2361
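The log above interleaves a train loss line every 10 steps (with the learning rate and a batch hash) and validation loss/perplexity every 100 steps. A minimal sketch, assuming exactly that format, that parses log2.txt and checks that each reported validation perplexity is exp(validation loss):

```python
import math
import re

# Path of the log added in this commit (adjust to your local checkout).
LOG_PATH = "trying_new_latent_masks/baseline_no_head_seed_1338/log2.txt"

val_loss, val_ppl = {}, {}
with open(LOG_PATH) as f:
    for line in f:
        m = re.match(r"(\d+) val loss ([\d.]+)", line)
        if m:
            val_loss[int(m.group(1))] = float(m.group(2))
            continue
        m = re.match(r"(\d+) val perplexity ([\d.]+)", line)
        if m:
            val_ppl[int(m.group(1))] = float(m.group(2))

# Perplexity should equal exp(loss) up to logging precision,
# e.g. exp(3.4416) ≈ 31.24 at the final step 2499.
for step in sorted(val_loss):
    assert abs(math.exp(val_loss[step]) - val_ppl[step]) / val_ppl[step] < 1e-2
    print(step, val_loss[step], val_ppl[step])
```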
trying_new_latent_masks/baseline_no_head_seed_1338/model_02499.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:36c667e49b836953e1bf117c57cc38bb910363fa953f1f2e28cf6f205e11e287
+ size 545793730
trying_new_latent_masks/baseline_no_head_seed_1338/optimizer_02499.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0c05df5ed5b76341e47de788960dbb4d07c08cec08b7173bbda73670df913be4
+ size 990934278
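The .pt entries above are stored as Git LFS pointers, so the diff records only an oid (sha256) and a byte size per checkpoint. A minimal sketch (hypothetical helper; assumes the actual checkpoint files have been downloaded locally) that verifies a downloaded file against its pointer:

```python
import hashlib
import os

def verify_lfs_object(local_path: str, expected_sha256: str, expected_size: int) -> bool:
    """Check a downloaded file against the oid/size fields of its LFS pointer."""
    if os.path.getsize(local_path) != expected_size:
        return False
    h = hashlib.sha256()
    with open(local_path, "rb") as f:
        # Hash in 1 MiB chunks to avoid loading a ~1 GB checkpoint into memory.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == expected_sha256

# Values copied from the optimizer_02499.pt pointer above.
print(verify_lfs_object(
    "trying_new_latent_masks/baseline_no_head_seed_1338/optimizer_02499.pt",
    "0c05df5ed5b76341e47de788960dbb4d07c08cec08b7173bbda73670df913be4",
    990934278,
))
```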