andrew-healey committed on
Commit 0d61a2a · verified · 1 Parent(s): 31ee4ac

Upload folder using huggingface_hub

logs/fix_1_latent_mask/1_latent_mask_lr_35e-4_n_latent_masks_1_relu_seed_1340/args.json ADDED
@@ -0,0 +1 @@
1
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "logs/fix_1_latent_mask/1_latent_mask_lr_35e-4_n_latent_masks_1_relu_seed_1340", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 12, "n_embd": 264, "head_dim": 22, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 4375, "warmup_steps": 250, "group": "fix_1_latent_mask", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1340, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "n_latent_masks", "selection_head_linear_combo_scale": 1.0, "disable_selection_head_linear_combo_bias": false, "assert_latent_matches_no_head": false, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 32, "total_batch_size": 131072, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": 1, "init_latent_masks_to_identity": true, "latent_mask_scale": null, "latent_mask_sigmoid": false, "S_layernorm": false, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.0035, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "1_latent_mask_lr_35e-4_n_latent_masks_1_relu"}
logs/fix_1_latent_mask/1_latent_mask_lr_35e-4_n_latent_masks_1_relu_seed_1340/dataloader_04374.pt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6031fd3e2855a036f7a5531cc24555aabd1115f9dd6618b8b2ca6f55279ef0b2
3
+ size 964
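The .pt files in this commit are tracked with Git LFS, so the diff records only a pointer (spec version, sha256 oid, byte size) rather than the binary itself. To fetch the real artifact one would typically go through huggingface_hub, which per the commit message was also used for the upload. A hedged sketch; the repo_id below is a placeholder, not something stated on this page:

from huggingface_hub import hf_hub_download

# Placeholder repo id -- substitute the actual repo this commit belongs to.
repo_id = "andrew-healey/<repo-name>"

local_path = hf_hub_download(
    repo_id=repo_id,
    filename="logs/fix_1_latent_mask/"
             "1_latent_mask_lr_35e-4_n_latent_masks_1_relu_seed_1340/dataloader_04374.pt",
)
print(local_path)  # local cache path of the 964-byte dataloader state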
logs/fix_1_latent_mask/1_latent_mask_lr_35e-4_n_latent_masks_1_relu_seed_1340/log2.txt ADDED
@@ -0,0 +1,529 @@
1
+ max_steps: 4375
2
+ 0 val loss 11.2769
3
+ 0 val perplexity 78975.5547
4
+ 0 train 11.266157 (lr=4.8951e-06) (hash(x)=45482580)
5
+ 10 train 9.895537 (lr=5.3846e-05) (hash(x)=38414730)
6
+ 20 train 9.300697 (lr=1.0280e-04) (hash(x)=39783917)
7
+ 30 train 8.493125 (lr=1.5175e-04) (hash(x)=38395733)
8
+ 40 train 7.819687 (lr=2.0070e-04) (hash(x)=37486707)
9
+ 50 train 7.610592 (lr=2.4965e-04) (hash(x)=39897505)
10
+ 60 train 7.531856 (lr=2.9860e-04) (hash(x)=40933473)
11
+ 70 train 7.507973 (lr=3.4755e-04) (hash(x)=37690746)
12
+ 80 train 7.383476 (lr=3.9650e-04) (hash(x)=38609656)
13
+ 90 train 7.141262 (lr=4.4545e-04) (hash(x)=38356571)
14
+ 100 val loss 7.0334
15
+ 100 val perplexity 1133.9086
16
+ 100 train 7.015599 (lr=4.9441e-04) (hash(x)=35980376)
17
+ 110 train 6.911716 (lr=5.4336e-04) (hash(x)=42011042)
18
+ 120 train 6.824089 (lr=5.9231e-04) (hash(x)=40266823)
19
+ 130 train 6.669220 (lr=6.4126e-04) (hash(x)=38645447)
20
+ 140 train 6.552652 (lr=6.9021e-04) (hash(x)=36059313)
21
+ 150 train 6.499080 (lr=7.3916e-04) (hash(x)=34161947)
22
+ 160 train 6.366819 (lr=7.8811e-04) (hash(x)=42263375)
23
+ 170 train 6.416893 (lr=8.3706e-04) (hash(x)=42957725)
24
+ 180 train 6.425648 (lr=8.8601e-04) (hash(x)=40198018)
25
+ 190 train 6.163019 (lr=9.3497e-04) (hash(x)=41666215)
26
+ 200 val loss 6.1629
27
+ 200 val perplexity 474.7998
28
+ 200 train 6.122208 (lr=9.8392e-04) (hash(x)=54060482)
29
+ 210 train 6.048735 (lr=1.0329e-03) (hash(x)=42332778)
30
+ 220 train 5.993058 (lr=1.0818e-03) (hash(x)=39551486)
31
+ 230 train 5.997335 (lr=1.1308e-03) (hash(x)=38462018)
32
+ 240 train 6.038626 (lr=1.1797e-03) (hash(x)=36591442)
33
+ 250 train 5.957669 (lr=1.2287e-03) (hash(x)=40861237)
34
+ 260 train 5.911050 (lr=1.2776e-03) (hash(x)=41739369)
35
+ 270 train 5.786546 (lr=1.3266e-03) (hash(x)=39415360)
36
+ 280 train 5.794543 (lr=1.3755e-03) (hash(x)=44665934)
37
+ 290 train 5.728720 (lr=1.4245e-03) (hash(x)=37035579)
38
+ 300 val loss 5.7022
39
+ 300 val perplexity 299.5253
40
+ 300 train 5.725222 (lr=1.4734e-03) (hash(x)=38301011)
41
+ 310 train 5.562574 (lr=1.5224e-03) (hash(x)=42850980)
42
+ 320 train 5.627087 (lr=1.5713e-03) (hash(x)=37476222)
43
+ 330 train 5.568226 (lr=1.6203e-03) (hash(x)=53028205)
44
+ 340 train 5.560227 (lr=1.6692e-03) (hash(x)=41466008)
45
+ 350 train 5.473418 (lr=1.7182e-03) (hash(x)=37802865)
46
+ 360 train 5.425015 (lr=1.7671e-03) (hash(x)=42769282)
47
+ 370 train 5.437476 (lr=1.8161e-03) (hash(x)=39319256)
48
+ 380 train 5.449152 (lr=1.8650e-03) (hash(x)=42637402)
49
+ 390 train 5.422606 (lr=1.9140e-03) (hash(x)=31141514)
50
+ 400 val loss 5.3931
51
+ 400 val perplexity 219.8749
52
+ 400 train 5.469412 (lr=1.9629e-03) (hash(x)=38151157)
53
+ 410 train 5.395465 (lr=2.0119e-03) (hash(x)=46033439)
54
+ 420 train 5.379012 (lr=2.0608e-03) (hash(x)=41365246)
55
+ 430 train 5.374238 (lr=2.1098e-03) (hash(x)=42369184)
56
+ 440 train 5.368971 (lr=2.1587e-03) (hash(x)=42004840)
57
+ 450 train 5.281859 (lr=2.2077e-03) (hash(x)=37181172)
58
+ 460 train 5.379414 (lr=2.2566e-03) (hash(x)=31630797)
59
+ 470 train 5.245008 (lr=2.3056e-03) (hash(x)=42135747)
60
+ 480 train 5.166485 (lr=2.3545e-03) (hash(x)=30023651)
61
+ 490 train 5.247091 (lr=2.4035e-03) (hash(x)=40463476)
62
+ 500 val loss 5.1902
63
+ 500 val perplexity 179.5069
64
+ 500 train 5.112246 (lr=2.4524e-03) (hash(x)=37089842)
65
+ 510 train 5.123877 (lr=2.5014e-03) (hash(x)=41095974)
66
+ 520 train 5.197102 (lr=2.5503e-03) (hash(x)=41525258)
67
+ 530 train 5.163846 (lr=2.5993e-03) (hash(x)=37697291)
68
+ 540 train 5.161045 (lr=2.6483e-03) (hash(x)=43459894)
69
+ 550 train 5.035006 (lr=2.6972e-03) (hash(x)=38993956)
70
+ 560 train 5.053627 (lr=2.7462e-03) (hash(x)=40677277)
71
+ 570 train 5.161124 (lr=2.7951e-03) (hash(x)=43103127)
72
+ 580 train 5.052238 (lr=2.8441e-03) (hash(x)=41871776)
73
+ 590 train 5.051658 (lr=2.8930e-03) (hash(x)=43230967)
74
+ 600 val loss 4.9739
75
+ 600 val perplexity 144.5891
76
+ 600 train 5.015007 (lr=2.9420e-03) (hash(x)=42226172)
77
+ 610 train 5.333269 (lr=2.9909e-03) (hash(x)=43007164)
78
+ 620 train 4.882530 (lr=3.0399e-03) (hash(x)=35752341)
79
+ 630 train 4.968871 (lr=3.0888e-03) (hash(x)=39453504)
80
+ 640 train 4.904297 (lr=3.1378e-03) (hash(x)=43635712)
81
+ 650 train 4.916444 (lr=3.1867e-03) (hash(x)=41852456)
82
+ 660 train 4.929788 (lr=3.2357e-03) (hash(x)=49125742)
83
+ 670 train 4.933168 (lr=3.2846e-03) (hash(x)=34886275)
84
+ 680 train 5.031371 (lr=3.3336e-03) (hash(x)=40546482)
85
+ 690 train 4.841712 (lr=3.3825e-03) (hash(x)=40850901)
86
+ 700 val loss 4.8335
87
+ 700 val perplexity 125.6462
88
+ 700 train 4.805322 (lr=3.4315e-03) (hash(x)=37426993)
89
+ 710 train 4.984472 (lr=3.4804e-03) (hash(x)=42679743)
90
+ 720 train 4.792905 (lr=3.5000e-03) (hash(x)=44856043)
91
+ 730 train 4.783589 (lr=3.4999e-03) (hash(x)=36457471)
92
+ 740 train 4.741574 (lr=3.4996e-03) (hash(x)=47203534)
93
+ 750 train 4.759369 (lr=3.4993e-03) (hash(x)=35585657)
94
+ 760 train 4.668281 (lr=3.4988e-03) (hash(x)=37259264)
95
+ 770 train 4.582800 (lr=3.4982e-03) (hash(x)=44161997)
96
+ 780 train 4.533280 (lr=3.4975e-03) (hash(x)=41272886)
97
+ 790 train 4.544402 (lr=3.4967e-03) (hash(x)=43303662)
98
+ 800 val loss 4.7309
99
+ 800 val perplexity 113.4008
100
+ 800 train 4.448476 (lr=3.4958e-03) (hash(x)=39067231)
101
+ 810 train 4.684662 (lr=3.4948e-03) (hash(x)=44046732)
102
+ 820 train 4.664653 (lr=3.4936e-03) (hash(x)=39783962)
103
+ 830 train 4.718083 (lr=3.4923e-03) (hash(x)=40973974)
104
+ 840 train 4.695090 (lr=3.4909e-03) (hash(x)=41619968)
105
+ 850 train 4.754364 (lr=3.4894e-03) (hash(x)=36939960)
106
+ 860 train 4.625278 (lr=3.4878e-03) (hash(x)=42942377)
107
+ 870 train 4.633760 (lr=3.4861e-03) (hash(x)=39659455)
108
+ 880 train 4.646001 (lr=3.4842e-03) (hash(x)=40331986)
109
+ 890 train 4.559513 (lr=3.4823e-03) (hash(x)=38084814)
110
+ 900 val loss 4.6005
111
+ 900 val perplexity 99.5353
112
+ 900 train 4.593882 (lr=3.4802e-03) (hash(x)=30830367)
113
+ 910 train 4.499743 (lr=3.4780e-03) (hash(x)=39664356)
114
+ 920 train 4.505427 (lr=3.4757e-03) (hash(x)=39007775)
115
+ 930 train 4.533010 (lr=3.4733e-03) (hash(x)=39319254)
116
+ 940 train 4.586444 (lr=3.4707e-03) (hash(x)=36347051)
117
+ 950 train 4.532959 (lr=3.4681e-03) (hash(x)=39243577)
118
+ 960 train 4.553211 (lr=3.4653e-03) (hash(x)=45603839)
119
+ 970 train 4.364499 (lr=3.4624e-03) (hash(x)=40036075)
120
+ 980 train 4.415587 (lr=3.4594e-03) (hash(x)=43068524)
121
+ 990 train 4.398490 (lr=3.4563e-03) (hash(x)=33798472)
122
+ 1000 val loss 4.5641
123
+ 1000 val perplexity 95.9766
124
+ 1000 train 4.779282 (lr=3.4531e-03) (hash(x)=37728665)
125
+ 1010 train 4.633487 (lr=3.4498e-03) (hash(x)=43678636)
126
+ 1020 train 4.665901 (lr=3.4463e-03) (hash(x)=48464376)
127
+ 1030 train 4.622082 (lr=3.4428e-03) (hash(x)=43771461)
128
+ 1040 train 4.474751 (lr=3.4391e-03) (hash(x)=44643085)
129
+ 1050 train 4.559820 (lr=3.4353e-03) (hash(x)=39201716)
130
+ 1060 train 4.463646 (lr=3.4314e-03) (hash(x)=36396343)
131
+ 1070 train 4.456701 (lr=3.4274e-03) (hash(x)=37504111)
132
+ 1080 train 4.493592 (lr=3.4233e-03) (hash(x)=37454537)
133
+ 1090 train 4.491288 (lr=3.4191e-03) (hash(x)=36888183)
134
+ 1100 val loss 4.4851
135
+ 1100 val perplexity 88.6872
136
+ 1100 train 4.449874 (lr=3.4148e-03) (hash(x)=39549045)
137
+ 1110 train 4.625975 (lr=3.4103e-03) (hash(x)=56818420)
138
+ 1120 train 4.425657 (lr=3.4058e-03) (hash(x)=36573357)
139
+ 1130 train 4.359355 (lr=3.4011e-03) (hash(x)=38540878)
140
+ 1140 train 4.360647 (lr=3.3964e-03) (hash(x)=38645859)
141
+ 1150 train 4.563716 (lr=3.3915e-03) (hash(x)=41972503)
142
+ 1160 train 4.185710 (lr=3.3865e-03) (hash(x)=38151868)
143
+ 1170 train 4.219554 (lr=3.3814e-03) (hash(x)=42375436)
144
+ 1180 train 4.201130 (lr=3.3762e-03) (hash(x)=42868028)
145
+ 1190 train 4.450093 (lr=3.3709e-03) (hash(x)=41060868)
146
+ 1200 val loss 4.4428
147
+ 1200 val perplexity 85.0163
148
+ 1200 train 4.452469 (lr=3.3655e-03) (hash(x)=40852814)
149
+ 1210 train 4.436384 (lr=3.3600e-03) (hash(x)=43365554)
150
+ 1220 train 4.377617 (lr=3.3543e-03) (hash(x)=39121134)
151
+ 1230 train 4.460714 (lr=3.3486e-03) (hash(x)=40565426)
152
+ 1240 train 4.525054 (lr=3.3428e-03) (hash(x)=43378926)
153
+ 1250 train 4.383266 (lr=3.3368e-03) (hash(x)=35227381)
154
+ 1260 train 4.471020 (lr=3.3308e-03) (hash(x)=41267226)
155
+ 1270 train 4.384312 (lr=3.3246e-03) (hash(x)=31395565)
156
+ 1280 train 4.511070 (lr=3.3184e-03) (hash(x)=41470963)
157
+ 1290 train 4.310106 (lr=3.3120e-03) (hash(x)=40729366)
158
+ 1300 val loss 4.3787
159
+ 1300 val perplexity 79.7312
160
+ 1300 train 4.349926 (lr=3.3056e-03) (hash(x)=39916663)
161
+ 1310 train 4.403814 (lr=3.2990e-03) (hash(x)=42179962)
162
+ 1320 train 4.266848 (lr=3.2924e-03) (hash(x)=44991932)
163
+ 1330 train 4.236328 (lr=3.2856e-03) (hash(x)=38925911)
164
+ 1340 train 4.185036 (lr=3.2787e-03) (hash(x)=38565560)
165
+ 1350 train 4.229335 (lr=3.2718e-03) (hash(x)=43384902)
166
+ 1360 train 4.182764 (lr=3.2647e-03) (hash(x)=45422261)
167
+ 1370 train 4.163294 (lr=3.2576e-03) (hash(x)=40167325)
168
+ 1380 train 4.327692 (lr=3.2503e-03) (hash(x)=51258162)
169
+ 1390 train 4.384328 (lr=3.2430e-03) (hash(x)=43229106)
170
+ 1400 val loss 4.3346
171
+ 1400 val perplexity 76.2952
172
+ 1400 train 4.282740 (lr=3.2355e-03) (hash(x)=42537494)
173
+ 1410 train 4.589291 (lr=3.2280e-03) (hash(x)=51236490)
174
+ 1420 train 4.423134 (lr=3.2203e-03) (hash(x)=45243347)
175
+ 1430 train 4.272029 (lr=3.2126e-03) (hash(x)=37658566)
176
+ 1440 train 4.340452 (lr=3.2047e-03) (hash(x)=38856106)
177
+ 1450 train 4.400936 (lr=3.1968e-03) (hash(x)=36649488)
178
+ 1460 train 4.409300 (lr=3.1888e-03) (hash(x)=40445407)
179
+ 1470 train 4.332956 (lr=3.1807e-03) (hash(x)=39916387)
180
+ 1480 train 4.304753 (lr=3.1725e-03) (hash(x)=40934555)
181
+ 1490 train 4.267069 (lr=3.1642e-03) (hash(x)=42276527)
182
+ 1500 val loss 4.3191
183
+ 1500 val perplexity 75.1175
184
+ 1500 train 4.263336 (lr=3.1558e-03) (hash(x)=37973883)
185
+ 1510 train 4.313284 (lr=3.1473e-03) (hash(x)=38929286)
186
+ 1520 train 4.131422 (lr=3.1387e-03) (hash(x)=36855837)
187
+ 1530 train 4.379396 (lr=3.1301e-03) (hash(x)=42146431)
188
+ 1540 train 4.323425 (lr=3.1213e-03) (hash(x)=41898411)
189
+ 1550 train 4.237386 (lr=3.1125e-03) (hash(x)=44906272)
190
+ 1560 train 4.265580 (lr=3.1036e-03) (hash(x)=40414353)
191
+ 1570 train 4.288120 (lr=3.0945e-03) (hash(x)=38723190)
192
+ 1580 train 4.210950 (lr=3.0855e-03) (hash(x)=39065271)
193
+ 1590 train 4.201223 (lr=3.0763e-03) (hash(x)=40000886)
194
+ 1600 val loss 4.2876
195
+ 1600 val perplexity 72.7914
196
+ 1600 train 4.120812 (lr=3.0670e-03) (hash(x)=37898571)
197
+ 1610 train 4.230485 (lr=3.0576e-03) (hash(x)=40447863)
198
+ 1620 train 4.197022 (lr=3.0482e-03) (hash(x)=40119318)
199
+ 1630 train 4.134617 (lr=3.0387e-03) (hash(x)=40664074)
200
+ 1640 train 4.097693 (lr=3.0291e-03) (hash(x)=37404741)
201
+ 1650 train 4.254972 (lr=3.0194e-03) (hash(x)=39214928)
202
+ 1660 train 4.296905 (lr=3.0096e-03) (hash(x)=35005019)
203
+ 1670 train 4.364587 (lr=2.9998e-03) (hash(x)=37675832)
204
+ 1680 train 4.279342 (lr=2.9899e-03) (hash(x)=40489680)
205
+ 1690 train 4.187274 (lr=2.9799e-03) (hash(x)=45537879)
206
+ 1700 val loss 4.2553
207
+ 1700 val perplexity 70.4815
208
+ 1700 train 4.172568 (lr=2.9698e-03) (hash(x)=38866100)
209
+ 1710 train 4.233847 (lr=2.9597e-03) (hash(x)=44726254)
210
+ 1720 train 4.204811 (lr=2.9494e-03) (hash(x)=36917792)
211
+ 1730 train 4.195695 (lr=2.9391e-03) (hash(x)=42814805)
212
+ 1740 train 4.275191 (lr=2.9287e-03) (hash(x)=51812216)
213
+ 1750 train 4.141500 (lr=2.9183e-03) (hash(x)=33549014)
214
+ 1760 train 4.156732 (lr=2.9078e-03) (hash(x)=40354215)
215
+ 1770 train 4.173367 (lr=2.8972e-03) (hash(x)=40718606)
216
+ 1780 train 4.107291 (lr=2.8865e-03) (hash(x)=38274164)
217
+ 1790 train 4.282050 (lr=2.8757e-03) (hash(x)=42531471)
218
+ 1800 val loss 4.2356
219
+ 1800 val perplexity 69.1007
220
+ 1800 train 4.215047 (lr=2.8649e-03) (hash(x)=35616519)
221
+ 1810 train 4.262810 (lr=2.8540e-03) (hash(x)=33803118)
222
+ 1820 train 4.268219 (lr=2.8431e-03) (hash(x)=36973525)
223
+ 1830 train 4.208704 (lr=2.8321e-03) (hash(x)=38162549)
224
+ 1840 train 4.177037 (lr=2.8210e-03) (hash(x)=33937159)
225
+ 1850 train 4.288040 (lr=2.8098e-03) (hash(x)=39887546)
226
+ 1860 train 4.113797 (lr=2.7986e-03) (hash(x)=37818525)
227
+ 1870 train 4.171314 (lr=2.7873e-03) (hash(x)=37250478)
228
+ 1880 train 4.066730 (lr=2.7760e-03) (hash(x)=36412167)
229
+ 1890 train 4.176078 (lr=2.7646e-03) (hash(x)=40261189)
230
+ 1900 val loss 4.2344
231
+ 1900 val perplexity 69.0182
232
+ 1900 train 4.129030 (lr=2.7531e-03) (hash(x)=38654303)
233
+ 1910 train 4.103625 (lr=2.7416e-03) (hash(x)=37929515)
234
+ 1920 train 4.283575 (lr=2.7300e-03) (hash(x)=45580146)
235
+ 1930 train 4.225971 (lr=2.7183e-03) (hash(x)=32386330)
236
+ 1940 train 4.266378 (lr=2.7066e-03) (hash(x)=36331864)
237
+ 1950 train 4.236341 (lr=2.6949e-03) (hash(x)=41181727)
238
+ 1960 train 4.063370 (lr=2.6831e-03) (hash(x)=42705152)
239
+ 1970 train 4.180380 (lr=2.6712e-03) (hash(x)=40251511)
240
+ 1980 train 4.099927 (lr=2.6592e-03) (hash(x)=37525551)
241
+ 1990 train 4.171893 (lr=2.6473e-03) (hash(x)=41849618)
242
+ 2000 val loss 4.1929
243
+ 2000 val perplexity 66.2142
244
+ 2000 train 4.172562 (lr=2.6352e-03) (hash(x)=37642582)
245
+ 2010 train 4.175938 (lr=2.6231e-03) (hash(x)=42131121)
246
+ 2020 train 4.054909 (lr=2.6110e-03) (hash(x)=39000209)
247
+ 2030 train 4.148036 (lr=2.5988e-03) (hash(x)=43641355)
248
+ 2040 train 4.039284 (lr=2.5865e-03) (hash(x)=47910507)
249
+ 2050 train 4.041530 (lr=2.5742e-03) (hash(x)=36670359)
250
+ 2060 train 4.236312 (lr=2.5619e-03) (hash(x)=36477755)
251
+ 2070 train 4.330647 (lr=2.5495e-03) (hash(x)=43262487)
252
+ 2080 train 4.141380 (lr=2.5371e-03) (hash(x)=49546029)
253
+ 2090 train 4.273189 (lr=2.5246e-03) (hash(x)=39924731)
254
+ 2100 val loss 4.1822
255
+ 2100 val perplexity 65.5086
256
+ 2100 train 4.136207 (lr=2.5121e-03) (hash(x)=39921304)
257
+ 2110 train 4.079198 (lr=2.4995e-03) (hash(x)=38152788)
258
+ 2120 train 4.136242 (lr=2.4869e-03) (hash(x)=37977911)
259
+ 2130 train 4.093823 (lr=2.4743e-03) (hash(x)=34748760)
260
+ 2140 train 4.154966 (lr=2.4616e-03) (hash(x)=40509369)
261
+ 2150 train 4.121099 (lr=2.4488e-03) (hash(x)=37654262)
262
+ 2160 train 4.069815 (lr=2.4361e-03) (hash(x)=38139543)
263
+ 2170 train 3.977030 (lr=2.4233e-03) (hash(x)=42501806)
264
+ 2180 train 4.134811 (lr=2.4104e-03) (hash(x)=40085092)
265
+ 2190 train 4.074011 (lr=2.3975e-03) (hash(x)=50480193)
266
+ 2200 val loss 4.1665
267
+ 2200 val perplexity 64.4908
268
+ 2200 train 4.169612 (lr=2.3846e-03) (hash(x)=40604084)
269
+ 2210 train 4.275844 (lr=2.3717e-03) (hash(x)=41555823)
270
+ 2220 train 4.098688 (lr=2.3587e-03) (hash(x)=50441765)
271
+ 2230 train 4.192125 (lr=2.3457e-03) (hash(x)=39796580)
272
+ 2240 train 4.215929 (lr=2.3326e-03) (hash(x)=44127022)
273
+ 2250 train 4.118304 (lr=2.3196e-03) (hash(x)=37026826)
274
+ 2260 train 4.152521 (lr=2.3065e-03) (hash(x)=42133839)
275
+ 2270 train 4.110434 (lr=2.2933e-03) (hash(x)=38500664)
276
+ 2280 train 4.103017 (lr=2.2802e-03) (hash(x)=40538661)
277
+ 2290 train 4.108027 (lr=2.2670e-03) (hash(x)=51509210)
278
+ 2300 val loss 4.1413
279
+ 2300 val perplexity 62.8841
280
+ 2300 train 4.113607 (lr=2.2538e-03) (hash(x)=41952328)
281
+ 2310 train 4.034453 (lr=2.2405e-03) (hash(x)=39758123)
282
+ 2320 train 4.156231 (lr=2.2273e-03) (hash(x)=51089268)
283
+ 2330 train 4.214990 (lr=2.2140e-03) (hash(x)=39767618)
284
+ 2340 train 4.257306 (lr=2.2007e-03) (hash(x)=40409617)
285
+ 2350 train 4.090903 (lr=2.1874e-03) (hash(x)=40349634)
286
+ 2360 train 4.359507 (lr=2.1741e-03) (hash(x)=31841172)
287
+ 2370 train 4.154937 (lr=2.1607e-03) (hash(x)=42720539)
288
+ 2380 train 4.027129 (lr=2.1473e-03) (hash(x)=40998632)
289
+ 2390 train 4.127597 (lr=2.1339e-03) (hash(x)=40615413)
290
+ 2400 val loss 4.1217
291
+ 2400 val perplexity 61.6669
292
+ 2400 train 4.043159 (lr=2.1205e-03) (hash(x)=39373658)
293
+ 2410 train 4.082250 (lr=2.1071e-03) (hash(x)=35480858)
294
+ 2420 train 4.081224 (lr=2.0937e-03) (hash(x)=32241095)
295
+ 2430 train 4.069393 (lr=2.0802e-03) (hash(x)=36669715)
296
+ 2440 train 4.066841 (lr=2.0668e-03) (hash(x)=45768335)
297
+ 2450 train 3.973601 (lr=2.0533e-03) (hash(x)=39395055)
298
+ 2460 train 3.998394 (lr=2.0398e-03) (hash(x)=34899269)
299
+ 2470 train 4.191318 (lr=2.0263e-03) (hash(x)=43642420)
300
+ 2480 train 4.290695 (lr=2.0128e-03) (hash(x)=47099507)
301
+ 2490 train 4.165180 (lr=1.9993e-03) (hash(x)=38825558)
302
+ 2500 val loss 4.1110
303
+ 2500 val perplexity 61.0061
304
+ 2500 train 4.142973 (lr=1.9858e-03) (hash(x)=39833804)
305
+ 2510 train 4.241924 (lr=1.9723e-03) (hash(x)=35051654)
306
+ 2520 train 4.139011 (lr=1.9588e-03) (hash(x)=40291109)
307
+ 2530 train 4.041285 (lr=1.9453e-03) (hash(x)=36915768)
308
+ 2540 train 4.024476 (lr=1.9318e-03) (hash(x)=37016308)
309
+ 2550 train 3.986281 (lr=1.9182e-03) (hash(x)=43221777)
310
+ 2560 train 4.034099 (lr=1.9047e-03) (hash(x)=37233207)
311
+ 2570 train 3.934948 (lr=1.8912e-03) (hash(x)=37510626)
312
+ 2580 train 3.989610 (lr=1.8777e-03) (hash(x)=39624656)
313
+ 2590 train 3.840893 (lr=1.8642e-03) (hash(x)=37647501)
314
+ 2600 val loss 4.0985
315
+ 2600 val perplexity 60.2478
316
+ 2600 train 3.975706 (lr=1.8507e-03) (hash(x)=37200138)
317
+ 2610 train 3.988949 (lr=1.8372e-03) (hash(x)=43066970)
318
+ 2620 train 4.126474 (lr=1.8237e-03) (hash(x)=40503799)
319
+ 2630 train 4.066187 (lr=1.8102e-03) (hash(x)=45534660)
320
+ 2640 train 4.282201 (lr=1.7967e-03) (hash(x)=52358353)
321
+ 2650 train 4.059135 (lr=1.7832e-03) (hash(x)=41843599)
322
+ 2660 train 4.045559 (lr=1.7698e-03) (hash(x)=40519770)
323
+ 2670 train 4.123116 (lr=1.7563e-03) (hash(x)=37185602)
324
+ 2680 train 3.983552 (lr=1.7429e-03) (hash(x)=43805719)
325
+ 2690 train 3.998803 (lr=1.7295e-03) (hash(x)=39458250)
326
+ 2700 val loss 4.0749
327
+ 2700 val perplexity 58.8470
328
+ 2700 train 4.095272 (lr=1.7161e-03) (hash(x)=39149255)
329
+ 2710 train 3.943356 (lr=1.7027e-03) (hash(x)=43544861)
330
+ 2720 train 4.042298 (lr=1.6893e-03) (hash(x)=36858197)
331
+ 2730 train 3.965263 (lr=1.6759e-03) (hash(x)=37971712)
332
+ 2740 train 3.988328 (lr=1.6626e-03) (hash(x)=42416578)
333
+ 2750 train 3.933888 (lr=1.6493e-03) (hash(x)=53554622)
334
+ 2760 train 4.073882 (lr=1.6360e-03) (hash(x)=40502545)
335
+ 2770 train 4.043880 (lr=1.6227e-03) (hash(x)=36086417)
336
+ 2780 train 4.066634 (lr=1.6095e-03) (hash(x)=36242982)
337
+ 2790 train 4.137808 (lr=1.5962e-03) (hash(x)=41433780)
338
+ 2800 val loss 4.0559
339
+ 2800 val perplexity 57.7390
340
+ 2800 train 4.009763 (lr=1.5830e-03) (hash(x)=48553484)
341
+ 2810 train 3.950516 (lr=1.5698e-03) (hash(x)=39928650)
342
+ 2820 train 4.057592 (lr=1.5567e-03) (hash(x)=37891724)
343
+ 2830 train 4.033255 (lr=1.5435e-03) (hash(x)=34353412)
344
+ 2840 train 4.051026 (lr=1.5304e-03) (hash(x)=38486611)
345
+ 2850 train 3.976616 (lr=1.5174e-03) (hash(x)=32706934)
346
+ 2860 train 3.922453 (lr=1.5043e-03) (hash(x)=38693681)
347
+ 2870 train 3.984871 (lr=1.4913e-03) (hash(x)=38430800)
348
+ 2880 train 3.960547 (lr=1.4783e-03) (hash(x)=40619559)
349
+ 2890 train 3.936327 (lr=1.4654e-03) (hash(x)=42741066)
350
+ 2900 val loss 4.0570
351
+ 2900 val perplexity 57.7999
352
+ 2900 train 4.194837 (lr=1.4525e-03) (hash(x)=38216091)
353
+ 2910 train 3.998954 (lr=1.4396e-03) (hash(x)=35447832)
354
+ 2920 train 4.102769 (lr=1.4267e-03) (hash(x)=39825190)
355
+ 2930 train 4.118647 (lr=1.4139e-03) (hash(x)=37789121)
356
+ 2940 train 3.993258 (lr=1.4012e-03) (hash(x)=44499116)
357
+ 2950 train 4.074873 (lr=1.3884e-03) (hash(x)=39669860)
358
+ 2960 train 4.020585 (lr=1.3757e-03) (hash(x)=51683741)
359
+ 2970 train 3.886484 (lr=1.3631e-03) (hash(x)=36399721)
360
+ 2980 train 3.978506 (lr=1.3505e-03) (hash(x)=42629700)
361
+ 2990 train 3.864774 (lr=1.3379e-03) (hash(x)=39263773)
362
+ 3000 val loss 4.0320
363
+ 3000 val perplexity 56.3737
364
+ 3000 train 3.910132 (lr=1.3254e-03) (hash(x)=37920485)
365
+ 3010 train 3.952121 (lr=1.3129e-03) (hash(x)=40835161)
366
+ 3020 train 3.922709 (lr=1.3005e-03) (hash(x)=38381159)
367
+ 3030 train 3.912831 (lr=1.2881e-03) (hash(x)=35846270)
368
+ 3040 train 4.300807 (lr=1.2758e-03) (hash(x)=47516567)
369
+ 3050 train 4.142690 (lr=1.2635e-03) (hash(x)=35600311)
370
+ 3060 train 4.172473 (lr=1.2512e-03) (hash(x)=37775318)
371
+ 3070 train 4.026793 (lr=1.2390e-03) (hash(x)=39881333)
372
+ 3080 train 4.031108 (lr=1.2269e-03) (hash(x)=37310168)
373
+ 3090 train 3.959684 (lr=1.2148e-03) (hash(x)=43669978)
374
+ 3100 val loss 4.0211
375
+ 3100 val perplexity 55.7612
376
+ 3100 train 4.424047 (lr=1.2027e-03) (hash(x)=46356797)
377
+ 3110 train 4.058935 (lr=1.1908e-03) (hash(x)=40814232)
378
+ 3120 train 3.904812 (lr=1.1788e-03) (hash(x)=42504837)
379
+ 3130 train 4.039184 (lr=1.1669e-03) (hash(x)=39693594)
380
+ 3140 train 3.946033 (lr=1.1551e-03) (hash(x)=40277645)
381
+ 3150 train 3.949246 (lr=1.1434e-03) (hash(x)=40988003)
382
+ 3160 train 4.072472 (lr=1.1317e-03) (hash(x)=38875266)
383
+ 3170 train 4.025267 (lr=1.1200e-03) (hash(x)=45232173)
384
+ 3180 train 4.078420 (lr=1.1084e-03) (hash(x)=39213336)
385
+ 3190 train 4.135744 (lr=1.0969e-03) (hash(x)=42118576)
386
+ 3200 val loss 3.9972
387
+ 3200 val perplexity 54.4438
388
+ 3200 train 3.996798 (lr=1.0854e-03) (hash(x)=32884223)
389
+ 3210 train 4.034976 (lr=1.0740e-03) (hash(x)=41276800)
390
+ 3220 train 4.086184 (lr=1.0627e-03) (hash(x)=40284461)
391
+ 3230 train 4.063759 (lr=1.0514e-03) (hash(x)=40566734)
392
+ 3240 train 4.054878 (lr=1.0402e-03) (hash(x)=36484570)
393
+ 3250 train 4.015331 (lr=1.0290e-03) (hash(x)=41642338)
394
+ 3260 train 3.983226 (lr=1.0179e-03) (hash(x)=43883570)
395
+ 3270 train 3.918281 (lr=1.0069e-03) (hash(x)=40432560)
396
+ 3280 train 3.889582 (lr=9.9596e-04) (hash(x)=38002717)
397
+ 3290 train 3.832526 (lr=9.8508e-04) (hash(x)=41926004)
398
+ 3300 val loss 3.9898
399
+ 3300 val perplexity 54.0445
400
+ 3300 train 3.994999 (lr=9.7426e-04) (hash(x)=46015509)
401
+ 3310 train 4.030125 (lr=9.6352e-04) (hash(x)=40112249)
402
+ 3320 train 4.000380 (lr=9.5285e-04) (hash(x)=49162296)
403
+ 3330 train 4.061421 (lr=9.4225e-04) (hash(x)=46606969)
404
+ 3340 train 4.080829 (lr=9.3172e-04) (hash(x)=31128992)
405
+ 3350 train 4.057094 (lr=9.2127e-04) (hash(x)=41232534)
406
+ 3360 train 3.899421 (lr=9.1088e-04) (hash(x)=41599699)
407
+ 3370 train 4.071400 (lr=9.0058e-04) (hash(x)=40885280)
408
+ 3380 train 3.983772 (lr=8.9035e-04) (hash(x)=37169148)
409
+ 3390 train 3.892005 (lr=8.8019e-04) (hash(x)=46581889)
410
+ 3400 val loss 3.9766
411
+ 3400 val perplexity 53.3365
412
+ 3400 train 3.940259 (lr=8.7012e-04) (hash(x)=37612074)
413
+ 3410 train 4.012302 (lr=8.6012e-04) (hash(x)=38763316)
414
+ 3420 train 3.836145 (lr=8.5019e-04) (hash(x)=40562379)
415
+ 3430 train 3.932541 (lr=8.4035e-04) (hash(x)=44724867)
416
+ 3440 train 4.137836 (lr=8.3059e-04) (hash(x)=39386624)
417
+ 3450 train 4.095798 (lr=8.2091e-04) (hash(x)=38652923)
418
+ 3460 train 4.073486 (lr=8.1131e-04) (hash(x)=29587379)
419
+ 3470 train 3.976187 (lr=8.0179e-04) (hash(x)=38228776)
420
+ 3480 train 3.952075 (lr=7.9235e-04) (hash(x)=30076039)
421
+ 3490 train 4.015835 (lr=7.8300e-04) (hash(x)=38398908)
422
+ 3500 val loss 3.9629
423
+ 3500 val perplexity 52.6097
424
+ 3500 train 4.029348 (lr=7.7373e-04) (hash(x)=39259918)
425
+ 3510 train 3.930116 (lr=7.6455e-04) (hash(x)=40664091)
426
+ 3520 train 3.960013 (lr=7.5545e-04) (hash(x)=40309647)
427
+ 3530 train 3.926680 (lr=7.4644e-04) (hash(x)=38424801)
428
+ 3540 train 3.851492 (lr=7.3752e-04) (hash(x)=51322307)
429
+ 3550 train 3.915296 (lr=7.2868e-04) (hash(x)=38192628)
430
+ 3560 train 3.969600 (lr=7.1993e-04) (hash(x)=41563952)
431
+ 3570 train 3.917771 (lr=7.1127e-04) (hash(x)=39508843)
432
+ 3580 train 3.989493 (lr=7.0270e-04) (hash(x)=41260225)
433
+ 3590 train 3.972328 (lr=6.9422e-04) (hash(x)=49098107)
434
+ 3600 val loss 3.9519
435
+ 3600 val perplexity 52.0350
436
+ 3600 train 3.957163 (lr=6.8583e-04) (hash(x)=41194370)
437
+ 3610 train 4.073334 (lr=6.7753e-04) (hash(x)=40399152)
438
+ 3620 train 4.036810 (lr=6.6932e-04) (hash(x)=41625018)
439
+ 3630 train 4.057173 (lr=6.6121e-04) (hash(x)=44077942)
440
+ 3640 train 4.054482 (lr=6.5319e-04) (hash(x)=42427512)
441
+ 3650 train 4.037042 (lr=6.4526e-04) (hash(x)=40067454)
442
+ 3660 train 3.959332 (lr=6.3743e-04) (hash(x)=37969892)
443
+ 3670 train 3.932523 (lr=6.2969e-04) (hash(x)=43670246)
444
+ 3680 train 4.011621 (lr=6.2204e-04) (hash(x)=37031732)
445
+ 3690 train 3.889862 (lr=6.1450e-04) (hash(x)=39237187)
446
+ 3700 val loss 3.9462
447
+ 3700 val perplexity 51.7400
448
+ 3700 train 3.929801 (lr=6.0705e-04) (hash(x)=39521416)
449
+ 3710 train 3.824125 (lr=5.9969e-04) (hash(x)=41147425)
450
+ 3720 train 3.814727 (lr=5.9244e-04) (hash(x)=40204658)
451
+ 3730 train 3.909194 (lr=5.8528e-04) (hash(x)=39420983)
452
+ 3740 train 3.958788 (lr=5.7822e-04) (hash(x)=51155740)
453
+ 3750 train 3.943041 (lr=5.7126e-04) (hash(x)=41654187)
454
+ 3760 train 3.906280 (lr=5.6440e-04) (hash(x)=40598339)
455
+ 3770 train 3.870958 (lr=5.5764e-04) (hash(x)=44950116)
456
+ 3780 train 3.927697 (lr=5.5098e-04) (hash(x)=40660326)
457
+ 3790 train 3.972531 (lr=5.4443e-04) (hash(x)=37805851)
458
+ 3800 val loss 3.9276
459
+ 3800 val perplexity 50.7862
460
+ 3800 train 3.946080 (lr=5.3797e-04) (hash(x)=38064443)
461
+ 3810 train 4.014811 (lr=5.3162e-04) (hash(x)=39161498)
462
+ 3820 train 3.861611 (lr=5.2537e-04) (hash(x)=40626649)
463
+ 3830 train 3.935430 (lr=5.1922e-04) (hash(x)=36894771)
464
+ 3840 train 3.872973 (lr=5.1317e-04) (hash(x)=33672141)
465
+ 3850 train 3.835094 (lr=5.0723e-04) (hash(x)=41751813)
466
+ 3860 train 3.714859 (lr=5.0140e-04) (hash(x)=33784172)
467
+ 3870 train 3.638690 (lr=4.9567e-04) (hash(x)=43110493)
468
+ 3880 train 3.737185 (lr=4.9004e-04) (hash(x)=36585111)
469
+ 3890 train 3.743103 (lr=4.8452e-04) (hash(x)=40134264)
470
+ 3900 val loss 3.9357
471
+ 3900 val perplexity 51.1996
472
+ 3900 train 3.742950 (lr=4.7911e-04) (hash(x)=39815215)
473
+ 3910 train 3.874100 (lr=4.7380e-04) (hash(x)=42081557)
474
+ 3920 train 3.817131 (lr=4.6860e-04) (hash(x)=48184973)
475
+ 3930 train 4.009429 (lr=4.6351e-04) (hash(x)=42416681)
476
+ 3940 train 3.930143 (lr=4.5852e-04) (hash(x)=38587062)
477
+ 3950 train 3.930746 (lr=4.5364e-04) (hash(x)=37296617)
478
+ 3960 train 3.903465 (lr=4.4888e-04) (hash(x)=37959330)
479
+ 3970 train 3.928712 (lr=4.4422e-04) (hash(x)=35175634)
480
+ 3980 train 3.979532 (lr=4.3966e-04) (hash(x)=37219362)
481
+ 3990 train 3.999736 (lr=4.3522e-04) (hash(x)=38906132)
482
+ 4000 val loss 3.9122
483
+ 4000 val perplexity 50.0071
484
+ 4000 train 3.924122 (lr=4.3089e-04) (hash(x)=39940517)
485
+ 4010 train 3.934385 (lr=4.2667e-04) (hash(x)=39952437)
486
+ 4020 train 3.853545 (lr=4.2256e-04) (hash(x)=42000930)
487
+ 4030 train 3.866506 (lr=4.1856e-04) (hash(x)=41460672)
488
+ 4040 train 3.607843 (lr=4.1467e-04) (hash(x)=43702329)
489
+ 4050 train 3.706838 (lr=4.1089e-04) (hash(x)=41740121)
490
+ 4060 train 3.704847 (lr=4.0722e-04) (hash(x)=37066349)
491
+ 4070 train 3.548918 (lr=4.0367e-04) (hash(x)=38359869)
492
+ 4080 train 3.792638 (lr=4.0022e-04) (hash(x)=35131370)
493
+ 4090 train 3.851109 (lr=3.9689e-04) (hash(x)=39918811)
494
+ 4100 val loss 3.9105
495
+ 4100 val perplexity 49.9238
496
+ 4100 train 4.009623 (lr=3.9368e-04) (hash(x)=47036374)
497
+ 4110 train 3.928993 (lr=3.9057e-04) (hash(x)=37657523)
498
+ 4120 train 3.977993 (lr=3.8758e-04) (hash(x)=37218703)
499
+ 4130 train 3.988193 (lr=3.8470e-04) (hash(x)=41350513)
500
+ 4140 train 4.014175 (lr=3.8193e-04) (hash(x)=38283785)
501
+ 4150 train 3.997081 (lr=3.7928e-04) (hash(x)=32927892)
502
+ 4160 train 3.872600 (lr=3.7674e-04) (hash(x)=42011933)
503
+ 4170 train 3.894452 (lr=3.7432e-04) (hash(x)=41854594)
504
+ 4180 train 3.907142 (lr=3.7201e-04) (hash(x)=36737064)
505
+ 4190 train 3.816070 (lr=3.6982e-04) (hash(x)=36326176)
506
+ 4200 val loss 3.9030
507
+ 4200 val perplexity 49.5516
508
+ 4200 train 3.888032 (lr=3.6774e-04) (hash(x)=39106683)
509
+ 4210 train 3.935813 (lr=3.6577e-04) (hash(x)=39759977)
510
+ 4220 train 3.680127 (lr=3.6392e-04) (hash(x)=43666151)
511
+ 4230 train 3.633502 (lr=3.6218e-04) (hash(x)=38154367)
512
+ 4240 train 3.739508 (lr=3.6056e-04) (hash(x)=38284168)
513
+ 4250 train 3.677848 (lr=3.5906e-04) (hash(x)=38888879)
514
+ 4260 train 3.586399 (lr=3.5767e-04) (hash(x)=36846417)
515
+ 4270 train 3.996838 (lr=3.5639e-04) (hash(x)=43125375)
516
+ 4280 train 3.953469 (lr=3.5523e-04) (hash(x)=43556412)
517
+ 4290 train 3.885408 (lr=3.5419e-04) (hash(x)=48598302)
518
+ 4300 val loss 3.8968
519
+ 4300 val perplexity 49.2459
520
+ 4300 train 3.863606 (lr=3.5326e-04) (hash(x)=42764385)
521
+ 4310 train 3.917937 (lr=3.5245e-04) (hash(x)=41190028)
522
+ 4320 train 3.887140 (lr=3.5175e-04) (hash(x)=35767770)
523
+ 4330 train 3.963176 (lr=3.5117e-04) (hash(x)=36760476)
524
+ 4340 train 3.907284 (lr=3.5071e-04) (hash(x)=40618921)
525
+ 4350 train 4.008026 (lr=3.5036e-04) (hash(x)=32300164)
526
+ 4360 train 3.861001 (lr=3.5013e-04) (hash(x)=41400085)
527
+ 4370 train 3.897710 (lr=3.5001e-04) (hash(x)=40251828)
528
+ 4374 val loss 3.8979
529
+ 4374 val perplexity 49.2975
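log2.txt follows a fixed per-line format: "<step> train <loss> (lr=<lr>) (hash(x)=<hash>)" for training steps, plus "<step> val loss <v>" and "<step> val perplexity <p>" every 100 steps, where the logged perplexity is exp(val loss) (exp(11.2769) ≈ 78975.6 at step 0, exp(3.8979) ≈ 49.30 at step 4374). The lr column shows a linear warmup to max_lr = 3.5e-3, reached around step 720, followed by a decay to roughly max_lr/10 by the final step. A minimal parsing sketch, assuming every metric line matches one of these formats:

import math
import re

train_re = re.compile(r"^(\d+) train ([\d.]+) \(lr=([\d.e+-]+)\)")
val_re = re.compile(r"^(\d+) val loss ([\d.]+)")

train_points, val_points = [], []
with open("logs/fix_1_latent_mask/"
          "1_latent_mask_lr_35e-4_n_latent_masks_1_relu_seed_1340/log2.txt") as f:
    for line in f:
        if m := train_re.match(line):
            # (step, train loss, learning rate)
            train_points.append((int(m[1]), float(m[2]), float(m[3])))
        elif m := val_re.match(line):
            # (step, validation loss)
            val_points.append((int(m[1]), float(m[2])))

step, loss = val_points[-1]
print(step, loss, math.exp(loss))  # 4374 3.8979 ~49.30, matching the logged perplexity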
logs/fix_1_latent_mask/1_latent_mask_lr_35e-4_n_latent_masks_1_relu_seed_1340/model_04374.pt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fc9932f183dec84b738545dbe3f85dca553e3d351b309ec506bb3b1bc794fb8a
3
+ size 97706546
logs/fix_1_latent_mask/1_latent_mask_lr_35e-4_n_latent_masks_1_relu_seed_1340/optimizer_04374.pt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:73604eb1b751b79fdf3a8c4643f3a9aec2b9c308f64d6339f3f0acd4e051409a
3
+ size 189135414
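model_04374.pt and optimizer_04374.pt are the step-4374 checkpoints, again stored as LFS pointers (roughly 98 MB and 189 MB). Once downloaded they should be loadable with torch.load; a hedged sketch — the internal layout of each checkpoint (plain state_dict vs. a wrapper dict with extra metadata) is not visible from this commit, so the inspection below is only exploratory:

import torch

ckpt_dir = ("logs/fix_1_latent_mask/"
            "1_latent_mask_lr_35e-4_n_latent_masks_1_relu_seed_1340")

# Load onto CPU so no GPU is needed just to inspect the files.
# (On newer PyTorch versions, pickled non-tensor objects may additionally
# require passing weights_only=False.)
model_state = torch.load(f"{ckpt_dir}/model_04374.pt", map_location="cpu")
optim_state = torch.load(f"{ckpt_dir}/optimizer_04374.pt", map_location="cpu")

# Assumption: both objects are dicts; print their top-level keys to see the layout.
if isinstance(model_state, dict):
    print(list(model_state)[:10])
if isinstance(optim_state, dict):
    print(list(optim_state)[:10])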