andrew-healey committed
Commit 5f085b9 · verified · 1 Parent(s): 711e2dc

Upload folder using huggingface_hub

logs/fix_1_latent_mask/1_latent_mask_lr_25e-4_n_latent_masks_2_relu_seed_1340/args.json ADDED
@@ -0,0 +1 @@
1
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "logs/fix_1_latent_mask/1_latent_mask_lr_25e-4_n_latent_masks_2_relu_seed_1340", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 12, "n_embd": 264, "head_dim": 22, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 4375, "warmup_steps": 250, "group": "fix_1_latent_mask", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1340, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "n_latent_masks", "selection_head_linear_combo_scale": 1.0, "disable_selection_head_linear_combo_bias": false, "assert_latent_matches_no_head": false, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 32, "total_batch_size": 131072, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": 2, "init_latent_masks_to_identity": true, "latent_mask_scale": null, "latent_mask_sigmoid": false, "S_layernorm": false, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.0025, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "1_latent_mask_lr_25e-4_n_latent_masks_2_relu"}
logs/fix_1_latent_mask/1_latent_mask_lr_25e-4_n_latent_masks_2_relu_seed_1340/dataloader_04374.pt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6031fd3e2855a036f7a5531cc24555aabd1115f9dd6618b8b2ca6f55279ef0b2
3
+ size 964
logs/fix_1_latent_mask/1_latent_mask_lr_25e-4_n_latent_masks_2_relu_seed_1340/log2.txt ADDED
@@ -0,0 +1,529 @@
1
+ max_steps: 4375
2
+ 0 val loss 11.2068
3
+ 0 val perplexity 73626.9297
4
+ 0 train 11.205775 (lr=3.4965e-06) (hash(x)=45482580)
5
+ 10 train 9.890842 (lr=3.8462e-05) (hash(x)=38414730)
6
+ 20 train 9.453115 (lr=7.3427e-05) (hash(x)=39783917)
7
+ 30 train 8.800729 (lr=1.0839e-04) (hash(x)=38395733)
8
+ 40 train 8.090753 (lr=1.4336e-04) (hash(x)=37486707)
9
+ 50 train 7.688906 (lr=1.7832e-04) (hash(x)=39897505)
10
+ 60 train 7.564517 (lr=2.1329e-04) (hash(x)=40933473)
11
+ 70 train 7.572178 (lr=2.4825e-04) (hash(x)=37690746)
12
+ 80 train 7.473712 (lr=2.8322e-04) (hash(x)=38609656)
13
+ 90 train 7.251760 (lr=3.1818e-04) (hash(x)=38356571)
14
+ 100 val loss 7.1472
15
+ 100 val perplexity 1270.5321
16
+ 100 train 7.125655 (lr=3.5315e-04) (hash(x)=35980376)
17
+ 110 train 7.009854 (lr=3.8811e-04) (hash(x)=42011042)
18
+ 120 train 6.897563 (lr=4.2308e-04) (hash(x)=40266823)
19
+ 130 train 6.741284 (lr=4.5804e-04) (hash(x)=38645447)
20
+ 140 train 6.620826 (lr=4.9301e-04) (hash(x)=36059313)
21
+ 150 train 6.591356 (lr=5.2797e-04) (hash(x)=34161947)
22
+ 160 train 6.461844 (lr=5.6294e-04) (hash(x)=42263375)
23
+ 170 train 6.501694 (lr=5.9790e-04) (hash(x)=42957725)
24
+ 180 train 6.518191 (lr=6.3287e-04) (hash(x)=40198018)
25
+ 190 train 6.243148 (lr=6.6783e-04) (hash(x)=41666215)
26
+ 200 val loss 6.2311
27
+ 200 val perplexity 508.2964
28
+ 200 train 6.194659 (lr=7.0280e-04) (hash(x)=54060482)
29
+ 210 train 6.105186 (lr=7.3776e-04) (hash(x)=42332778)
30
+ 220 train 6.035253 (lr=7.7273e-04) (hash(x)=39551486)
31
+ 230 train 6.047264 (lr=8.0769e-04) (hash(x)=38462018)
32
+ 240 train 6.096400 (lr=8.4266e-04) (hash(x)=36591442)
33
+ 250 train 6.027749 (lr=8.7762e-04) (hash(x)=40861237)
34
+ 260 train 5.974708 (lr=9.1259e-04) (hash(x)=41739369)
35
+ 270 train 5.840558 (lr=9.4755e-04) (hash(x)=39415360)
36
+ 280 train 5.840067 (lr=9.8252e-04) (hash(x)=44665934)
37
+ 290 train 5.753973 (lr=1.0175e-03) (hash(x)=37035579)
38
+ 300 val loss 5.7466
39
+ 300 val perplexity 313.1129
40
+ 300 train 5.773964 (lr=1.0524e-03) (hash(x)=38301011)
41
+ 310 train 5.604574 (lr=1.0874e-03) (hash(x)=42850980)
42
+ 320 train 5.682619 (lr=1.1224e-03) (hash(x)=37476222)
43
+ 330 train 5.589697 (lr=1.1573e-03) (hash(x)=53028205)
44
+ 340 train 5.617637 (lr=1.1923e-03) (hash(x)=41466008)
45
+ 350 train 5.513399 (lr=1.2273e-03) (hash(x)=37802865)
46
+ 360 train 5.444917 (lr=1.2622e-03) (hash(x)=42769282)
47
+ 370 train 5.490838 (lr=1.2972e-03) (hash(x)=39319256)
48
+ 380 train 5.481611 (lr=1.3322e-03) (hash(x)=42637402)
49
+ 390 train 5.441113 (lr=1.3671e-03) (hash(x)=31141514)
50
+ 400 val loss 5.4378
51
+ 400 val perplexity 229.9352
52
+ 400 train 5.513298 (lr=1.4021e-03) (hash(x)=38151157)
53
+ 410 train 5.439150 (lr=1.4371e-03) (hash(x)=46033439)
54
+ 420 train 5.421170 (lr=1.4720e-03) (hash(x)=41365246)
55
+ 430 train 5.402006 (lr=1.5070e-03) (hash(x)=42369184)
56
+ 440 train 5.403897 (lr=1.5420e-03) (hash(x)=42004840)
57
+ 450 train 5.316057 (lr=1.5769e-03) (hash(x)=37181172)
58
+ 460 train 5.402740 (lr=1.6119e-03) (hash(x)=31630797)
59
+ 470 train 5.270374 (lr=1.6469e-03) (hash(x)=42135747)
60
+ 480 train 5.196032 (lr=1.6818e-03) (hash(x)=30023651)
61
+ 490 train 5.267048 (lr=1.7168e-03) (hash(x)=40463476)
62
+ 500 val loss 5.2245
63
+ 500 val perplexity 185.7718
64
+ 500 train 5.142830 (lr=1.7517e-03) (hash(x)=37089842)
65
+ 510 train 5.172961 (lr=1.7867e-03) (hash(x)=41095974)
66
+ 520 train 5.213730 (lr=1.8217e-03) (hash(x)=41525258)
67
+ 530 train 5.196990 (lr=1.8566e-03) (hash(x)=37697291)
68
+ 540 train 5.223812 (lr=1.8916e-03) (hash(x)=43459894)
69
+ 550 train 5.084095 (lr=1.9266e-03) (hash(x)=38993956)
70
+ 560 train 5.108402 (lr=1.9615e-03) (hash(x)=40677277)
71
+ 570 train 5.170212 (lr=1.9965e-03) (hash(x)=43103127)
72
+ 580 train 5.093946 (lr=2.0315e-03) (hash(x)=41871776)
73
+ 590 train 5.093638 (lr=2.0664e-03) (hash(x)=43230967)
74
+ 600 val loss 5.0111
75
+ 600 val perplexity 150.0676
76
+ 600 train 5.050827 (lr=2.1014e-03) (hash(x)=42226172)
77
+ 610 train 5.406841 (lr=2.1364e-03) (hash(x)=43007164)
78
+ 620 train 4.924475 (lr=2.1713e-03) (hash(x)=35752341)
79
+ 630 train 4.987593 (lr=2.2063e-03) (hash(x)=39453504)
80
+ 640 train 4.924506 (lr=2.2413e-03) (hash(x)=43635712)
81
+ 650 train 4.922103 (lr=2.2762e-03) (hash(x)=41852456)
82
+ 660 train 4.969440 (lr=2.3112e-03) (hash(x)=49125742)
83
+ 670 train 4.949284 (lr=2.3462e-03) (hash(x)=34886275)
84
+ 680 train 5.043017 (lr=2.3811e-03) (hash(x)=40546482)
85
+ 690 train 4.841289 (lr=2.4161e-03) (hash(x)=40850901)
86
+ 700 val loss 4.8359
87
+ 700 val perplexity 125.9572
88
+ 700 train 4.783812 (lr=2.4510e-03) (hash(x)=37426993)
89
+ 710 train 4.971233 (lr=2.4860e-03) (hash(x)=42679743)
90
+ 720 train 4.784430 (lr=2.5000e-03) (hash(x)=44856043)
91
+ 730 train 4.772947 (lr=2.4999e-03) (hash(x)=36457471)
92
+ 740 train 4.734918 (lr=2.4997e-03) (hash(x)=47203534)
93
+ 750 train 4.752731 (lr=2.4995e-03) (hash(x)=35585657)
94
+ 760 train 4.669024 (lr=2.4992e-03) (hash(x)=37259264)
95
+ 770 train 4.579114 (lr=2.4987e-03) (hash(x)=44161997)
96
+ 780 train 4.518903 (lr=2.4982e-03) (hash(x)=41272886)
97
+ 790 train 4.523287 (lr=2.4977e-03) (hash(x)=43303662)
98
+ 800 val loss 4.7180
99
+ 800 val perplexity 111.9428
100
+ 800 train 4.428425 (lr=2.4970e-03) (hash(x)=39067231)
101
+ 810 train 4.664501 (lr=2.4963e-03) (hash(x)=44046732)
102
+ 820 train 4.650214 (lr=2.4954e-03) (hash(x)=39783962)
103
+ 830 train 4.705337 (lr=2.4945e-03) (hash(x)=40973974)
104
+ 840 train 4.677117 (lr=2.4935e-03) (hash(x)=41619968)
105
+ 850 train 4.725117 (lr=2.4925e-03) (hash(x)=36939960)
106
+ 860 train 4.613496 (lr=2.4913e-03) (hash(x)=42942377)
107
+ 870 train 4.612566 (lr=2.4901e-03) (hash(x)=39659455)
108
+ 880 train 4.629526 (lr=2.4887e-03) (hash(x)=40331986)
109
+ 890 train 4.537940 (lr=2.4873e-03) (hash(x)=38084814)
110
+ 900 val loss 4.5727
111
+ 900 val perplexity 96.8069
112
+ 900 train 4.565434 (lr=2.4858e-03) (hash(x)=30830367)
113
+ 910 train 4.475521 (lr=2.4843e-03) (hash(x)=39664356)
114
+ 920 train 4.480733 (lr=2.4826e-03) (hash(x)=39007775)
115
+ 930 train 4.505388 (lr=2.4809e-03) (hash(x)=39319254)
116
+ 940 train 4.547891 (lr=2.4791e-03) (hash(x)=36347051)
117
+ 950 train 4.504954 (lr=2.4772e-03) (hash(x)=39243577)
118
+ 960 train 4.529503 (lr=2.4752e-03) (hash(x)=45603839)
119
+ 970 train 4.336865 (lr=2.4732e-03) (hash(x)=40036075)
120
+ 980 train 4.385392 (lr=2.4710e-03) (hash(x)=43068524)
121
+ 990 train 4.376199 (lr=2.4688e-03) (hash(x)=33798472)
122
+ 1000 val loss 4.5354
123
+ 1000 val perplexity 93.2614
124
+ 1000 train 4.744228 (lr=2.4665e-03) (hash(x)=37728665)
125
+ 1010 train 4.616531 (lr=2.4641e-03) (hash(x)=43678636)
126
+ 1020 train 4.631880 (lr=2.4617e-03) (hash(x)=48464376)
127
+ 1030 train 4.593956 (lr=2.4591e-03) (hash(x)=43771461)
128
+ 1040 train 4.454466 (lr=2.4565e-03) (hash(x)=44643085)
129
+ 1050 train 4.537310 (lr=2.4538e-03) (hash(x)=39201716)
130
+ 1060 train 4.442482 (lr=2.4510e-03) (hash(x)=36396343)
131
+ 1070 train 4.441087 (lr=2.4482e-03) (hash(x)=37504111)
132
+ 1080 train 4.473013 (lr=2.4452e-03) (hash(x)=37454537)
133
+ 1090 train 4.460074 (lr=2.4422e-03) (hash(x)=36888183)
134
+ 1100 val loss 4.4564
135
+ 1100 val perplexity 86.1788
136
+ 1100 train 4.401981 (lr=2.4391e-03) (hash(x)=39549045)
137
+ 1110 train 4.589377 (lr=2.4360e-03) (hash(x)=56818420)
138
+ 1120 train 4.401405 (lr=2.4327e-03) (hash(x)=36573357)
139
+ 1130 train 4.335915 (lr=2.4294e-03) (hash(x)=38540878)
140
+ 1140 train 4.334489 (lr=2.4260e-03) (hash(x)=38645859)
141
+ 1150 train 4.544632 (lr=2.4225e-03) (hash(x)=41972503)
142
+ 1160 train 4.155989 (lr=2.4189e-03) (hash(x)=38151868)
143
+ 1170 train 4.192753 (lr=2.4153e-03) (hash(x)=42375436)
144
+ 1180 train 4.172551 (lr=2.4116e-03) (hash(x)=42868028)
145
+ 1190 train 4.426139 (lr=2.4078e-03) (hash(x)=41060868)
146
+ 1200 val loss 4.4175
147
+ 1200 val perplexity 82.8882
148
+ 1200 train 4.431848 (lr=2.4039e-03) (hash(x)=40852814)
149
+ 1210 train 4.407787 (lr=2.4000e-03) (hash(x)=43365554)
150
+ 1220 train 4.352692 (lr=2.3960e-03) (hash(x)=39121134)
151
+ 1230 train 4.440488 (lr=2.3919e-03) (hash(x)=40565426)
152
+ 1240 train 4.495507 (lr=2.3877e-03) (hash(x)=43378926)
153
+ 1250 train 4.364605 (lr=2.3834e-03) (hash(x)=35227381)
154
+ 1260 train 4.454797 (lr=2.3791e-03) (hash(x)=41267226)
155
+ 1270 train 4.358776 (lr=2.3747e-03) (hash(x)=31395565)
156
+ 1280 train 4.467421 (lr=2.3703e-03) (hash(x)=41470963)
157
+ 1290 train 4.281929 (lr=2.3657e-03) (hash(x)=40729366)
158
+ 1300 val loss 4.3555
159
+ 1300 val perplexity 77.9088
160
+ 1300 train 4.325957 (lr=2.3611e-03) (hash(x)=39916663)
161
+ 1310 train 4.376181 (lr=2.3564e-03) (hash(x)=42179962)
162
+ 1320 train 4.244958 (lr=2.3517e-03) (hash(x)=44991932)
163
+ 1330 train 4.216087 (lr=2.3469e-03) (hash(x)=38925911)
164
+ 1340 train 4.161520 (lr=2.3420e-03) (hash(x)=38565560)
165
+ 1350 train 4.203225 (lr=2.3370e-03) (hash(x)=43384902)
166
+ 1360 train 4.155228 (lr=2.3319e-03) (hash(x)=45422261)
167
+ 1370 train 4.138535 (lr=2.3268e-03) (hash(x)=40167325)
168
+ 1380 train 4.291839 (lr=2.3216e-03) (hash(x)=51258162)
169
+ 1390 train 4.356454 (lr=2.3164e-03) (hash(x)=43229106)
170
+ 1400 val loss 4.3101
171
+ 1400 val perplexity 74.4511
172
+ 1400 train 4.258812 (lr=2.3111e-03) (hash(x)=42537494)
173
+ 1410 train 4.568239 (lr=2.3057e-03) (hash(x)=51236490)
174
+ 1420 train 4.403322 (lr=2.3002e-03) (hash(x)=45243347)
175
+ 1430 train 4.242885 (lr=2.2947e-03) (hash(x)=37658566)
176
+ 1440 train 4.312455 (lr=2.2891e-03) (hash(x)=38856106)
177
+ 1450 train 4.377654 (lr=2.2834e-03) (hash(x)=36649488)
178
+ 1460 train 4.377281 (lr=2.2777e-03) (hash(x)=40445407)
179
+ 1470 train 4.306540 (lr=2.2719e-03) (hash(x)=39916387)
180
+ 1480 train 4.275320 (lr=2.2661e-03) (hash(x)=40934555)
181
+ 1490 train 4.241853 (lr=2.2601e-03) (hash(x)=42276527)
182
+ 1500 val loss 4.2910
183
+ 1500 val perplexity 73.0429
184
+ 1500 train 4.229302 (lr=2.2541e-03) (hash(x)=37973883)
185
+ 1510 train 4.296679 (lr=2.2481e-03) (hash(x)=38929286)
186
+ 1520 train 4.109448 (lr=2.2420e-03) (hash(x)=36855837)
187
+ 1530 train 4.357996 (lr=2.2358e-03) (hash(x)=42146431)
188
+ 1540 train 4.299703 (lr=2.2295e-03) (hash(x)=41898411)
189
+ 1550 train 4.210162 (lr=2.2232e-03) (hash(x)=44906272)
190
+ 1560 train 4.236172 (lr=2.2168e-03) (hash(x)=40414353)
191
+ 1570 train 4.265667 (lr=2.2104e-03) (hash(x)=38723190)
192
+ 1580 train 4.183865 (lr=2.2039e-03) (hash(x)=39065271)
193
+ 1590 train 4.176035 (lr=2.1973e-03) (hash(x)=40000886)
194
+ 1600 val loss 4.2592
195
+ 1600 val perplexity 70.7523
196
+ 1600 train 4.092718 (lr=2.1907e-03) (hash(x)=37898571)
197
+ 1610 train 4.203907 (lr=2.1840e-03) (hash(x)=40447863)
198
+ 1620 train 4.177111 (lr=2.1773e-03) (hash(x)=40119318)
199
+ 1630 train 4.109319 (lr=2.1705e-03) (hash(x)=40664074)
200
+ 1640 train 4.077795 (lr=2.1636e-03) (hash(x)=37404741)
201
+ 1650 train 4.234222 (lr=2.1567e-03) (hash(x)=39214928)
202
+ 1660 train 4.267488 (lr=2.1497e-03) (hash(x)=35005019)
203
+ 1670 train 4.319953 (lr=2.1427e-03) (hash(x)=37675832)
204
+ 1680 train 4.242300 (lr=2.1356e-03) (hash(x)=40489680)
205
+ 1690 train 4.160681 (lr=2.1285e-03) (hash(x)=45537879)
206
+ 1700 val loss 4.2286
207
+ 1700 val perplexity 68.6239
208
+ 1700 train 4.145211 (lr=2.1213e-03) (hash(x)=38866100)
209
+ 1710 train 4.212273 (lr=2.1140e-03) (hash(x)=44726254)
210
+ 1720 train 4.185357 (lr=2.1067e-03) (hash(x)=36917792)
211
+ 1730 train 4.172176 (lr=2.0994e-03) (hash(x)=42814805)
212
+ 1740 train 4.248016 (lr=2.0920e-03) (hash(x)=51812216)
213
+ 1750 train 4.116182 (lr=2.0845e-03) (hash(x)=33549014)
214
+ 1760 train 4.130714 (lr=2.0770e-03) (hash(x)=40354215)
215
+ 1770 train 4.153460 (lr=2.0694e-03) (hash(x)=40718606)
216
+ 1780 train 4.086258 (lr=2.0618e-03) (hash(x)=38274164)
217
+ 1790 train 4.257994 (lr=2.0541e-03) (hash(x)=42531471)
218
+ 1800 val loss 4.2059
219
+ 1800 val perplexity 67.0823
220
+ 1800 train 4.184424 (lr=2.0464e-03) (hash(x)=35616519)
221
+ 1810 train 4.235831 (lr=2.0386e-03) (hash(x)=33803118)
222
+ 1820 train 4.240029 (lr=2.0308e-03) (hash(x)=36973525)
223
+ 1830 train 4.182438 (lr=2.0229e-03) (hash(x)=38162549)
224
+ 1840 train 4.151460 (lr=2.0150e-03) (hash(x)=33937159)
225
+ 1850 train 4.260734 (lr=2.0070e-03) (hash(x)=39887546)
226
+ 1860 train 4.098763 (lr=1.9990e-03) (hash(x)=37818525)
227
+ 1870 train 4.144792 (lr=1.9910e-03) (hash(x)=37250478)
228
+ 1880 train 4.037030 (lr=1.9829e-03) (hash(x)=36412167)
229
+ 1890 train 4.150329 (lr=1.9747e-03) (hash(x)=40261189)
230
+ 1900 val loss 4.2046
231
+ 1900 val perplexity 66.9945
232
+ 1900 train 4.102262 (lr=1.9665e-03) (hash(x)=38654303)
233
+ 1910 train 4.074639 (lr=1.9583e-03) (hash(x)=37929515)
234
+ 1920 train 4.254277 (lr=1.9500e-03) (hash(x)=45580146)
235
+ 1930 train 4.198043 (lr=1.9417e-03) (hash(x)=32386330)
236
+ 1940 train 4.236018 (lr=1.9333e-03) (hash(x)=36331864)
237
+ 1950 train 4.215192 (lr=1.9249e-03) (hash(x)=41181727)
238
+ 1960 train 4.042159 (lr=1.9165e-03) (hash(x)=42705152)
239
+ 1970 train 4.155267 (lr=1.9080e-03) (hash(x)=40251511)
240
+ 1980 train 4.073574 (lr=1.8995e-03) (hash(x)=37525551)
241
+ 1990 train 4.141328 (lr=1.8909e-03) (hash(x)=41849618)
242
+ 2000 val loss 4.1680
243
+ 2000 val perplexity 64.5876
244
+ 2000 train 4.146471 (lr=1.8823e-03) (hash(x)=37642582)
245
+ 2010 train 4.146174 (lr=1.8737e-03) (hash(x)=42131121)
246
+ 2020 train 4.028848 (lr=1.8650e-03) (hash(x)=39000209)
247
+ 2030 train 4.121723 (lr=1.8563e-03) (hash(x)=43641355)
248
+ 2040 train 4.015273 (lr=1.8475e-03) (hash(x)=47910507)
249
+ 2050 train 4.013456 (lr=1.8387e-03) (hash(x)=36670359)
250
+ 2060 train 4.213159 (lr=1.8299e-03) (hash(x)=36477755)
251
+ 2070 train 4.298463 (lr=1.8211e-03) (hash(x)=43262487)
252
+ 2080 train 4.119202 (lr=1.8122e-03) (hash(x)=49546029)
253
+ 2090 train 4.242021 (lr=1.8033e-03) (hash(x)=39924731)
254
+ 2100 val loss 4.1567
255
+ 2100 val perplexity 63.8594
256
+ 2100 train 4.110860 (lr=1.7943e-03) (hash(x)=39921304)
257
+ 2110 train 4.058792 (lr=1.7854e-03) (hash(x)=38152788)
258
+ 2120 train 4.112485 (lr=1.7764e-03) (hash(x)=37977911)
259
+ 2130 train 4.075456 (lr=1.7673e-03) (hash(x)=34748760)
260
+ 2140 train 4.124844 (lr=1.7583e-03) (hash(x)=40509369)
261
+ 2150 train 4.095079 (lr=1.7492e-03) (hash(x)=37654262)
262
+ 2160 train 4.053647 (lr=1.7400e-03) (hash(x)=38139543)
263
+ 2170 train 3.959253 (lr=1.7309e-03) (hash(x)=42501806)
264
+ 2180 train 4.111735 (lr=1.7217e-03) (hash(x)=40085092)
265
+ 2190 train 4.049169 (lr=1.7125e-03) (hash(x)=50480193)
266
+ 2200 val loss 4.1399
267
+ 2200 val perplexity 62.7944
268
+ 2200 train 4.145333 (lr=1.7033e-03) (hash(x)=40604084)
269
+ 2210 train 4.247211 (lr=1.6941e-03) (hash(x)=41555823)
270
+ 2220 train 4.063179 (lr=1.6848e-03) (hash(x)=50441765)
271
+ 2230 train 4.162491 (lr=1.6755e-03) (hash(x)=39796580)
272
+ 2240 train 4.189950 (lr=1.6662e-03) (hash(x)=44127022)
273
+ 2250 train 4.097532 (lr=1.6568e-03) (hash(x)=37026826)
274
+ 2260 train 4.129111 (lr=1.6475e-03) (hash(x)=42133839)
275
+ 2270 train 4.089989 (lr=1.6381e-03) (hash(x)=38500664)
276
+ 2280 train 4.078357 (lr=1.6287e-03) (hash(x)=40538661)
277
+ 2290 train 4.083980 (lr=1.6193e-03) (hash(x)=51509210)
278
+ 2300 val loss 4.1150
279
+ 2300 val perplexity 61.2534
280
+ 2300 train 4.086582 (lr=1.6098e-03) (hash(x)=41952328)
281
+ 2310 train 4.012139 (lr=1.6004e-03) (hash(x)=39758123)
282
+ 2320 train 4.130382 (lr=1.5909e-03) (hash(x)=51089268)
283
+ 2330 train 4.186350 (lr=1.5814e-03) (hash(x)=39767618)
284
+ 2340 train 4.227912 (lr=1.5719e-03) (hash(x)=40409617)
285
+ 2350 train 4.066875 (lr=1.5624e-03) (hash(x)=40349634)
286
+ 2360 train 4.339818 (lr=1.5529e-03) (hash(x)=31841172)
287
+ 2370 train 4.133272 (lr=1.5434e-03) (hash(x)=42720539)
288
+ 2380 train 3.996330 (lr=1.5338e-03) (hash(x)=40998632)
289
+ 2390 train 4.104516 (lr=1.5242e-03) (hash(x)=40615413)
290
+ 2400 val loss 4.0973
291
+ 2400 val perplexity 60.1777
292
+ 2400 train 4.019341 (lr=1.5147e-03) (hash(x)=39373658)
293
+ 2410 train 4.063322 (lr=1.5051e-03) (hash(x)=35480858)
294
+ 2420 train 4.055887 (lr=1.4955e-03) (hash(x)=32241095)
295
+ 2430 train 4.037612 (lr=1.4859e-03) (hash(x)=36669715)
296
+ 2440 train 4.042526 (lr=1.4763e-03) (hash(x)=45768335)
297
+ 2450 train 3.946782 (lr=1.4666e-03) (hash(x)=39395055)
298
+ 2460 train 3.971433 (lr=1.4570e-03) (hash(x)=34899269)
299
+ 2470 train 4.167533 (lr=1.4474e-03) (hash(x)=43642420)
300
+ 2480 train 4.264967 (lr=1.4377e-03) (hash(x)=47099507)
301
+ 2490 train 4.146496 (lr=1.4281e-03) (hash(x)=38825558)
302
+ 2500 val loss 4.0839
303
+ 2500 val perplexity 59.3780
304
+ 2500 train 4.117302 (lr=1.4184e-03) (hash(x)=39833804)
305
+ 2510 train 4.208681 (lr=1.4088e-03) (hash(x)=35051654)
306
+ 2520 train 4.108937 (lr=1.3991e-03) (hash(x)=40291109)
307
+ 2530 train 4.017869 (lr=1.3895e-03) (hash(x)=36915768)
308
+ 2540 train 4.002216 (lr=1.3798e-03) (hash(x)=37016308)
309
+ 2550 train 3.961236 (lr=1.3702e-03) (hash(x)=43221777)
310
+ 2560 train 4.016005 (lr=1.3605e-03) (hash(x)=37233207)
311
+ 2570 train 3.914043 (lr=1.3509e-03) (hash(x)=37510626)
312
+ 2580 train 3.966836 (lr=1.3412e-03) (hash(x)=39624656)
313
+ 2590 train 3.816824 (lr=1.3316e-03) (hash(x)=37647501)
314
+ 2600 val loss 4.0726
315
+ 2600 val perplexity 58.7079
316
+ 2600 train 3.954801 (lr=1.3219e-03) (hash(x)=37200138)
317
+ 2610 train 3.963615 (lr=1.3123e-03) (hash(x)=43066970)
318
+ 2620 train 4.104868 (lr=1.3026e-03) (hash(x)=40503799)
319
+ 2630 train 4.040703 (lr=1.2930e-03) (hash(x)=45534660)
320
+ 2640 train 4.253039 (lr=1.2834e-03) (hash(x)=52358353)
321
+ 2650 train 4.033212 (lr=1.2737e-03) (hash(x)=41843599)
322
+ 2660 train 4.017492 (lr=1.2641e-03) (hash(x)=40519770)
323
+ 2670 train 4.103260 (lr=1.2545e-03) (hash(x)=37185602)
324
+ 2680 train 3.961729 (lr=1.2449e-03) (hash(x)=43805719)
325
+ 2690 train 3.973426 (lr=1.2353e-03) (hash(x)=39458250)
326
+ 2700 val loss 4.0497
327
+ 2700 val perplexity 57.3792
328
+ 2700 train 4.076020 (lr=1.2258e-03) (hash(x)=39149255)
329
+ 2710 train 3.917845 (lr=1.2162e-03) (hash(x)=43544861)
330
+ 2720 train 4.020093 (lr=1.2066e-03) (hash(x)=36858197)
331
+ 2730 train 3.942430 (lr=1.1971e-03) (hash(x)=37971712)
332
+ 2740 train 3.965685 (lr=1.1876e-03) (hash(x)=42416578)
333
+ 2750 train 3.913247 (lr=1.1781e-03) (hash(x)=53554622)
334
+ 2760 train 4.050060 (lr=1.1686e-03) (hash(x)=40502545)
335
+ 2770 train 4.015020 (lr=1.1591e-03) (hash(x)=36086417)
336
+ 2780 train 4.037379 (lr=1.1496e-03) (hash(x)=36242982)
337
+ 2790 train 4.113669 (lr=1.1402e-03) (hash(x)=41433780)
338
+ 2800 val loss 4.0305
339
+ 2800 val perplexity 56.2871
340
+ 2800 train 3.987751 (lr=1.1307e-03) (hash(x)=48553484)
341
+ 2810 train 3.931310 (lr=1.1213e-03) (hash(x)=39928650)
342
+ 2820 train 4.033776 (lr=1.1119e-03) (hash(x)=37891724)
343
+ 2830 train 4.008382 (lr=1.1025e-03) (hash(x)=34353412)
344
+ 2840 train 4.028496 (lr=1.0932e-03) (hash(x)=38486611)
345
+ 2850 train 3.950552 (lr=1.0838e-03) (hash(x)=32706934)
346
+ 2860 train 3.899506 (lr=1.0745e-03) (hash(x)=38693681)
347
+ 2870 train 3.965463 (lr=1.0652e-03) (hash(x)=38430800)
348
+ 2880 train 3.935346 (lr=1.0559e-03) (hash(x)=40619559)
349
+ 2890 train 3.912123 (lr=1.0467e-03) (hash(x)=42741066)
350
+ 2900 val loss 4.0330
351
+ 2900 val perplexity 56.4316
352
+ 2900 train 4.166699 (lr=1.0375e-03) (hash(x)=38216091)
353
+ 2910 train 3.965841 (lr=1.0283e-03) (hash(x)=35447832)
354
+ 2920 train 4.082309 (lr=1.0191e-03) (hash(x)=39825190)
355
+ 2930 train 4.096532 (lr=1.0100e-03) (hash(x)=37789121)
356
+ 2940 train 3.970724 (lr=1.0008e-03) (hash(x)=44499116)
357
+ 2950 train 4.051816 (lr=9.9174e-04) (hash(x)=39669860)
358
+ 2960 train 3.998207 (lr=9.8268e-04) (hash(x)=51683741)
359
+ 2970 train 3.864767 (lr=9.7364e-04) (hash(x)=36399721)
360
+ 2980 train 3.957017 (lr=9.6463e-04) (hash(x)=42629700)
361
+ 2990 train 3.846457 (lr=9.5566e-04) (hash(x)=39263773)
362
+ 3000 val loss 4.0085
363
+ 3000 val perplexity 55.0660
364
+ 3000 train 3.889926 (lr=9.4671e-04) (hash(x)=37920485)
365
+ 3010 train 3.932953 (lr=9.3780e-04) (hash(x)=40835161)
366
+ 3020 train 3.900779 (lr=9.2892e-04) (hash(x)=38381159)
367
+ 3030 train 3.890289 (lr=9.2007e-04) (hash(x)=35846270)
368
+ 3040 train 4.279407 (lr=9.1125e-04) (hash(x)=47516567)
369
+ 3050 train 4.118652 (lr=9.0247e-04) (hash(x)=35600311)
370
+ 3060 train 4.153103 (lr=8.9373e-04) (hash(x)=37775318)
371
+ 3070 train 3.991057 (lr=8.8502e-04) (hash(x)=39881333)
372
+ 3080 train 4.012084 (lr=8.7634e-04) (hash(x)=37310168)
373
+ 3090 train 3.943832 (lr=8.6771e-04) (hash(x)=43669978)
374
+ 3100 val loss 3.9981
375
+ 3100 val perplexity 54.4923
376
+ 3100 train 4.398820 (lr=8.5911e-04) (hash(x)=46356797)
377
+ 3110 train 4.039447 (lr=8.5054e-04) (hash(x)=40814232)
378
+ 3120 train 3.880230 (lr=8.4202e-04) (hash(x)=42504837)
379
+ 3130 train 4.016898 (lr=8.3353e-04) (hash(x)=39693594)
380
+ 3140 train 3.921024 (lr=8.2509e-04) (hash(x)=40277645)
381
+ 3150 train 3.929754 (lr=8.1669e-04) (hash(x)=40988003)
382
+ 3160 train 4.050598 (lr=8.0832e-04) (hash(x)=38875266)
383
+ 3170 train 4.003269 (lr=8.0000e-04) (hash(x)=45232173)
384
+ 3180 train 4.049362 (lr=7.9172e-04) (hash(x)=39213336)
385
+ 3190 train 4.113184 (lr=7.8349e-04) (hash(x)=42118576)
386
+ 3200 val loss 3.9765
387
+ 3200 val perplexity 53.3293
388
+ 3200 train 3.971393 (lr=7.7530e-04) (hash(x)=32884223)
389
+ 3210 train 4.018674 (lr=7.6715e-04) (hash(x)=41276800)
390
+ 3220 train 4.062593 (lr=7.5905e-04) (hash(x)=40284461)
391
+ 3230 train 4.045835 (lr=7.5099e-04) (hash(x)=40566734)
392
+ 3240 train 4.031040 (lr=7.4298e-04) (hash(x)=36484570)
393
+ 3250 train 3.999912 (lr=7.3501e-04) (hash(x)=41642338)
394
+ 3260 train 3.963591 (lr=7.2709e-04) (hash(x)=43883570)
395
+ 3270 train 3.905051 (lr=7.1922e-04) (hash(x)=40432560)
396
+ 3280 train 3.870279 (lr=7.1140e-04) (hash(x)=38002717)
397
+ 3290 train 3.815768 (lr=7.0363e-04) (hash(x)=41926004)
398
+ 3300 val loss 3.9691
399
+ 3300 val perplexity 52.9376
400
+ 3300 train 3.971046 (lr=6.9590e-04) (hash(x)=46015509)
401
+ 3310 train 4.005016 (lr=6.8823e-04) (hash(x)=40112249)
402
+ 3320 train 3.983451 (lr=6.8061e-04) (hash(x)=49162296)
403
+ 3330 train 4.044431 (lr=6.7303e-04) (hash(x)=46606969)
404
+ 3340 train 4.060053 (lr=6.6551e-04) (hash(x)=31128992)
405
+ 3350 train 4.034428 (lr=6.5805e-04) (hash(x)=41232534)
406
+ 3360 train 3.878048 (lr=6.5063e-04) (hash(x)=41599699)
407
+ 3370 train 4.049624 (lr=6.4327e-04) (hash(x)=40885280)
408
+ 3380 train 3.966612 (lr=6.3596e-04) (hash(x)=37169148)
409
+ 3390 train 3.874076 (lr=6.2871e-04) (hash(x)=46581889)
410
+ 3400 val loss 3.9567
411
+ 3400 val perplexity 52.2848
412
+ 3400 train 3.920069 (lr=6.2151e-04) (hash(x)=37612074)
413
+ 3410 train 3.994933 (lr=6.1437e-04) (hash(x)=38763316)
414
+ 3420 train 3.815643 (lr=6.0728e-04) (hash(x)=40562379)
415
+ 3430 train 3.918336 (lr=6.0025e-04) (hash(x)=44724867)
416
+ 3440 train 4.123186 (lr=5.9328e-04) (hash(x)=39386624)
417
+ 3450 train 4.075572 (lr=5.8636e-04) (hash(x)=38652923)
418
+ 3460 train 4.054631 (lr=5.7950e-04) (hash(x)=29587379)
419
+ 3470 train 3.958997 (lr=5.7271e-04) (hash(x)=38228776)
420
+ 3480 train 3.933403 (lr=5.6597e-04) (hash(x)=30076039)
421
+ 3490 train 3.997309 (lr=5.5929e-04) (hash(x)=38398908)
422
+ 3500 val loss 3.9434
423
+ 3500 val perplexity 51.5923
424
+ 3500 train 4.008643 (lr=5.5267e-04) (hash(x)=39259918)
425
+ 3510 train 3.907606 (lr=5.4611e-04) (hash(x)=40664091)
426
+ 3520 train 3.943635 (lr=5.3961e-04) (hash(x)=40309647)
427
+ 3530 train 3.906577 (lr=5.3317e-04) (hash(x)=38424801)
428
+ 3540 train 3.830001 (lr=5.2680e-04) (hash(x)=51322307)
429
+ 3550 train 3.895124 (lr=5.2048e-04) (hash(x)=38192628)
430
+ 3560 train 3.943963 (lr=5.1424e-04) (hash(x)=41563952)
431
+ 3570 train 3.896312 (lr=5.0805e-04) (hash(x)=39508843)
432
+ 3580 train 3.969873 (lr=5.0193e-04) (hash(x)=41260225)
433
+ 3590 train 3.952579 (lr=4.9587e-04) (hash(x)=49098107)
434
+ 3600 val loss 3.9341
435
+ 3600 val perplexity 51.1157
436
+ 3600 train 3.940125 (lr=4.8988e-04) (hash(x)=41194370)
437
+ 3610 train 4.055688 (lr=4.8395e-04) (hash(x)=40399152)
438
+ 3620 train 4.017262 (lr=4.7809e-04) (hash(x)=41625018)
439
+ 3630 train 4.037700 (lr=4.7229e-04) (hash(x)=44077942)
440
+ 3640 train 4.039501 (lr=4.6656e-04) (hash(x)=42427512)
441
+ 3650 train 4.016996 (lr=4.6090e-04) (hash(x)=40067454)
442
+ 3660 train 3.944356 (lr=4.5530e-04) (hash(x)=37969892)
443
+ 3670 train 3.916641 (lr=4.4978e-04) (hash(x)=43670246)
444
+ 3680 train 3.993936 (lr=4.4432e-04) (hash(x)=37031732)
445
+ 3690 train 3.873747 (lr=4.3893e-04) (hash(x)=39237187)
446
+ 3700 val loss 3.9282
447
+ 3700 val perplexity 50.8167
448
+ 3700 train 3.911326 (lr=4.3360e-04) (hash(x)=39521416)
449
+ 3710 train 3.806998 (lr=4.2835e-04) (hash(x)=41147425)
450
+ 3720 train 3.801924 (lr=4.2317e-04) (hash(x)=40204658)
451
+ 3730 train 3.891439 (lr=4.1806e-04) (hash(x)=39420983)
452
+ 3740 train 3.943177 (lr=4.1302e-04) (hash(x)=51155740)
453
+ 3750 train 3.929652 (lr=4.0804e-04) (hash(x)=41654187)
454
+ 3760 train 3.886012 (lr=4.0314e-04) (hash(x)=40598339)
455
+ 3770 train 3.849638 (lr=3.9832e-04) (hash(x)=44950116)
456
+ 3780 train 3.911427 (lr=3.9356e-04) (hash(x)=40660326)
457
+ 3790 train 3.959795 (lr=3.8888e-04) (hash(x)=37805851)
458
+ 3800 val loss 3.9103
459
+ 3800 val perplexity 49.9117
460
+ 3800 train 3.928065 (lr=3.8426e-04) (hash(x)=38064443)
461
+ 3810 train 3.999147 (lr=3.7973e-04) (hash(x)=39161498)
462
+ 3820 train 3.842401 (lr=3.7526e-04) (hash(x)=40626649)
463
+ 3830 train 3.916437 (lr=3.7087e-04) (hash(x)=36894771)
464
+ 3840 train 3.859153 (lr=3.6655e-04) (hash(x)=33672141)
465
+ 3850 train 3.819809 (lr=3.6231e-04) (hash(x)=41751813)
466
+ 3860 train 3.694380 (lr=3.5814e-04) (hash(x)=33784172)
467
+ 3870 train 3.623693 (lr=3.5405e-04) (hash(x)=43110493)
468
+ 3880 train 3.722750 (lr=3.5003e-04) (hash(x)=36585111)
469
+ 3890 train 3.729099 (lr=3.4609e-04) (hash(x)=40134264)
470
+ 3900 val loss 3.9173
471
+ 3900 val perplexity 50.2624
472
+ 3900 train 3.723496 (lr=3.4222e-04) (hash(x)=39815215)
473
+ 3910 train 3.859874 (lr=3.3843e-04) (hash(x)=42081557)
474
+ 3920 train 3.800117 (lr=3.3471e-04) (hash(x)=48184973)
475
+ 3930 train 3.993379 (lr=3.3108e-04) (hash(x)=42416681)
476
+ 3940 train 3.917407 (lr=3.2752e-04) (hash(x)=38587062)
477
+ 3950 train 3.912456 (lr=3.2403e-04) (hash(x)=37296617)
478
+ 3960 train 3.889674 (lr=3.2063e-04) (hash(x)=37959330)
479
+ 3970 train 3.919787 (lr=3.1730e-04) (hash(x)=35175634)
480
+ 3980 train 3.962559 (lr=3.1405e-04) (hash(x)=37219362)
481
+ 3990 train 3.983266 (lr=3.1087e-04) (hash(x)=38906132)
482
+ 4000 val loss 3.8963
483
+ 4000 val perplexity 49.2191
484
+ 4000 train 3.904757 (lr=3.0778e-04) (hash(x)=39940517)
485
+ 4010 train 3.920691 (lr=3.0476e-04) (hash(x)=39952437)
486
+ 4020 train 3.839117 (lr=3.0183e-04) (hash(x)=42000930)
487
+ 4030 train 3.849427 (lr=2.9897e-04) (hash(x)=41460672)
488
+ 4040 train 3.595020 (lr=2.9619e-04) (hash(x)=43702329)
489
+ 4050 train 3.688439 (lr=2.9349e-04) (hash(x)=41740121)
490
+ 4060 train 3.690479 (lr=2.9087e-04) (hash(x)=37066349)
491
+ 4070 train 3.531346 (lr=2.8833e-04) (hash(x)=38359869)
492
+ 4080 train 3.772682 (lr=2.8587e-04) (hash(x)=35131370)
493
+ 4090 train 3.833387 (lr=2.8350e-04) (hash(x)=39918811)
494
+ 4100 val loss 3.8960
495
+ 4100 val perplexity 49.2046
496
+ 4100 train 3.994807 (lr=2.8120e-04) (hash(x)=47036374)
497
+ 4110 train 3.919089 (lr=2.7898e-04) (hash(x)=37657523)
498
+ 4120 train 3.962015 (lr=2.7684e-04) (hash(x)=37218703)
499
+ 4130 train 3.976485 (lr=2.7479e-04) (hash(x)=41350513)
500
+ 4140 train 4.004144 (lr=2.7281e-04) (hash(x)=38283785)
501
+ 4150 train 3.978419 (lr=2.7092e-04) (hash(x)=32927892)
502
+ 4160 train 3.861234 (lr=2.6910e-04) (hash(x)=42011933)
503
+ 4170 train 3.882958 (lr=2.6737e-04) (hash(x)=41854594)
504
+ 4180 train 3.888269 (lr=2.6572e-04) (hash(x)=36737064)
505
+ 4190 train 3.801370 (lr=2.6415e-04) (hash(x)=36326176)
506
+ 4200 val loss 3.8875
507
+ 4200 val perplexity 48.7903
508
+ 4200 train 3.875155 (lr=2.6267e-04) (hash(x)=39106683)
509
+ 4210 train 3.921571 (lr=2.6126e-04) (hash(x)=39759977)
510
+ 4220 train 3.665363 (lr=2.5994e-04) (hash(x)=43666151)
511
+ 4230 train 3.617259 (lr=2.5870e-04) (hash(x)=38154367)
512
+ 4240 train 3.721552 (lr=2.5754e-04) (hash(x)=38284168)
513
+ 4250 train 3.658618 (lr=2.5647e-04) (hash(x)=38888879)
514
+ 4260 train 3.575215 (lr=2.5548e-04) (hash(x)=36846417)
515
+ 4270 train 3.980031 (lr=2.5457e-04) (hash(x)=43125375)
516
+ 4280 train 3.936495 (lr=2.5374e-04) (hash(x)=43556412)
517
+ 4290 train 3.870555 (lr=2.5299e-04) (hash(x)=48598302)
518
+ 4300 val loss 3.8826
519
+ 4300 val perplexity 48.5486
520
+ 4300 train 3.851266 (lr=2.5233e-04) (hash(x)=42764385)
521
+ 4310 train 3.906841 (lr=2.5175e-04) (hash(x)=41190028)
522
+ 4320 train 3.871940 (lr=2.5125e-04) (hash(x)=35767770)
523
+ 4330 train 3.952089 (lr=2.5084e-04) (hash(x)=36760476)
524
+ 4340 train 3.891926 (lr=2.5051e-04) (hash(x)=40618921)
525
+ 4350 train 3.991082 (lr=2.5026e-04) (hash(x)=32300164)
526
+ 4360 train 3.846099 (lr=2.5009e-04) (hash(x)=41400085)
527
+ 4370 train 3.883541 (lr=2.5001e-04) (hash(x)=40251828)
528
+ 4374 val loss 3.8825
529
+ 4374 val perplexity 48.5437
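For reading log2.txt: the reported val perplexity appears to be exp of the corresponding val loss. A quick check, assuming natural-log cross-entropy and using the values reported above:

import math

print(math.exp(3.8825))   # ~48.54, matching the final val perplexity 48.5437
print(math.exp(11.2068))  # ~7.36e4, consistent with the step-0 perplexity 73626.93 (loss is rounded in the log)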
logs/fix_1_latent_mask/1_latent_mask_lr_25e-4_n_latent_masks_2_relu_seed_1340/model_04374.pt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:07cf3954e02d8e6ef3f4dfe60358f5e29e43fbfbf932e3b0f3c44092490f03bc
3
+ size 97707314
logs/fix_1_latent_mask/1_latent_mask_lr_25e-4_n_latent_masks_2_relu_seed_1340/optimizer_04374.pt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f66d06a45d60990c21cca3079a766330b36d66984a4fbfd94aefe3afd5e42a71
3
+ size 189136950
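The dataloader_04374.pt, model_04374.pt, and optimizer_04374.pt entries above are Git LFS pointers: oid is the SHA-256 of the stored file and size is its byte count. A hedged verification sketch (repo_id below is a placeholder, not taken from this commit; hf_hub_download resolves the pointer to the real file):

import hashlib
from huggingface_hub import hf_hub_download

# Placeholder repo id; substitute the repository this commit was pushed to.
path = hf_hub_download(
    repo_id="andrew-healey/placeholder-repo",  # hypothetical
    filename="logs/fix_1_latent_mask/1_latent_mask_lr_25e-4_n_latent_masks_2_relu_seed_1340/model_04374.pt",
)

# The pointer's oid is the SHA-256 of the file contents, so the digests should match.
with open(path, "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()
print(digest == "07cf3954e02d8e6ef3f4dfe60358f5e29e43fbfbf932e3b0f3c44092490f03bc")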