andrew-healey committed on
Commit 032d2d6 · verified · 1 Parent(s): 4cd311a

Upload folder using huggingface_hub
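("Upload folder using huggingface_hub" is the default commit message `huggingface_hub` writes when a folder is pushed with `HfApi.upload_folder`. A minimal sketch of the kind of call that produces a commit like this one; the `repo_id` is a placeholder, not the actual repository name:)

```python
from huggingface_hub import HfApi

# Sketch only: repo_id is a placeholder. upload_folder's default commit
# message is "Upload folder using huggingface_hub", matching this commit.
api = HfApi()
api.upload_folder(
    folder_path="two_latent_masks/two_latent_masks_seed_1340",
    path_in_repo="two_latent_masks/two_latent_masks_seed_1340",
    repo_id="<user>/<repo>",
)
```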

two_latent_masks/two_latent_masks_seed_1340/args.json ADDED
@@ -0,0 +1 @@
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "two_latent_masks/two_latent_masks_seed_1340", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 12, "n_embd": 264, "head_dim": 22, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 4375, "warmup_steps": 250, "group": "two_latent_masks", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1340, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "n_latent_masks", "selection_head_linear_combo_scale": 1.0, "disable_selection_head_linear_combo_bias": false, "assert_latent_matches_no_head": false, "n_latent_masks": 2, "init_latent_masks_to_identity": true, "init_latent_masks_to_inverse": false, "latent_mask_scale": null, "latent_mask_runtime_multiplier": null, "latent_mask_sigmoid": false, "S_layernorm": false, "one_head_per_latent_mask": false, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": false, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 32, "total_batch_size": 131072, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.003, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "two_latent_masks", "latent_mask_precision": "float32"}
two_latent_masks/two_latent_masks_seed_1340/dataloader_04374.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6031fd3e2855a036f7a5531cc24555aabd1115f9dd6618b8b2ca6f55279ef0b2
+ size 964
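(All three `.pt` files in this commit are stored through Git LFS, so the diff records only a three-line pointer, the spec version, the SHA-256 of the blob, and its size in bytes, rather than the tensors themselves; `git lfs pull` replaces the pointer with the real 964-byte file. A small sketch of reading such a pointer before it has been smudged; the helper name is illustrative, not from this repo:)

```python
# Minimal sketch of parsing a Git LFS pointer file (the three
# "key value" lines shown above).
def read_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

ptr = read_lfs_pointer(
    "two_latent_masks/two_latent_masks_seed_1340/dataloader_04374.pt"
)
print(ptr["oid"])   # "sha256:6031fd3e..." -- hash of the real blob
print(ptr["size"])  # "964"
```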
two_latent_masks/two_latent_masks_seed_1340/log2.txt ADDED
@@ -0,0 +1,548 @@
+ max_steps: 4375
+ 0 val loss 11.2067
+ 0 val perplexity 73625.3828
+ 0 hella 0.2447
+ 0 train 11.205807 (lr=4.1958e-06) (hash(x)=45482580)
+ 10 train 9.849052 (lr=4.6154e-05) (hash(x)=38414730)
+ 20 train 9.344046 (lr=8.8112e-05) (hash(x)=39783917)
+ 30 train 8.604156 (lr=1.3007e-04) (hash(x)=38395733)
+ 40 train 7.903117 (lr=1.7203e-04) (hash(x)=37486707)
+ 50 train 7.629532 (lr=2.1399e-04) (hash(x)=39897505)
+ 60 train 7.543610 (lr=2.5594e-04) (hash(x)=40933473)
+ 70 train 7.539197 (lr=2.9790e-04) (hash(x)=37690746)
+ 80 train 7.416548 (lr=3.3986e-04) (hash(x)=38609656)
+ 90 train 7.193022 (lr=3.8182e-04) (hash(x)=38356571)
+ 100 val loss 7.0923
+ 100 val perplexity 1202.6874
+ 100 train 7.073114 (lr=4.2378e-04) (hash(x)=35980376)
+ 110 train 6.963567 (lr=4.6573e-04) (hash(x)=42011042)
+ 120 train 6.855889 (lr=5.0769e-04) (hash(x)=40266823)
+ 130 train 6.695091 (lr=5.4965e-04) (hash(x)=38645447)
+ 140 train 6.592414 (lr=5.9161e-04) (hash(x)=36059313)
+ 150 train 6.528383 (lr=6.3357e-04) (hash(x)=34161947)
+ 160 train 6.405905 (lr=6.7552e-04) (hash(x)=42263375)
+ 170 train 6.467422 (lr=7.1748e-04) (hash(x)=42957725)
+ 180 train 6.475669 (lr=7.5944e-04) (hash(x)=40198018)
+ 190 train 6.203243 (lr=8.0140e-04) (hash(x)=41666215)
+ 200 val loss 6.1980
+ 200 val perplexity 491.7875
+ 200 train 6.159960 (lr=8.4336e-04) (hash(x)=54060482)
+ 210 train 6.048736 (lr=8.8531e-04) (hash(x)=42332778)
+ 220 train 6.003103 (lr=9.2727e-04) (hash(x)=39551486)
+ 230 train 5.999578 (lr=9.6923e-04) (hash(x)=38462018)
+ 240 train 6.062742 (lr=1.0112e-03) (hash(x)=36591442)
+ 250 hella 0.2372
+ 250 train 6.003727 (lr=1.0531e-03) (hash(x)=40861237)
+ 260 train 5.943408 (lr=1.0951e-03) (hash(x)=41739369)
+ 270 train 5.807301 (lr=1.1371e-03) (hash(x)=39415360)
+ 280 train 5.795361 (lr=1.1790e-03) (hash(x)=44665934)
+ 290 train 5.726136 (lr=1.2210e-03) (hash(x)=37035579)
+ 300 val loss 5.7097
+ 300 val perplexity 301.7834
+ 300 train 5.730904 (lr=1.2629e-03) (hash(x)=38301011)
+ 310 train 5.566712 (lr=1.3049e-03) (hash(x)=42850980)
+ 320 train 5.647444 (lr=1.3469e-03) (hash(x)=37476222)
+ 330 train 5.561852 (lr=1.3888e-03) (hash(x)=53028205)
+ 340 train 5.568568 (lr=1.4308e-03) (hash(x)=41466008)
+ 350 train 5.487262 (lr=1.4727e-03) (hash(x)=37802865)
+ 360 train 5.435179 (lr=1.5147e-03) (hash(x)=42769282)
+ 370 train 5.453918 (lr=1.5566e-03) (hash(x)=39319256)
+ 380 train 5.439272 (lr=1.5986e-03) (hash(x)=42637402)
+ 390 train 5.399359 (lr=1.6406e-03) (hash(x)=31141514)
+ 400 val loss 5.3977
+ 400 val perplexity 220.9045
+ 400 train 5.471302 (lr=1.6825e-03) (hash(x)=38151157)
+ 410 train 5.410476 (lr=1.7245e-03) (hash(x)=46033439)
+ 420 train 5.386098 (lr=1.7664e-03) (hash(x)=41365246)
+ 430 train 5.356382 (lr=1.8084e-03) (hash(x)=42369184)
+ 440 train 5.357374 (lr=1.8503e-03) (hash(x)=42004840)
+ 450 train 5.275893 (lr=1.8923e-03) (hash(x)=37181172)
+ 460 train 5.379376 (lr=1.9343e-03) (hash(x)=31630797)
+ 470 train 5.220837 (lr=1.9762e-03) (hash(x)=42135747)
+ 480 train 5.142321 (lr=2.0182e-03) (hash(x)=30023651)
+ 490 train 5.226364 (lr=2.0601e-03) (hash(x)=40463476)
+ 500 val loss 5.1917
+ 500 val perplexity 179.7751
+ 500 hella 0.2385
+ 500 train 5.109916 (lr=2.1021e-03) (hash(x)=37089842)
+ 510 train 5.126831 (lr=2.1441e-03) (hash(x)=41095974)
+ 520 train 5.169706 (lr=2.1860e-03) (hash(x)=41525258)
+ 530 train 5.144300 (lr=2.2280e-03) (hash(x)=37697291)
+ 540 train 5.158866 (lr=2.2699e-03) (hash(x)=43459894)
+ 550 train 5.018561 (lr=2.3119e-03) (hash(x)=38993956)
+ 560 train 5.055124 (lr=2.3538e-03) (hash(x)=40677277)
+ 570 train 5.122100 (lr=2.3958e-03) (hash(x)=43103127)
+ 580 train 5.037695 (lr=2.4378e-03) (hash(x)=41871776)
+ 590 train 5.031014 (lr=2.4797e-03) (hash(x)=43230967)
+ 600 val loss 4.9571
+ 600 val perplexity 142.1804
+ 600 train 5.002603 (lr=2.5217e-03) (hash(x)=42226172)
+ 610 train 5.309992 (lr=2.5636e-03) (hash(x)=43007164)
+ 620 train 4.852357 (lr=2.6056e-03) (hash(x)=35752341)
+ 630 train 4.941896 (lr=2.6476e-03) (hash(x)=39453504)
+ 640 train 4.881917 (lr=2.6895e-03) (hash(x)=43635712)
+ 650 train 4.879248 (lr=2.7315e-03) (hash(x)=41852456)
+ 660 train 4.908008 (lr=2.7734e-03) (hash(x)=49125742)
+ 670 train 4.901972 (lr=2.8154e-03) (hash(x)=34886275)
+ 680 train 4.981446 (lr=2.8573e-03) (hash(x)=40546482)
+ 690 train 4.794238 (lr=2.8993e-03) (hash(x)=40850901)
+ 700 val loss 4.8023
+ 700 val perplexity 121.7876
+ 700 train 4.756218 (lr=2.9413e-03) (hash(x)=37426993)
+ 710 train 4.939689 (lr=2.9832e-03) (hash(x)=42679743)
+ 720 train 4.757261 (lr=3.0000e-03) (hash(x)=44856043)
+ 730 train 4.740185 (lr=2.9999e-03) (hash(x)=36457471)
+ 740 train 4.696213 (lr=2.9997e-03) (hash(x)=47203534)
+ 750 hella 0.2522
+ 750 train 4.725652 (lr=2.9994e-03) (hash(x)=35585657)
+ 760 train 4.634695 (lr=2.9990e-03) (hash(x)=37259264)
+ 770 train 4.547191 (lr=2.9985e-03) (hash(x)=44161997)
+ 780 train 4.497768 (lr=2.9979e-03) (hash(x)=41272886)
+ 790 train 4.502317 (lr=2.9972e-03) (hash(x)=43303662)
+ 800 val loss 4.6883
+ 800 val perplexity 108.6673
+ 800 train 4.400466 (lr=2.9964e-03) (hash(x)=39067231)
+ 810 train 4.647120 (lr=2.9955e-03) (hash(x)=44046732)
+ 820 train 4.627461 (lr=2.9945e-03) (hash(x)=39783962)
+ 830 train 4.679810 (lr=2.9934e-03) (hash(x)=40973974)
+ 840 train 4.665509 (lr=2.9922e-03) (hash(x)=41619968)
+ 850 train 4.712312 (lr=2.9909e-03) (hash(x)=36939960)
+ 860 train 4.583115 (lr=2.9896e-03) (hash(x)=42942377)
+ 870 train 4.594052 (lr=2.9881e-03) (hash(x)=39659455)
+ 880 train 4.615222 (lr=2.9865e-03) (hash(x)=40331986)
+ 890 train 4.522671 (lr=2.9848e-03) (hash(x)=38084814)
+ 900 val loss 4.5581
+ 900 val perplexity 95.4065
+ 900 train 4.547886 (lr=2.9830e-03) (hash(x)=30830367)
+ 910 train 4.462755 (lr=2.9811e-03) (hash(x)=39664356)
+ 920 train 4.475551 (lr=2.9792e-03) (hash(x)=39007775)
+ 930 train 4.492985 (lr=2.9771e-03) (hash(x)=39319254)
+ 940 train 4.548799 (lr=2.9749e-03) (hash(x)=36347051)
+ 950 train 4.492932 (lr=2.9726e-03) (hash(x)=39243577)
+ 960 train 4.522244 (lr=2.9703e-03) (hash(x)=45603839)
+ 970 train 4.329557 (lr=2.9678e-03) (hash(x)=40036075)
+ 980 train 4.372044 (lr=2.9652e-03) (hash(x)=43068524)
+ 990 train 4.356610 (lr=2.9626e-03) (hash(x)=33798472)
+ 1000 val loss 4.5266
+ 1000 val perplexity 92.4451
+ 1000 hella 0.2539
+ 1000 train 4.735182 (lr=2.9598e-03) (hash(x)=37728665)
+ 1010 train 4.597361 (lr=2.9570e-03) (hash(x)=43678636)
+ 1020 train 4.617698 (lr=2.9540e-03) (hash(x)=48464376)
+ 1030 train 4.583224 (lr=2.9510e-03) (hash(x)=43771461)
+ 1040 train 4.442571 (lr=2.9478e-03) (hash(x)=44643085)
+ 1050 train 4.522850 (lr=2.9446e-03) (hash(x)=39201716)
+ 1060 train 4.434003 (lr=2.9412e-03) (hash(x)=36396343)
+ 1070 train 4.424822 (lr=2.9378e-03) (hash(x)=37504111)
+ 1080 train 4.458571 (lr=2.9343e-03) (hash(x)=37454537)
+ 1090 train 4.461934 (lr=2.9307e-03) (hash(x)=36888183)
+ 1100 val loss 4.4503
+ 1100 val perplexity 85.6496
+ 1100 train 4.394948 (lr=2.9270e-03) (hash(x)=39549045)
+ 1110 train 4.588317 (lr=2.9231e-03) (hash(x)=56818420)
+ 1120 train 4.395922 (lr=2.9192e-03) (hash(x)=36573357)
+ 1130 train 4.337223 (lr=2.9152e-03) (hash(x)=38540878)
+ 1140 train 4.323796 (lr=2.9112e-03) (hash(x)=38645859)
+ 1150 train 4.535546 (lr=2.9070e-03) (hash(x)=41972503)
+ 1160 train 4.154067 (lr=2.9027e-03) (hash(x)=38151868)
+ 1170 train 4.187426 (lr=2.8983e-03) (hash(x)=42375436)
+ 1180 train 4.173421 (lr=2.8939e-03) (hash(x)=42868028)
+ 1190 train 4.417728 (lr=2.8893e-03) (hash(x)=41060868)
+ 1200 val loss 4.4152
+ 1200 val perplexity 82.6984
+ 1200 train 4.427809 (lr=2.8847e-03) (hash(x)=40852814)
+ 1210 train 4.409160 (lr=2.8800e-03) (hash(x)=43365554)
+ 1220 train 4.343733 (lr=2.8751e-03) (hash(x)=39121134)
+ 1230 train 4.432467 (lr=2.8702e-03) (hash(x)=40565426)
+ 1240 train 4.495364 (lr=2.8652e-03) (hash(x)=43378926)
+ 1250 hella 0.2536
+ 1250 train 4.358536 (lr=2.8601e-03) (hash(x)=35227381)
+ 1260 train 4.450266 (lr=2.8550e-03) (hash(x)=41267226)
+ 1270 train 4.361721 (lr=2.8497e-03) (hash(x)=31395565)
+ 1280 train 4.475723 (lr=2.8443e-03) (hash(x)=41470963)
+ 1290 train 4.279185 (lr=2.8389e-03) (hash(x)=40729366)
+ 1300 val loss 4.3495
+ 1300 val perplexity 77.4396
+ 1300 train 4.321046 (lr=2.8333e-03) (hash(x)=39916663)
+ 1310 train 4.373919 (lr=2.8277e-03) (hash(x)=42179962)
+ 1320 train 4.236485 (lr=2.8220e-03) (hash(x)=44991932)
+ 1330 train 4.204968 (lr=2.8162e-03) (hash(x)=38925911)
+ 1340 train 4.156964 (lr=2.8103e-03) (hash(x)=38565560)
+ 1350 train 4.202971 (lr=2.8044e-03) (hash(x)=43384902)
+ 1360 train 4.149682 (lr=2.7983e-03) (hash(x)=45422261)
+ 1370 train 4.135951 (lr=2.7922e-03) (hash(x)=40167325)
+ 1380 train 4.289577 (lr=2.7860e-03) (hash(x)=51258162)
+ 1390 train 4.352801 (lr=2.7797e-03) (hash(x)=43229106)
+ 1400 val loss 4.3085
+ 1400 val perplexity 74.3277
+ 1400 train 4.257587 (lr=2.7733e-03) (hash(x)=42537494)
+ 1410 train 4.556211 (lr=2.7668e-03) (hash(x)=51236490)
+ 1420 train 4.399123 (lr=2.7603e-03) (hash(x)=45243347)
+ 1430 train 4.245559 (lr=2.7536e-03) (hash(x)=37658566)
+ 1440 train 4.311000 (lr=2.7469e-03) (hash(x)=38856106)
+ 1450 train 4.376157 (lr=2.7401e-03) (hash(x)=36649488)
+ 1460 train 4.373933 (lr=2.7333e-03) (hash(x)=40445407)
+ 1470 train 4.301167 (lr=2.7263e-03) (hash(x)=39916387)
+ 1480 train 4.275552 (lr=2.7193e-03) (hash(x)=40934555)
+ 1490 train 4.238409 (lr=2.7121e-03) (hash(x)=42276527)
+ 1500 val loss 4.2899
+ 1500 val perplexity 72.9625
+ 1500 hella 0.2512
+ 1500 train 4.230471 (lr=2.7050e-03) (hash(x)=37973883)
+ 1510 train 4.289554 (lr=2.6977e-03) (hash(x)=38929286)
+ 1520 train 4.106341 (lr=2.6903e-03) (hash(x)=36855837)
+ 1530 train 4.357666 (lr=2.6829e-03) (hash(x)=42146431)
+ 1540 train 4.309226 (lr=2.6754e-03) (hash(x)=41898411)
+ 1550 train 4.212825 (lr=2.6678e-03) (hash(x)=44906272)
+ 1560 train 4.236341 (lr=2.6602e-03) (hash(x)=40414353)
+ 1570 train 4.268190 (lr=2.6525e-03) (hash(x)=38723190)
+ 1580 train 4.181475 (lr=2.6447e-03) (hash(x)=39065271)
+ 1590 train 4.178324 (lr=2.6368e-03) (hash(x)=40000886)
+ 1600 val loss 4.2630
+ 1600 val perplexity 71.0227
+ 1600 train 4.099240 (lr=2.6289e-03) (hash(x)=37898571)
+ 1610 train 4.202587 (lr=2.6208e-03) (hash(x)=40447863)
+ 1620 train 4.171416 (lr=2.6128e-03) (hash(x)=40119318)
+ 1630 train 4.107509 (lr=2.6046e-03) (hash(x)=40664074)
+ 1640 train 4.072978 (lr=2.5964e-03) (hash(x)=37404741)
+ 1650 train 4.227535 (lr=2.5881e-03) (hash(x)=39214928)
+ 1660 train 4.263394 (lr=2.5797e-03) (hash(x)=35005019)
+ 1670 train 4.312654 (lr=2.5713e-03) (hash(x)=37675832)
+ 1680 train 4.245783 (lr=2.5628e-03) (hash(x)=40489680)
+ 1690 train 4.164114 (lr=2.5542e-03) (hash(x)=45537879)
+ 1700 val loss 4.2283
+ 1700 val perplexity 68.5995
+ 1700 train 4.142451 (lr=2.5455e-03) (hash(x)=38866100)
+ 1710 train 4.211920 (lr=2.5368e-03) (hash(x)=44726254)
+ 1720 train 4.178302 (lr=2.5281e-03) (hash(x)=36917792)
+ 1730 train 4.170093 (lr=2.5192e-03) (hash(x)=42814805)
+ 1740 train 4.249225 (lr=2.5103e-03) (hash(x)=51812216)
+ 1750 hella 0.2538
+ 1750 train 4.113585 (lr=2.5014e-03) (hash(x)=33549014)
+ 1760 train 4.132693 (lr=2.4924e-03) (hash(x)=40354215)
+ 1770 train 4.152946 (lr=2.4833e-03) (hash(x)=40718606)
+ 1780 train 4.080906 (lr=2.4741e-03) (hash(x)=38274164)
+ 1790 train 4.258163 (lr=2.4649e-03) (hash(x)=42531471)
+ 1800 val loss 4.2085
+ 1800 val perplexity 67.2572
+ 1800 train 4.186649 (lr=2.4556e-03) (hash(x)=35616519)
+ 1810 train 4.239477 (lr=2.4463e-03) (hash(x)=33803118)
+ 1820 train 4.245177 (lr=2.4369e-03) (hash(x)=36973525)
+ 1830 train 4.186145 (lr=2.4275e-03) (hash(x)=38162549)
+ 1840 train 4.152010 (lr=2.4180e-03) (hash(x)=33937159)
+ 1850 train 4.259073 (lr=2.4084e-03) (hash(x)=39887546)
+ 1860 train 4.099063 (lr=2.3988e-03) (hash(x)=37818525)
+ 1870 train 4.145759 (lr=2.3891e-03) (hash(x)=37250478)
+ 1880 train 4.039742 (lr=2.3794e-03) (hash(x)=36412167)
+ 1890 train 4.150132 (lr=2.3696e-03) (hash(x)=40261189)
+ 1900 val loss 4.2077
+ 1900 val perplexity 67.1990
+ 1900 train 4.104714 (lr=2.3598e-03) (hash(x)=38654303)
+ 1910 train 4.085256 (lr=2.3499e-03) (hash(x)=37929515)
+ 1920 train 4.256851 (lr=2.3400e-03) (hash(x)=45580146)
+ 1930 train 4.195326 (lr=2.3300e-03) (hash(x)=32386330)
+ 1940 train 4.236884 (lr=2.3200e-03) (hash(x)=36331864)
+ 1950 train 4.215518 (lr=2.3099e-03) (hash(x)=41181727)
+ 1960 train 4.047160 (lr=2.2998e-03) (hash(x)=42705152)
+ 1970 train 4.156836 (lr=2.2896e-03) (hash(x)=40251511)
+ 1980 train 4.076596 (lr=2.2793e-03) (hash(x)=37525551)
+ 1990 train 4.146008 (lr=2.2691e-03) (hash(x)=41849618)
+ 2000 val loss 4.1698
+ 2000 val perplexity 64.7027
+ 2000 hella 0.2568
+ 2000 train 4.148685 (lr=2.2588e-03) (hash(x)=37642582)
+ 2010 train 4.151536 (lr=2.2484e-03) (hash(x)=42131121)
+ 2020 train 4.031137 (lr=2.2380e-03) (hash(x)=39000209)
+ 2030 train 4.123723 (lr=2.2275e-03) (hash(x)=43641355)
+ 2040 train 4.017178 (lr=2.2170e-03) (hash(x)=47910507)
+ 2050 train 4.016842 (lr=2.2065e-03) (hash(x)=36670359)
+ 2060 train 4.209729 (lr=2.1959e-03) (hash(x)=36477755)
+ 2070 train 4.299826 (lr=2.1853e-03) (hash(x)=43262487)
+ 2080 train 4.126570 (lr=2.1746e-03) (hash(x)=49546029)
+ 2090 train 4.244806 (lr=2.1639e-03) (hash(x)=39924731)
+ 2100 val loss 4.1605
+ 2100 val perplexity 64.1005
+ 2100 train 4.115921 (lr=2.1532e-03) (hash(x)=39921304)
+ 2110 train 4.059774 (lr=2.1424e-03) (hash(x)=38152788)
+ 2120 train 4.114033 (lr=2.1316e-03) (hash(x)=37977911)
+ 2130 train 4.075366 (lr=2.1208e-03) (hash(x)=34748760)
+ 2140 train 4.126271 (lr=2.1099e-03) (hash(x)=40509369)
+ 2150 train 4.100454 (lr=2.0990e-03) (hash(x)=37654262)
+ 2160 train 4.050358 (lr=2.0881e-03) (hash(x)=38139543)
+ 2170 train 3.952373 (lr=2.0771e-03) (hash(x)=42501806)
+ 2180 train 4.113502 (lr=2.0661e-03) (hash(x)=40085092)
+ 2190 train 4.053726 (lr=2.0550e-03) (hash(x)=50480193)
+ 2200 val loss 4.1400
+ 2200 val perplexity 62.8044
+ 2200 train 4.143538 (lr=2.0440e-03) (hash(x)=40604084)
+ 2210 train 4.249660 (lr=2.0329e-03) (hash(x)=41555823)
+ 2220 train 4.059656 (lr=2.0217e-03) (hash(x)=50441765)
+ 2230 train 4.167061 (lr=2.0106e-03) (hash(x)=39796580)
+ 2240 train 4.194585 (lr=1.9994e-03) (hash(x)=44127022)
+ 2250 hella 0.2571
+ 2250 train 4.095289 (lr=1.9882e-03) (hash(x)=37026826)
+ 2260 train 4.131207 (lr=1.9770e-03) (hash(x)=42133839)
+ 2270 train 4.091559 (lr=1.9657e-03) (hash(x)=38500664)
+ 2280 train 4.077365 (lr=1.9544e-03) (hash(x)=40538661)
+ 2290 train 4.083925 (lr=1.9431e-03) (hash(x)=51509210)
+ 2300 val loss 4.1168
+ 2300 val perplexity 61.3622
+ 2300 train 4.089623 (lr=1.9318e-03) (hash(x)=41952328)
+ 2310 train 4.013175 (lr=1.9205e-03) (hash(x)=39758123)
+ 2320 train 4.132835 (lr=1.9091e-03) (hash(x)=51089268)
+ 2330 train 4.188250 (lr=1.8977e-03) (hash(x)=39767618)
+ 2340 train 4.236145 (lr=1.8863e-03) (hash(x)=40409617)
+ 2350 train 4.067610 (lr=1.8749e-03) (hash(x)=40349634)
+ 2360 train 4.341906 (lr=1.8635e-03) (hash(x)=31841172)
+ 2370 train 4.134640 (lr=1.8520e-03) (hash(x)=42720539)
+ 2380 train 4.000613 (lr=1.8406e-03) (hash(x)=40998632)
+ 2390 train 4.105022 (lr=1.8291e-03) (hash(x)=40615413)
+ 2400 val loss 4.0989
+ 2400 val perplexity 60.2713
+ 2400 train 4.019584 (lr=1.8176e-03) (hash(x)=39373658)
+ 2410 train 4.063104 (lr=1.8061e-03) (hash(x)=35480858)
+ 2420 train 4.052644 (lr=1.7946e-03) (hash(x)=32241095)
+ 2430 train 4.038379 (lr=1.7830e-03) (hash(x)=36669715)
+ 2440 train 4.043463 (lr=1.7715e-03) (hash(x)=45768335)
+ 2450 train 3.951196 (lr=1.7600e-03) (hash(x)=39395055)
+ 2460 train 3.975086 (lr=1.7484e-03) (hash(x)=34899269)
+ 2470 train 4.165730 (lr=1.7368e-03) (hash(x)=43642420)
+ 2480 train 4.269686 (lr=1.7253e-03) (hash(x)=47099507)
+ 2490 train 4.145368 (lr=1.7137e-03) (hash(x)=38825558)
+ 2500 val loss 4.0867
+ 2500 val perplexity 59.5417
+ 2500 hella 0.2525
+ 2500 train 4.119425 (lr=1.7021e-03) (hash(x)=39833804)
+ 2510 train 4.216507 (lr=1.6906e-03) (hash(x)=35051654)
+ 2520 train 4.110330 (lr=1.6790e-03) (hash(x)=40291109)
+ 2530 train 4.024299 (lr=1.6674e-03) (hash(x)=36915768)
+ 2540 train 4.003747 (lr=1.6558e-03) (hash(x)=37016308)
+ 2550 train 3.963031 (lr=1.6442e-03) (hash(x)=43221777)
+ 2560 train 4.013532 (lr=1.6326e-03) (hash(x)=37233207)
+ 2570 train 3.913771 (lr=1.6210e-03) (hash(x)=37510626)
+ 2580 train 3.966712 (lr=1.6094e-03) (hash(x)=39624656)
+ 2590 train 3.820452 (lr=1.5979e-03) (hash(x)=37647501)
+ 2600 val loss 4.0759
+ 2600 val perplexity 58.9057
+ 2600 train 3.955671 (lr=1.5863e-03) (hash(x)=37200138)
+ 2610 train 3.963947 (lr=1.5747e-03) (hash(x)=43066970)
+ 2620 train 4.103914 (lr=1.5632e-03) (hash(x)=40503799)
+ 2630 train 4.041849 (lr=1.5516e-03) (hash(x)=45534660)
+ 2640 train 4.259575 (lr=1.5400e-03) (hash(x)=52358353)
+ 2650 train 4.040175 (lr=1.5285e-03) (hash(x)=41843599)
+ 2660 train 4.020873 (lr=1.5170e-03) (hash(x)=40519770)
+ 2670 train 4.106056 (lr=1.5054e-03) (hash(x)=37185602)
+ 2680 train 3.963651 (lr=1.4939e-03) (hash(x)=43805719)
+ 2690 train 3.978496 (lr=1.4824e-03) (hash(x)=39458250)
+ 2700 val loss 4.0524
+ 2700 val perplexity 57.5340
+ 2700 train 4.076158 (lr=1.4709e-03) (hash(x)=39149255)
+ 2710 train 3.920742 (lr=1.4594e-03) (hash(x)=43544861)
+ 2720 train 4.023382 (lr=1.4480e-03) (hash(x)=36858197)
+ 2730 train 3.941786 (lr=1.4365e-03) (hash(x)=37971712)
+ 2740 train 3.968019 (lr=1.4251e-03) (hash(x)=42416578)
+ 2750 hella 0.2579
+ 2750 train 3.912464 (lr=1.4137e-03) (hash(x)=53554622)
+ 2760 train 4.049701 (lr=1.4023e-03) (hash(x)=40502545)
+ 2770 train 4.019390 (lr=1.3909e-03) (hash(x)=36086417)
+ 2780 train 4.041177 (lr=1.3795e-03) (hash(x)=36242982)
+ 2790 train 4.114118 (lr=1.3682e-03) (hash(x)=41433780)
+ 2800 val loss 4.0339
+ 2800 val perplexity 56.4782
+ 2800 train 3.989080 (lr=1.3569e-03) (hash(x)=48553484)
+ 2810 train 3.928134 (lr=1.3456e-03) (hash(x)=39928650)
+ 2820 train 4.034158 (lr=1.3343e-03) (hash(x)=37891724)
+ 2830 train 4.012804 (lr=1.3230e-03) (hash(x)=34353412)
+ 2840 train 4.030173 (lr=1.3118e-03) (hash(x)=38486611)
+ 2850 train 3.951614 (lr=1.3006e-03) (hash(x)=32706934)
+ 2860 train 3.902723 (lr=1.2894e-03) (hash(x)=38693681)
+ 2870 train 3.963256 (lr=1.2783e-03) (hash(x)=38430800)
+ 2880 train 3.941194 (lr=1.2671e-03) (hash(x)=40619559)
+ 2890 train 3.915377 (lr=1.2560e-03) (hash(x)=42741066)
+ 2900 val loss 4.0370
+ 2900 val perplexity 56.6552
+ 2900 train 4.174275 (lr=1.2450e-03) (hash(x)=38216091)
+ 2910 train 3.969954 (lr=1.2339e-03) (hash(x)=35447832)
+ 2920 train 4.082445 (lr=1.2229e-03) (hash(x)=39825190)
+ 2930 train 4.095571 (lr=1.2119e-03) (hash(x)=37789121)
+ 2940 train 3.974077 (lr=1.2010e-03) (hash(x)=44499116)
+ 2950 train 4.057188 (lr=1.1901e-03) (hash(x)=39669860)
+ 2960 train 3.997952 (lr=1.1792e-03) (hash(x)=51683741)
+ 2970 train 3.859407 (lr=1.1684e-03) (hash(x)=36399721)
+ 2980 train 3.957276 (lr=1.1576e-03) (hash(x)=42629700)
+ 2990 train 3.848324 (lr=1.1468e-03) (hash(x)=39263773)
+ 3000 val loss 4.0096
+ 3000 val perplexity 55.1230
+ 3000 hella 0.2558
+ 3000 train 3.886480 (lr=1.1361e-03) (hash(x)=37920485)
+ 3010 train 3.931975 (lr=1.1254e-03) (hash(x)=40835161)
+ 3020 train 3.903965 (lr=1.1147e-03) (hash(x)=38381159)
+ 3030 train 3.894137 (lr=1.1041e-03) (hash(x)=35846270)
+ 3040 train 4.284738 (lr=1.0935e-03) (hash(x)=47516567)
+ 3050 train 4.121820 (lr=1.0830e-03) (hash(x)=35600311)
+ 3060 train 4.153137 (lr=1.0725e-03) (hash(x)=37775318)
+ 3070 train 4.010718 (lr=1.0620e-03) (hash(x)=39881333)
+ 3080 train 4.014673 (lr=1.0516e-03) (hash(x)=37310168)
+ 3090 train 3.940110 (lr=1.0412e-03) (hash(x)=43669978)
+ 3100 val loss 4.0008
+ 3100 val perplexity 54.6412
+ 3100 train 4.401735 (lr=1.0309e-03) (hash(x)=46356797)
+ 3110 train 4.038669 (lr=1.0207e-03) (hash(x)=40814232)
+ 3120 train 3.881634 (lr=1.0104e-03) (hash(x)=42504837)
+ 3130 train 4.021580 (lr=1.0002e-03) (hash(x)=39693594)
+ 3140 train 3.920084 (lr=9.9011e-04) (hash(x)=40277645)
+ 3150 train 3.934187 (lr=9.8002e-04) (hash(x)=40988003)
+ 3160 train 4.049877 (lr=9.6999e-04) (hash(x)=38875266)
+ 3170 train 4.002867 (lr=9.6000e-04) (hash(x)=45232173)
+ 3180 train 4.056527 (lr=9.5007e-04) (hash(x)=39213336)
+ 3190 train 4.114961 (lr=9.4019e-04) (hash(x)=42118576)
+ 3200 val loss 3.9777
+ 3200 val perplexity 53.3921
+ 3200 train 3.970934 (lr=9.3036e-04) (hash(x)=32884223)
+ 3210 train 4.021145 (lr=9.2058e-04) (hash(x)=41276800)
+ 3220 train 4.064268 (lr=9.1085e-04) (hash(x)=40284461)
+ 3230 train 4.047451 (lr=9.0118e-04) (hash(x)=40566734)
+ 3240 train 4.033525 (lr=8.9157e-04) (hash(x)=36484570)
+ 3250 hella 0.2545
+ 3250 train 3.996436 (lr=8.8201e-04) (hash(x)=41642338)
+ 3260 train 3.961580 (lr=8.7251e-04) (hash(x)=43883570)
+ 3270 train 3.902934 (lr=8.6307e-04) (hash(x)=40432560)
+ 3280 train 3.869506 (lr=8.5368e-04) (hash(x)=38002717)
+ 3290 train 3.816622 (lr=8.4435e-04) (hash(x)=41926004)
+ 3300 val loss 3.9714
+ 3300 val perplexity 53.0584
+ 3300 train 3.977376 (lr=8.3508e-04) (hash(x)=46015509)
+ 3310 train 4.006918 (lr=8.2588e-04) (hash(x)=40112249)
+ 3320 train 3.985246 (lr=8.1673e-04) (hash(x)=49162296)
+ 3330 train 4.043537 (lr=8.0764e-04) (hash(x)=46606969)
+ 3340 train 4.059159 (lr=7.9862e-04) (hash(x)=31128992)
+ 3350 train 4.036598 (lr=7.8966e-04) (hash(x)=41232534)
+ 3360 train 3.882490 (lr=7.8076e-04) (hash(x)=41599699)
+ 3370 train 4.050071 (lr=7.7192e-04) (hash(x)=40885280)
+ 3380 train 3.968867 (lr=7.6315e-04) (hash(x)=37169148)
+ 3390 train 3.871869 (lr=7.5445e-04) (hash(x)=46581889)
+ 3400 val loss 3.9572
+ 3400 val perplexity 52.3120
+ 3400 train 3.919286 (lr=7.4581e-04) (hash(x)=37612074)
+ 3410 train 3.991211 (lr=7.3724e-04) (hash(x)=38763316)
+ 3420 train 3.817677 (lr=7.2874e-04) (hash(x)=40562379)
+ 3430 train 3.920938 (lr=7.2030e-04) (hash(x)=44724867)
+ 3440 train 4.124046 (lr=7.1193e-04) (hash(x)=39386624)
+ 3450 train 4.073284 (lr=7.0363e-04) (hash(x)=38652923)
+ 3460 train 4.054944 (lr=6.9541e-04) (hash(x)=29587379)
+ 3470 train 3.960610 (lr=6.8725e-04) (hash(x)=38228776)
+ 3480 train 3.934063 (lr=6.7916e-04) (hash(x)=30076039)
+ 3490 train 3.996801 (lr=6.7114e-04) (hash(x)=38398908)
+ 3500 val loss 3.9444
+ 3500 val perplexity 51.6431
+ 3500 hella 0.2557
+ 3500 train 4.008248 (lr=6.6320e-04) (hash(x)=39259918)
+ 3510 train 3.905181 (lr=6.5533e-04) (hash(x)=40664091)
+ 3520 train 3.944775 (lr=6.4753e-04) (hash(x)=40309647)
+ 3530 train 3.907890 (lr=6.3981e-04) (hash(x)=38424801)
+ 3540 train 3.831909 (lr=6.3216e-04) (hash(x)=51322307)
+ 3550 train 3.894713 (lr=6.2458e-04) (hash(x)=38192628)
+ 3560 train 3.949013 (lr=6.1708e-04) (hash(x)=41563952)
+ 3570 train 3.897491 (lr=6.0966e-04) (hash(x)=39508843)
+ 3580 train 3.970937 (lr=6.0231e-04) (hash(x)=41260225)
+ 3590 train 3.954254 (lr=5.9504e-04) (hash(x)=49098107)
+ 3600 val loss 3.9337
+ 3600 val perplexity 51.0952
+ 3600 train 3.934903 (lr=5.8785e-04) (hash(x)=41194370)
+ 3610 train 4.050055 (lr=5.8074e-04) (hash(x)=40399152)
+ 3620 train 4.013094 (lr=5.7370e-04) (hash(x)=41625018)
+ 3630 train 4.040465 (lr=5.6675e-04) (hash(x)=44077942)
+ 3640 train 4.035549 (lr=5.5987e-04) (hash(x)=42427512)
+ 3650 train 4.016650 (lr=5.5308e-04) (hash(x)=40067454)
+ 3660 train 3.943001 (lr=5.4636e-04) (hash(x)=37969892)
+ 3670 train 3.915255 (lr=5.3973e-04) (hash(x)=43670246)
+ 3680 train 3.992107 (lr=5.3318e-04) (hash(x)=37031732)
+ 3690 train 3.872420 (lr=5.2671e-04) (hash(x)=39237187)
+ 3700 val loss 3.9263
+ 3700 val perplexity 50.7199
+ 3700 train 3.912562 (lr=5.2033e-04) (hash(x)=39521416)
+ 3710 train 3.806918 (lr=5.1402e-04) (hash(x)=41147425)
+ 3720 train 3.799218 (lr=5.0780e-04) (hash(x)=40204658)
+ 3730 train 3.893710 (lr=5.0167e-04) (hash(x)=39420983)
+ 3740 train 3.938824 (lr=4.9562e-04) (hash(x)=51155740)
+ 3750 hella 0.2556
+ 3750 train 3.929397 (lr=4.8965e-04) (hash(x)=41654187)
+ 3760 train 3.885113 (lr=4.8377e-04) (hash(x)=40598339)
+ 3770 train 3.848436 (lr=4.7798e-04) (hash(x)=44950116)
+ 3780 train 3.910052 (lr=4.7227e-04) (hash(x)=40660326)
+ 3790 train 3.959883 (lr=4.6665e-04) (hash(x)=37805851)
+ 3800 val loss 3.9093
+ 3800 val perplexity 49.8658
+ 3800 train 3.927743 (lr=4.6112e-04) (hash(x)=38064443)
+ 3810 train 3.996975 (lr=4.5567e-04) (hash(x)=39161498)
+ 3820 train 3.844104 (lr=4.5031e-04) (hash(x)=40626649)
+ 3830 train 3.917085 (lr=4.4504e-04) (hash(x)=36894771)
+ 3840 train 3.859523 (lr=4.3986e-04) (hash(x)=33672141)
+ 3850 train 3.816703 (lr=4.3477e-04) (hash(x)=41751813)
+ 3860 train 3.693633 (lr=4.2977e-04) (hash(x)=33784172)
+ 3870 train 3.619274 (lr=4.2486e-04) (hash(x)=43110493)
+ 3880 train 3.723425 (lr=4.2004e-04) (hash(x)=36585111)
+ 3890 train 3.726855 (lr=4.1530e-04) (hash(x)=40134264)
+ 3900 val loss 3.9166
+ 3900 val perplexity 50.2301
+ 3900 train 3.724312 (lr=4.1066e-04) (hash(x)=39815215)
+ 3910 train 3.859156 (lr=4.0611e-04) (hash(x)=42081557)
+ 3920 train 3.800432 (lr=4.0166e-04) (hash(x)=48184973)
+ 3930 train 3.992491 (lr=3.9729e-04) (hash(x)=42416681)
+ 3940 train 3.913870 (lr=3.9302e-04) (hash(x)=38587062)
+ 3950 train 3.912656 (lr=3.8884e-04) (hash(x)=37296617)
+ 3960 train 3.887910 (lr=3.8475e-04) (hash(x)=37959330)
+ 3970 train 3.913980 (lr=3.8076e-04) (hash(x)=35175634)
+ 3980 train 3.962816 (lr=3.7685e-04) (hash(x)=37219362)
+ 3990 train 3.987650 (lr=3.7305e-04) (hash(x)=38906132)
+ 4000 val loss 3.8945
+ 4000 val perplexity 49.1293
+ 4000 hella 0.2543
+ 4000 train 3.906464 (lr=3.6933e-04) (hash(x)=39940517)
+ 4010 train 3.919153 (lr=3.6572e-04) (hash(x)=39952437)
+ 4020 train 3.842512 (lr=3.6219e-04) (hash(x)=42000930)
+ 4030 train 3.851882 (lr=3.5876e-04) (hash(x)=41460672)
+ 4040 train 3.594108 (lr=3.5543e-04) (hash(x)=43702329)
+ 4050 train 3.692787 (lr=3.5219e-04) (hash(x)=41740121)
+ 4060 train 3.685658 (lr=3.4905e-04) (hash(x)=37066349)
+ 4070 train 3.530720 (lr=3.4600e-04) (hash(x)=38359869)
+ 4080 train 3.775069 (lr=3.4305e-04) (hash(x)=35131370)
+ 4090 train 3.835430 (lr=3.4019e-04) (hash(x)=39918811)
+ 4100 val loss 3.8937
+ 4100 val perplexity 49.0930
+ 4100 train 3.992283 (lr=3.3744e-04) (hash(x)=47036374)
+ 4110 train 3.916094 (lr=3.3477e-04) (hash(x)=37657523)
+ 4120 train 3.960736 (lr=3.3221e-04) (hash(x)=37218703)
+ 4130 train 3.978606 (lr=3.2974e-04) (hash(x)=41350513)
+ 4140 train 3.997961 (lr=3.2737e-04) (hash(x)=38283785)
+ 4150 train 3.979468 (lr=3.2510e-04) (hash(x)=32927892)
+ 4160 train 3.858462 (lr=3.2292e-04) (hash(x)=42011933)
+ 4170 train 3.882576 (lr=3.2085e-04) (hash(x)=41854594)
+ 4180 train 3.885392 (lr=3.1887e-04) (hash(x)=36737064)
+ 4190 train 3.800594 (lr=3.1699e-04) (hash(x)=36326176)
+ 4200 val loss 3.8863
+ 4200 val perplexity 48.7305
+ 4200 train 3.875670 (lr=3.1520e-04) (hash(x)=39106683)
+ 4210 train 3.922618 (lr=3.1352e-04) (hash(x)=39759977)
+ 4220 train 3.666094 (lr=3.1193e-04) (hash(x)=43666151)
+ 4230 train 3.612448 (lr=3.1044e-04) (hash(x)=38154367)
+ 4240 train 3.720786 (lr=3.0905e-04) (hash(x)=38284168)
+ 4250 hella 0.2542
+ 4250 train 3.661386 (lr=3.0776e-04) (hash(x)=38888879)
+ 4260 train 3.572495 (lr=3.0657e-04) (hash(x)=36846417)
+ 4270 train 3.975729 (lr=3.0548e-04) (hash(x)=43125375)
+ 4280 train 3.936632 (lr=3.0449e-04) (hash(x)=43556412)
+ 4290 train 3.864242 (lr=3.0359e-04) (hash(x)=48598302)
+ 4300 val loss 3.8809
+ 4300 val perplexity 48.4674
+ 4300 train 3.848815 (lr=3.0280e-04) (hash(x)=42764385)
+ 4310 train 3.903479 (lr=3.0210e-04) (hash(x)=41190028)
+ 4320 train 3.869745 (lr=3.0150e-04) (hash(x)=35767770)
+ 4330 train 3.948255 (lr=3.0101e-04) (hash(x)=36760476)
+ 4340 train 3.889231 (lr=3.0061e-04) (hash(x)=40618921)
+ 4350 train 3.988540 (lr=3.0031e-04) (hash(x)=32300164)
+ 4360 train 3.843482 (lr=3.0011e-04) (hash(x)=41400085)
+ 4370 train 3.881542 (lr=3.0001e-04) (hash(x)=40251828)
+ 4374 val loss 3.8807
+ 4374 val perplexity 48.4571
+ 4374 hella 0.2538
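(The log interleaves three streams: a train line every 10 steps with loss, current LR, and a hash of the input batch; a validation loss/perplexity pair every 100 steps; and a HellaSwag accuracy (`hella`) line every 250 steps, plus a final evaluation at step 4374. The reported perplexity is exp of the validation loss, e.g. exp(3.8807) ≈ 48.46, matching the last evaluation. A small sketch that recovers the evaluation curves; the file path is the one above, and note the leading `+ ` is diff markup, not part of the file:)

```python
import re

# Illustrative sketch: pull the val-loss and hella curves out of log2.txt.
# The regexes match the line formats visible in the diff above.
val_losses, hella = [], []
with open("two_latent_masks/two_latent_masks_seed_1340/log2.txt") as f:
    for line in f:
        if m := re.match(r"(\d+) val loss ([\d.]+)", line):
            val_losses.append((int(m.group(1)), float(m.group(2))))
        elif m := re.match(r"(\d+) hella ([\d.]+)", line):
            hella.append((int(m.group(1)), float(m.group(2))))

print(val_losses[0])   # (0, 11.2067)
print(val_losses[-1])  # (4374, 3.8807)
print(hella[-1])       # (4374, 0.2538)
```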
two_latent_masks/two_latent_masks_seed_1340/model_04374.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1554e2a1e5d463d54009cd375389e98feb347403abbce57d70ce8c96ce94ba95
+ size 97707506
two_latent_masks/two_latent_masks_seed_1340/optimizer_04374.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:15d1a5a2c415680bba6276e2fccd6c90f9063a6bbb4d8544afb91d263268be62
+ size 189136950
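(These step-4374 checkpoints are presumably what the `resume_checkpoint`/`resume_optimizer` options in args.json consume. After `git lfs pull` materializes the real files, a speculative sketch for inspecting them; that they are plain `torch.save` artifacts, and their internal state-dict layout, are assumptions not documented by this repo:)

```python
import torch

# Speculative: assumes these are ordinary torch.save artifacts.
base = "two_latent_masks/two_latent_masks_seed_1340"
model_state = torch.load(f"{base}/model_04374.pt", map_location="cpu")
optim_state = torch.load(f"{base}/optimizer_04374.pt", map_location="cpu")

# The optimizer file (~189 MB) is roughly twice the model file (~98 MB),
# consistent with an Adam-family optimizer storing two moment tensors
# per trainable parameter.
print(type(model_state), type(optim_state))
```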