andrew-healey committed (verified)
Commit eac01f3 · 1 Parent(s): 33cc5eb

Upload folder using huggingface_hub

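The commit message indicates this folder was pushed with the huggingface_hub client. A minimal sketch of how such a commit is typically produced is below; the `repo_id` is hypothetical and only the folder name and commit message come from this page.

```python
# Minimal sketch of producing a commit like this one with huggingface_hub.
# The repo_id is hypothetical; the folder path and commit message mirror this page.
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="lr6e-4_total_batch_size61440_baseline_seed1339",
    path_in_repo="lr6e-4_total_batch_size61440_baseline_seed1339",
    repo_id="andrew-healey/wider_is_better_11",  # hypothetical repo_id
    commit_message="Upload folder using huggingface_hub",
)
```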
lr6e-4_total_batch_size61440_baseline_seed1339/args.json ADDED
@@ -0,0 +1 @@
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_11/lr6e-4_total_batch_size61440_baseline_seed1339", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_11", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1339, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.0006, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "6e-4_61440", "n_embd": 256}
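A quick way to sanity-check this config after downloading it is to load the JSON and recompute the batch math. The sketch below assumes `total_batch_size` is counted in tokens (consistent with `batch_size * seq_len = 30720` dividing it evenly); that interpretation is an assumption, not something the file states.

```python
# Minimal sketch: inspect the run config and check the batch arithmetic.
# Assumes the folder was downloaded locally; the relative path is illustrative.
import json

with open("lr6e-4_total_batch_size61440_baseline_seed1339/args.json") as f:
    args = json.load(f)

tokens_per_micro_batch = args["batch_size"] * args["seq_len"]  # 120 * 256 = 30720
# If total_batch_size is in tokens (an assumption), this run accumulates
# gradients over 61440 // 30720 = 2 micro-batches per optimizer step.
accum_steps = args["total_batch_size"] // tokens_per_micro_batch
print(args["max_lr"], args["max_steps"], accum_steps)  # 0.0006 8750 2
```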
lr6e-4_total_batch_size61440_baseline_seed1339/dataloader_08749.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:953385078aa3787b69fc6857dfd48b0a2cd2f4d27c6f8892e01211aca53d07f5
+ size 964
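The three-line blocks like the one above are Git LFS pointer files: the repo stores only the hash and byte size, and the actual binary lives in LFS storage. `hf_hub_download` resolves the pointer and fetches the real file; the sketch below uses a hypothetical `repo_id`.

```python
# Minimal sketch: fetch the real binary behind a Git LFS pointer on the Hub.
# hf_hub_download resolves LFS pointers transparently; repo_id is hypothetical.
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="andrew-healey/wider_is_better_11",  # hypothetical repo_id
    filename="lr6e-4_total_batch_size61440_baseline_seed1339/dataloader_08749.pt",
)
print(path)  # local cache path to the 964-byte dataloader state
```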
lr6e-4_total_batch_size61440_baseline_seed1339/log2.txt ADDED
@@ -0,0 +1,1054 @@
+ max_steps: 8750
+ 0 val loss 11.2592
+ 0 val perplexity 77590.4141
+ 0 train 11.272774 (lr=1.2000e-06) (hash(x)=150724848)
+ 10 train 10.348531 (lr=1.3200e-05) (hash(x)=149663361)
+ 20 train 9.881202 (lr=2.5200e-05) (hash(x)=153483789)
+ 30 train 9.705391 (lr=3.7200e-05) (hash(x)=145733176)
+ 40 train 9.268219 (lr=4.9200e-05) (hash(x)=145287265)
+ 50 train 8.903151 (lr=6.1200e-05) (hash(x)=149217061)
+ 60 train 8.471861 (lr=7.3200e-05) (hash(x)=150157569)
+ 70 train 8.082783 (lr=8.5200e-05) (hash(x)=140292841)
+ 80 train 7.880149 (lr=9.7200e-05) (hash(x)=152157598)
+ 90 train 7.629221 (lr=1.0920e-04) (hash(x)=149756968)
+ 100 val loss 7.5905
+ 100 val perplexity 1979.2290
+ 100 train 7.560264 (lr=1.2120e-04) (hash(x)=149910534)
+ 110 train 7.479685 (lr=1.3320e-04) (hash(x)=159020328)
+ 120 train 7.438631 (lr=1.4520e-04) (hash(x)=150731273)
+ 130 train 7.267333 (lr=1.5720e-04) (hash(x)=156727355)
+ 140 train 7.303560 (lr=1.6920e-04) (hash(x)=166473281)
+ 150 train 6.991832 (lr=1.8120e-04) (hash(x)=142185643)
+ 160 train 6.989021 (lr=1.9320e-04) (hash(x)=151371145)
+ 170 train 7.010696 (lr=2.0520e-04) (hash(x)=151601659)
+ 180 train 6.841847 (lr=2.1720e-04) (hash(x)=153368917)
+ 190 train 6.697760 (lr=2.2920e-04) (hash(x)=145008196)
+ 200 val loss 6.8087
+ 200 val perplexity 905.6707
+ 200 train 6.711312 (lr=2.4120e-04) (hash(x)=148123706)
+ 210 train 6.587342 (lr=2.5320e-04) (hash(x)=145714762)
+ 220 train 6.576862 (lr=2.6520e-04) (hash(x)=150622583)
+ 230 train 6.375014 (lr=2.7720e-04) (hash(x)=146512648)
+ 240 train 6.406185 (lr=2.8920e-04) (hash(x)=151535384)
+ 250 train 6.314778 (lr=3.0120e-04) (hash(x)=149952383)
+ 260 train 6.231345 (lr=3.1320e-04) (hash(x)=148726088)
+ 270 train 6.193257 (lr=3.2520e-04) (hash(x)=152776695)
+ 280 train 6.235998 (lr=3.3720e-04) (hash(x)=157260375)
+ 290 train 6.272188 (lr=3.4920e-04) (hash(x)=151559106)
+ 300 val loss 6.3377
+ 300 val perplexity 565.4866
+ 300 train 6.081059 (lr=3.6120e-04) (hash(x)=146678221)
+ 310 train 6.176166 (lr=3.7320e-04) (hash(x)=156713527)
+ 320 train 6.220588 (lr=3.8520e-04) (hash(x)=144947971)
+ 330 train 6.391332 (lr=3.9720e-04) (hash(x)=158684944)
+ 340 train 6.256314 (lr=4.0920e-04) (hash(x)=162318756)
+ 350 train 6.058861 (lr=4.2120e-04) (hash(x)=156180736)
+ 360 train 6.249966 (lr=4.3320e-04) (hash(x)=178373797)
+ 370 train 6.135502 (lr=4.4520e-04) (hash(x)=154389449)
+ 380 train 5.965200 (lr=4.5720e-04) (hash(x)=150369080)
+ 390 train 6.098134 (lr=4.6920e-04) (hash(x)=159840654)
+ 400 val loss 5.9844
+ 400 val perplexity 397.2034
+ 400 train 5.886833 (lr=4.8120e-04) (hash(x)=151700982)
+ 410 train 5.883242 (lr=4.9320e-04) (hash(x)=160467274)
+ 420 train 5.801489 (lr=5.0520e-04) (hash(x)=148930522)
+ 430 train 6.008842 (lr=5.1720e-04) (hash(x)=154565303)
+ 440 train 5.968906 (lr=5.2920e-04) (hash(x)=148079227)
+ 450 train 5.706450 (lr=5.4120e-04) (hash(x)=147288467)
+ 460 train 5.724346 (lr=5.5320e-04) (hash(x)=145272617)
+ 470 train 5.727170 (lr=5.6520e-04) (hash(x)=123126729)
+ 480 train 5.695118 (lr=5.7720e-04) (hash(x)=156287490)
+ 490 train 5.821955 (lr=5.8920e-04) (hash(x)=161700145)
+ 500 val loss 5.7330
+ 500 val perplexity 308.9048
+ 500 train 5.843773 (lr=6.0000e-04) (hash(x)=156182087)
+ 510 train 5.446055 (lr=6.0000e-04) (hash(x)=131674504)
+ 520 train 5.762048 (lr=5.9999e-04) (hash(x)=162758254)
+ 530 train 5.569715 (lr=5.9998e-04) (hash(x)=133510409)
+ 540 train 5.574682 (lr=5.9997e-04) (hash(x)=157363914)
+ 550 train 5.495609 (lr=5.9995e-04) (hash(x)=147861550)
+ 560 train 5.445497 (lr=5.9993e-04) (hash(x)=149449280)
+ 570 train 5.451368 (lr=5.9990e-04) (hash(x)=143899904)
+ 580 train 5.428978 (lr=5.9987e-04) (hash(x)=149266312)
+ 590 train 5.653462 (lr=5.9984e-04) (hash(x)=164352774)
+ 600 val loss 5.5497
+ 600 val perplexity 257.1707
+ 600 train 5.425047 (lr=5.9980e-04) (hash(x)=149318660)
+ 610 train 5.324417 (lr=5.9976e-04) (hash(x)=146101781)
+ 620 train 5.280165 (lr=5.9972e-04) (hash(x)=137184623)
+ 630 train 5.170963 (lr=5.9967e-04) (hash(x)=145791873)
+ 640 train 5.191816 (lr=5.9962e-04) (hash(x)=143999212)
+ 650 train 5.296673 (lr=5.9956e-04) (hash(x)=151631103)
+ 660 train 5.056845 (lr=5.9950e-04) (hash(x)=138360108)
+ 670 train 5.425130 (lr=5.9943e-04) (hash(x)=156637679)
+ 680 train 5.071157 (lr=5.9937e-04) (hash(x)=143752601)
+ 690 train 5.232902 (lr=5.9929e-04) (hash(x)=158368903)
+ 700 val loss 5.4376
+ 700 val perplexity 229.8988
+ 700 train 5.435502 (lr=5.9922e-04) (hash(x)=150482428)
+ 710 train 5.390243 (lr=5.9914e-04) (hash(x)=158085060)
+ 720 train 5.451671 (lr=5.9905e-04) (hash(x)=159461069)
+ 730 train 5.295540 (lr=5.9897e-04) (hash(x)=148795562)
+ 740 train 5.311264 (lr=5.9887e-04) (hash(x)=149967984)
+ 750 train 5.293331 (lr=5.9878e-04) (hash(x)=144458617)
+ 760 train 5.335251 (lr=5.9868e-04) (hash(x)=155106202)
+ 770 train 5.139704 (lr=5.9857e-04) (hash(x)=144506191)
+ 780 train 5.283480 (lr=5.9847e-04) (hash(x)=155825158)
+ 790 train 5.339315 (lr=5.9836e-04) (hash(x)=158835381)
+ 800 val loss 5.2758
+ 800 val perplexity 195.5408
+ 800 train 5.221201 (lr=5.9824e-04) (hash(x)=143268605)
+ 810 train 5.131927 (lr=5.9812e-04) (hash(x)=139072755)
+ 820 train 5.289010 (lr=5.9800e-04) (hash(x)=179384792)
+ 830 train 5.067116 (lr=5.9787e-04) (hash(x)=151086962)
+ 840 train 5.661775 (lr=5.9774e-04) (hash(x)=142711393)
+ 850 train 5.152798 (lr=5.9761e-04) (hash(x)=150817011)
+ 860 train 5.079017 (lr=5.9747e-04) (hash(x)=138495801)
+ 870 train 5.115953 (lr=5.9732e-04) (hash(x)=155474016)
+ 880 train 5.142139 (lr=5.9718e-04) (hash(x)=151951501)
+ 890 train 5.010257 (lr=5.9703e-04) (hash(x)=143699524)
+ 900 val loss 5.1649
+ 900 val perplexity 175.0215
+ 900 train 5.159478 (lr=5.9687e-04) (hash(x)=152322423)
+ 910 train 5.104273 (lr=5.9672e-04) (hash(x)=151010509)
+ 920 train 4.893851 (lr=5.9655e-04) (hash(x)=148720342)
+ 930 train 4.846943 (lr=5.9639e-04) (hash(x)=125869480)
+ 940 train 5.102963 (lr=5.9622e-04) (hash(x)=150349503)
+ 950 train 4.882380 (lr=5.9605e-04) (hash(x)=143574126)
+ 960 train 5.064518 (lr=5.9587e-04) (hash(x)=146292562)
+ 970 train 5.011444 (lr=5.9569e-04) (hash(x)=162256596)
+ 980 train 4.948880 (lr=5.9550e-04) (hash(x)=137640259)
+ 990 train 4.855735 (lr=5.9531e-04) (hash(x)=141767794)
+ 1000 val loss 5.1091
+ 1000 val perplexity 165.5265
+ 1000 train 5.042518 (lr=5.9512e-04) (hash(x)=147904298)
+ 1010 train 4.974709 (lr=5.9492e-04) (hash(x)=162239186)
+ 1020 train 4.734469 (lr=5.9472e-04) (hash(x)=140706537)
+ 1030 train 4.853150 (lr=5.9452e-04) (hash(x)=155556720)
+ 1040 train 4.743192 (lr=5.9431e-04) (hash(x)=144746445)
+ 1050 train 4.839166 (lr=5.9410e-04) (hash(x)=144719400)
+ 1060 train 4.652897 (lr=5.9388e-04) (hash(x)=153065665)
+ 1070 train 4.803423 (lr=5.9366e-04) (hash(x)=136468722)
+ 1080 train 4.792062 (lr=5.9344e-04) (hash(x)=157674756)
+ 1090 train 4.941757 (lr=5.9321e-04) (hash(x)=155882884)
+ 1100 val loss 5.0126
+ 1100 val perplexity 150.2885
+ 1100 train 5.242490 (lr=5.9298e-04) (hash(x)=154343147)
+ 1110 train 5.006681 (lr=5.9275e-04) (hash(x)=158582939)
+ 1120 train 4.827847 (lr=5.9251e-04) (hash(x)=162479784)
+ 1130 train 5.024753 (lr=5.9227e-04) (hash(x)=155704345)
+ 1140 train 4.898115 (lr=5.9202e-04) (hash(x)=146934299)
+ 1150 train 4.857088 (lr=5.9177e-04) (hash(x)=154372635)
+ 1160 train 4.816307 (lr=5.9152e-04) (hash(x)=145536832)
+ 1170 train 5.123067 (lr=5.9126e-04) (hash(x)=158785901)
+ 1180 train 4.930710 (lr=5.9100e-04) (hash(x)=158271614)
+ 1190 train 4.950810 (lr=5.9073e-04) (hash(x)=157365589)
+ 1200 val loss 4.9073
+ 1200 val perplexity 135.2674
+ 1200 train 4.956767 (lr=5.9046e-04) (hash(x)=141843115)
+ 1210 train 4.872190 (lr=5.9019e-04) (hash(x)=146807973)
+ 1220 train 4.841632 (lr=5.8992e-04) (hash(x)=144149947)
+ 1230 train 4.780746 (lr=5.8963e-04) (hash(x)=147869063)
+ 1240 train 4.859216 (lr=5.8935e-04) (hash(x)=152217389)
+ 1250 train 4.843330 (lr=5.8906e-04) (hash(x)=149417679)
+ 1260 train 4.890008 (lr=5.8877e-04) (hash(x)=144029489)
+ 1270 train 4.985224 (lr=5.8848e-04) (hash(x)=154328303)
+ 1280 train 4.849533 (lr=5.8818e-04) (hash(x)=151625838)
+ 1290 train 4.716599 (lr=5.8787e-04) (hash(x)=140270541)
+ 1300 val loss 4.8360
+ 1300 val perplexity 125.9631
+ 1300 train 4.773347 (lr=5.8757e-04) (hash(x)=145279030)
+ 1310 train 4.799668 (lr=5.8726e-04) (hash(x)=140279630)
+ 1320 train 4.521388 (lr=5.8694e-04) (hash(x)=133642994)
+ 1330 train 4.691833 (lr=5.8663e-04) (hash(x)=146634776)
+ 1340 train 4.794439 (lr=5.8630e-04) (hash(x)=155919973)
+ 1350 train 4.814174 (lr=5.8598e-04) (hash(x)=146363911)
+ 1360 train 4.686668 (lr=5.8565e-04) (hash(x)=150613636)
+ 1370 train 4.676257 (lr=5.8532e-04) (hash(x)=148536643)
+ 1380 train 4.686653 (lr=5.8498e-04) (hash(x)=152803906)
+ 1390 train 4.633714 (lr=5.8464e-04) (hash(x)=150383305)
+ 1400 val loss 4.7960
+ 1400 val perplexity 121.0193
+ 1400 train 4.692439 (lr=5.8430e-04) (hash(x)=152507639)
+ 1410 train 4.454031 (lr=5.8395e-04) (hash(x)=161841408)
+ 1420 train 4.586825 (lr=5.8360e-04) (hash(x)=167824828)
+ 1430 train 4.542679 (lr=5.8324e-04) (hash(x)=147696374)
+ 1440 train 4.496708 (lr=5.8289e-04) (hash(x)=150085494)
+ 1450 train 4.447160 (lr=5.8252e-04) (hash(x)=139296191)
+ 1460 train 4.371064 (lr=5.8216e-04) (hash(x)=142956546)
+ 1470 train 4.450001 (lr=5.8179e-04) (hash(x)=155908628)
+ 1480 train 4.425796 (lr=5.8142e-04) (hash(x)=150893345)
+ 1490 train 4.445243 (lr=5.8104e-04) (hash(x)=143548902)
+ 1500 val loss 4.7301
+ 1500 val perplexity 113.3080
+ 1500 train 4.701874 (lr=5.8066e-04) (hash(x)=148473774)
+ 1510 train 4.771822 (lr=5.8028e-04) (hash(x)=161631000)
+ 1520 train 4.712205 (lr=5.7989e-04) (hash(x)=166217855)
+ 1530 train 4.657665 (lr=5.7950e-04) (hash(x)=149440046)
+ 1540 train 4.601507 (lr=5.7910e-04) (hash(x)=155013278)
+ 1550 train 4.617747 (lr=5.7870e-04) (hash(x)=154130527)
+ 1560 train 4.660770 (lr=5.7830e-04) (hash(x)=152138735)
+ 1570 train 4.563497 (lr=5.7790e-04) (hash(x)=148128305)
+ 1580 train 4.708984 (lr=5.7749e-04) (hash(x)=157176546)
+ 1590 train 4.805791 (lr=5.7707e-04) (hash(x)=152935467)
+ 1600 val loss 4.6581
+ 1600 val perplexity 105.4339
+ 1600 train 4.635081 (lr=5.7666e-04) (hash(x)=151117002)
+ 1610 train 4.615851 (lr=5.7624e-04) (hash(x)=149883384)
+ 1620 train 4.683335 (lr=5.7581e-04) (hash(x)=145015354)
+ 1630 train 4.592213 (lr=5.7539e-04) (hash(x)=146717645)
+ 1640 train 4.804672 (lr=5.7496e-04) (hash(x)=156372727)
+ 1650 train 4.707918 (lr=5.7452e-04) (hash(x)=140261555)
+ 1660 train 4.537348 (lr=5.7408e-04) (hash(x)=150088650)
+ 1670 train 4.458913 (lr=5.7364e-04) (hash(x)=135943513)
+ 1680 train 4.689734 (lr=5.7320e-04) (hash(x)=153529653)
+ 1690 train 4.733294 (lr=5.7275e-04) (hash(x)=148930360)
+ 1700 val loss 4.6493
+ 1700 val perplexity 104.5153
+ 1700 train 4.629510 (lr=5.7230e-04) (hash(x)=138011335)
+ 1710 train 4.586173 (lr=5.7184e-04) (hash(x)=141383599)
+ 1720 train 4.764947 (lr=5.7138e-04) (hash(x)=161449652)
+ 1730 train 4.559359 (lr=5.7092e-04) (hash(x)=153923561)
+ 1740 train 4.498707 (lr=5.7045e-04) (hash(x)=146043253)
+ 1750 train 4.524877 (lr=5.6999e-04) (hash(x)=155720883)
+ 1760 train 4.497934 (lr=5.6951e-04) (hash(x)=148222148)
+ 1770 train 4.568258 (lr=5.6904e-04) (hash(x)=155712380)
+ 1780 train 4.451543 (lr=5.6856e-04) (hash(x)=149767859)
+ 1790 train 4.688375 (lr=5.6807e-04) (hash(x)=167214230)
+ 1800 val loss 4.6062
+ 1800 val perplexity 100.1025
+ 1800 train 4.582571 (lr=5.6759e-04) (hash(x)=171180926)
+ 1810 train 4.596643 (lr=5.6710e-04) (hash(x)=162375834)
+ 1820 train 4.715477 (lr=5.6660e-04) (hash(x)=151495752)
+ 1830 train 4.626053 (lr=5.6611e-04) (hash(x)=146532257)
+ 1840 train 4.706219 (lr=5.6561e-04) (hash(x)=145694434)
+ 1850 train 4.594100 (lr=5.6510e-04) (hash(x)=144823488)
+ 1860 train 4.681180 (lr=5.6459e-04) (hash(x)=149836286)
+ 1870 train 4.653625 (lr=5.6408e-04) (hash(x)=143935049)
+ 1880 train 4.677234 (lr=5.6357e-04) (hash(x)=151786371)
+ 1890 train 4.573680 (lr=5.6305e-04) (hash(x)=152892567)
+ 1900 val loss 4.5636
+ 1900 val perplexity 95.9250
+ 1900 train 4.604174 (lr=5.6253e-04) (hash(x)=141769419)
+ 1910 train 4.637553 (lr=5.6201e-04) (hash(x)=156914416)
+ 1920 train 4.478181 (lr=5.6148e-04) (hash(x)=151573900)
+ 1930 train 4.578133 (lr=5.6095e-04) (hash(x)=145121701)
+ 1940 train 4.518665 (lr=5.6041e-04) (hash(x)=150896348)
+ 1950 train 4.629759 (lr=5.5988e-04) (hash(x)=168797189)
+ 1960 train 4.501838 (lr=5.5934e-04) (hash(x)=146711430)
+ 1970 train 4.357291 (lr=5.5879e-04) (hash(x)=152180946)
+ 1980 train 4.480283 (lr=5.5824e-04) (hash(x)=155542268)
+ 1990 train 4.446460 (lr=5.5769e-04) (hash(x)=144668660)
+ 2000 val loss 4.5288
+ 2000 val perplexity 92.6475
+ 2000 train 4.521599 (lr=5.5714e-04) (hash(x)=151963443)
+ 2010 train 4.511193 (lr=5.5658e-04) (hash(x)=150810352)
+ 2020 train 4.513572 (lr=5.5602e-04) (hash(x)=157539832)
+ 2030 train 4.734151 (lr=5.5546e-04) (hash(x)=137858786)
+ 2040 train 4.464633 (lr=5.5489e-04) (hash(x)=154270900)
+ 2050 train 4.326408 (lr=5.5432e-04) (hash(x)=156252579)
+ 2060 train 4.255533 (lr=5.5374e-04) (hash(x)=143285549)
+ 2070 train 4.466375 (lr=5.5317e-04) (hash(x)=150542438)
+ 2080 train 4.391304 (lr=5.5259e-04) (hash(x)=146290330)
+ 2090 train 4.477229 (lr=5.5200e-04) (hash(x)=146486756)
+ 2100 val loss 4.5450
+ 2100 val perplexity 94.1638
+ 2100 train 4.485621 (lr=5.5142e-04) (hash(x)=162947470)
+ 2110 train 4.384953 (lr=5.5083e-04) (hash(x)=150305391)
+ 2120 train 4.541253 (lr=5.5023e-04) (hash(x)=156452783)
+ 2130 train 4.524720 (lr=5.4964e-04) (hash(x)=167481875)
+ 2140 train 4.491261 (lr=5.4904e-04) (hash(x)=141782256)
+ 2150 train 4.525728 (lr=5.4843e-04) (hash(x)=145035419)
+ 2160 train 4.574007 (lr=5.4783e-04) (hash(x)=142000400)
+ 2170 train 4.530458 (lr=5.4722e-04) (hash(x)=144229380)
+ 2180 train 4.579500 (lr=5.4661e-04) (hash(x)=166204176)
+ 2190 train 4.506018 (lr=5.4599e-04) (hash(x)=150712057)
+ 2200 val loss 4.4805
+ 2200 val perplexity 88.2752
+ 2200 train 4.396422 (lr=5.4537e-04) (hash(x)=154954810)
+ 2210 train 4.591577 (lr=5.4475e-04) (hash(x)=155668760)
+ 2220 train 4.421214 (lr=5.4413e-04) (hash(x)=144834708)
+ 2230 train 4.412834 (lr=5.4350e-04) (hash(x)=159744566)
+ 2240 train 4.380198 (lr=5.4287e-04) (hash(x)=158543377)
+ 2250 train 4.391350 (lr=5.4223e-04) (hash(x)=151004281)
+ 2260 train 4.494900 (lr=5.4160e-04) (hash(x)=151242482)
+ 2270 train 4.444022 (lr=5.4096e-04) (hash(x)=152056559)
+ 2280 train 4.370753 (lr=5.4031e-04) (hash(x)=148864076)
+ 2290 train 4.498991 (lr=5.3967e-04) (hash(x)=144294586)
+ 2300 val loss 4.4655
+ 2300 val perplexity 86.9604
+ 2300 train 4.405931 (lr=5.3902e-04) (hash(x)=151878111)
+ 2310 train 4.409058 (lr=5.3837e-04) (hash(x)=154139457)
+ 2320 train 4.342014 (lr=5.3771e-04) (hash(x)=155571592)
+ 2330 train 4.381868 (lr=5.3705e-04) (hash(x)=154823764)
+ 2340 train 4.436868 (lr=5.3639e-04) (hash(x)=153252177)
+ 2350 train 4.370737 (lr=5.3573e-04) (hash(x)=136051830)
+ 2360 train 4.414216 (lr=5.3506e-04) (hash(x)=153814175)
+ 2370 train 4.416036 (lr=5.3439e-04) (hash(x)=146391078)
+ 2380 train 4.382503 (lr=5.3372e-04) (hash(x)=150782698)
+ 2390 train 4.449402 (lr=5.3304e-04) (hash(x)=142752256)
+ 2400 val loss 4.4705
+ 2400 val perplexity 87.4020
+ 2400 train 4.350583 (lr=5.3236e-04) (hash(x)=158661057)
+ 2410 train 4.422263 (lr=5.3168e-04) (hash(x)=153611303)
+ 2420 train 4.598189 (lr=5.3099e-04) (hash(x)=159508684)
+ 2430 train 4.392899 (lr=5.3030e-04) (hash(x)=149501192)
+ 2440 train 4.059241 (lr=5.2961e-04) (hash(x)=181177177)
+ 2450 train 4.527268 (lr=5.2892e-04) (hash(x)=163664732)
+ 2460 train 4.213627 (lr=5.2822e-04) (hash(x)=194527955)
+ 2470 train 4.437664 (lr=5.2752e-04) (hash(x)=158088521)
+ 2480 train 4.392514 (lr=5.2682e-04) (hash(x)=152865678)
+ 2490 train 4.382834 (lr=5.2612e-04) (hash(x)=165703871)
+ 2500 val loss 4.4230
+ 2500 val perplexity 83.3499
+ 2500 train 4.470942 (lr=5.2541e-04) (hash(x)=150925584)
+ 2510 train 4.375011 (lr=5.2470e-04) (hash(x)=144406177)
+ 2520 train 4.262231 (lr=5.2398e-04) (hash(x)=120133043)
+ 2530 train 4.474709 (lr=5.2327e-04) (hash(x)=167900247)
+ 2540 train 4.356240 (lr=5.2255e-04) (hash(x)=162599203)
+ 2550 train 4.412279 (lr=5.2183e-04) (hash(x)=149328180)
+ 2560 train 4.310459 (lr=5.2110e-04) (hash(x)=150049968)
+ 2570 train 4.448700 (lr=5.2037e-04) (hash(x)=162876159)
+ 2580 train 4.271173 (lr=5.1964e-04) (hash(x)=148260649)
+ 2590 train 4.409980 (lr=5.1891e-04) (hash(x)=148998018)
+ 2600 val loss 4.4101
+ 2600 val perplexity 82.2796
+ 2600 train 4.329995 (lr=5.1817e-04) (hash(x)=144515755)
+ 2610 train 4.448966 (lr=5.1743e-04) (hash(x)=154455224)
+ 2620 train 4.250093 (lr=5.1669e-04) (hash(x)=142920048)
+ 2630 train 4.402009 (lr=5.1595e-04) (hash(x)=143931236)
+ 2640 train 4.257731 (lr=5.1520e-04) (hash(x)=150983323)
+ 2650 train 4.360910 (lr=5.1445e-04) (hash(x)=139360405)
+ 2660 train 4.264170 (lr=5.1370e-04) (hash(x)=148850470)
+ 2670 train 4.520264 (lr=5.1295e-04) (hash(x)=149929606)
+ 2680 train 4.494000 (lr=5.1219e-04) (hash(x)=151673000)
+ 2690 train 4.364605 (lr=5.1143e-04) (hash(x)=140631266)
+ 2700 val loss 4.4025
+ 2700 val perplexity 81.6556
+ 2700 train 4.338117 (lr=5.1067e-04) (hash(x)=153109144)
+ 2710 train 4.218790 (lr=5.0990e-04) (hash(x)=135332836)
+ 2720 train 4.582860 (lr=5.0913e-04) (hash(x)=131611182)
+ 2730 train 4.410504 (lr=5.0836e-04) (hash(x)=142909388)
+ 2740 train 4.445091 (lr=5.0759e-04) (hash(x)=145593419)
+ 2750 train 4.332325 (lr=5.0681e-04) (hash(x)=158225769)
+ 2760 train 4.313293 (lr=5.0603e-04) (hash(x)=170509938)
+ 2770 train 4.324759 (lr=5.0525e-04) (hash(x)=150785805)
+ 2780 train 4.269176 (lr=5.0447e-04) (hash(x)=167281736)
+ 2790 train 4.455155 (lr=5.0368e-04) (hash(x)=161660187)
+ 2800 val loss 4.3755
+ 2800 val perplexity 79.4785
+ 2800 train 4.408805 (lr=5.0290e-04) (hash(x)=151152897)
+ 2810 train 4.374192 (lr=5.0210e-04) (hash(x)=143172067)
+ 2820 train 4.388582 (lr=5.0131e-04) (hash(x)=141569169)
+ 2830 train 4.267646 (lr=5.0052e-04) (hash(x)=159709681)
+ 2840 train 4.329834 (lr=4.9972e-04) (hash(x)=162552294)
+ 2850 train 4.443244 (lr=4.9892e-04) (hash(x)=146762690)
+ 2860 train 4.316198 (lr=4.9811e-04) (hash(x)=145729422)
+ 2870 train 4.388108 (lr=4.9731e-04) (hash(x)=144360628)
+ 2880 train 4.203232 (lr=4.9650e-04) (hash(x)=147737098)
+ 2890 train 4.314510 (lr=4.9569e-04) (hash(x)=146849057)
+ 2900 val loss 4.3576
+ 2900 val perplexity 78.0701
+ 2900 train 4.378337 (lr=4.9487e-04) (hash(x)=145800210)
+ 2910 train 4.185465 (lr=4.9406e-04) (hash(x)=147316313)
+ 2920 train 4.234604 (lr=4.9324e-04) (hash(x)=152595415)
+ 2930 train 4.399158 (lr=4.9242e-04) (hash(x)=156542647)
+ 2940 train 4.257132 (lr=4.9160e-04) (hash(x)=141183896)
+ 2950 train 4.358592 (lr=4.9077e-04) (hash(x)=156329036)
+ 2960 train 4.397128 (lr=4.8995e-04) (hash(x)=156717793)
+ 2970 train 4.259643 (lr=4.8912e-04) (hash(x)=143949931)
+ 2980 train 4.325744 (lr=4.8829e-04) (hash(x)=155067290)
+ 2990 train 4.290055 (lr=4.8745e-04) (hash(x)=148774729)
+ 3000 val loss 4.3704
+ 3000 val perplexity 79.0792
+ 3000 train 4.270265 (lr=4.8662e-04) (hash(x)=141997485)
+ 3010 train 4.238098 (lr=4.8578e-04) (hash(x)=160762953)
+ 3020 train 4.209878 (lr=4.8494e-04) (hash(x)=170341665)
+ 3030 train 4.473457 (lr=4.8409e-04) (hash(x)=150722836)
+ 3040 train 4.265443 (lr=4.8325e-04) (hash(x)=154243307)
+ 3050 train 4.313826 (lr=4.8240e-04) (hash(x)=146077234)
+ 3060 train 4.266632 (lr=4.8155e-04) (hash(x)=152125542)
+ 3070 train 4.449606 (lr=4.8070e-04) (hash(x)=144149387)
+ 3080 train 4.279145 (lr=4.7984e-04) (hash(x)=147508712)
+ 3090 train 4.274778 (lr=4.7899e-04) (hash(x)=143583369)
+ 3100 val loss 4.3284
+ 3100 val perplexity 75.8227
+ 3100 train 4.305622 (lr=4.7813e-04) (hash(x)=154049740)
+ 3110 train 4.365994 (lr=4.7727e-04) (hash(x)=147054835)
+ 3120 train 4.347172 (lr=4.7641e-04) (hash(x)=152468568)
+ 3130 train 4.287776 (lr=4.7554e-04) (hash(x)=151635194)
+ 3140 train 4.255615 (lr=4.7467e-04) (hash(x)=150396274)
+ 3150 train 4.307040 (lr=4.7380e-04) (hash(x)=141306267)
+ 3160 train 4.250585 (lr=4.7293e-04) (hash(x)=151110762)
+ 3170 train 4.344000 (lr=4.7206e-04) (hash(x)=146518875)
+ 3180 train 4.218246 (lr=4.7118e-04) (hash(x)=139914401)
+ 3190 train 4.226621 (lr=4.7031e-04) (hash(x)=161631880)
+ 3200 val loss 4.3408
+ 3200 val perplexity 76.7651
+ 3200 train 4.252343 (lr=4.6943e-04) (hash(x)=150471842)
+ 3210 train 4.312123 (lr=4.6855e-04) (hash(x)=147355138)
+ 3220 train 4.111239 (lr=4.6766e-04) (hash(x)=138879678)
+ 3230 train 4.218341 (lr=4.6678e-04) (hash(x)=123929679)
+ 3240 train 4.286832 (lr=4.6589e-04) (hash(x)=157263734)
+ 3250 train 4.294387 (lr=4.6500e-04) (hash(x)=148065539)
+ 3260 train 4.213184 (lr=4.6411e-04) (hash(x)=138020742)
+ 3270 train 4.077530 (lr=4.6322e-04) (hash(x)=142053611)
+ 3280 train 4.233654 (lr=4.6232e-04) (hash(x)=155444784)
+ 3290 train 4.061300 (lr=4.6142e-04) (hash(x)=148500315)
+ 3300 val loss 4.3312
+ 3300 val perplexity 76.0333
+ 3300 train 4.298663 (lr=4.6052e-04) (hash(x)=149048126)
+ 3310 train 4.245136 (lr=4.5962e-04) (hash(x)=141680674)
+ 3320 train 4.189808 (lr=4.5872e-04) (hash(x)=143310642)
+ 3330 train 4.290866 (lr=4.5782e-04) (hash(x)=149891496)
+ 3340 train 4.248428 (lr=4.5691e-04) (hash(x)=151244609)
+ 3350 train 4.260502 (lr=4.5600e-04) (hash(x)=157158479)
+ 3360 train 4.115778 (lr=4.5509e-04) (hash(x)=151709876)
+ 3370 train 4.266237 (lr=4.5418e-04) (hash(x)=145297090)
+ 3380 train 4.285998 (lr=4.5326e-04) (hash(x)=154767881)
+ 3390 train 4.260479 (lr=4.5235e-04) (hash(x)=152600366)
+ 3400 val loss 4.3074
+ 3400 val perplexity 74.2492
+ 3400 train 4.491827 (lr=4.5143e-04) (hash(x)=161261339)
+ 3410 train 4.242117 (lr=4.5051e-04) (hash(x)=146859162)
+ 3420 train 4.334076 (lr=4.4959e-04) (hash(x)=149276224)
+ 3430 train 4.361734 (lr=4.4867e-04) (hash(x)=155866933)
+ 3440 train 4.214377 (lr=4.4774e-04) (hash(x)=143191904)
+ 3450 train 4.436650 (lr=4.4682e-04) (hash(x)=156910661)
+ 3460 train 4.225041 (lr=4.4589e-04) (hash(x)=140850879)
+ 3470 train 4.445935 (lr=4.4496e-04) (hash(x)=162140445)
+ 3480 train 4.190761 (lr=4.4403e-04) (hash(x)=145607495)
+ 3490 train 4.205517 (lr=4.4310e-04) (hash(x)=152496121)
+ 3500 val loss 4.3006
+ 3500 val perplexity 73.7430
+ 3500 train 4.175681 (lr=4.4216e-04) (hash(x)=157495564)
+ 3510 train 4.298810 (lr=4.4123e-04) (hash(x)=147657004)
+ 3520 train 4.267686 (lr=4.4029e-04) (hash(x)=152978060)
+ 3530 train 4.205024 (lr=4.3935e-04) (hash(x)=146800180)
+ 3540 train 4.279503 (lr=4.3841e-04) (hash(x)=195450444)
+ 3550 train 4.260127 (lr=4.3747e-04) (hash(x)=151114129)
+ 3560 train 4.244706 (lr=4.3652e-04) (hash(x)=143833190)
+ 3570 train 4.051334 (lr=4.3558e-04) (hash(x)=146039971)
+ 3580 train 4.116214 (lr=4.3463e-04) (hash(x)=145172249)
+ 3590 train 4.035852 (lr=4.3368e-04) (hash(x)=159164125)
+ 3600 val loss 4.2968
+ 3600 val perplexity 73.4613
+ 3600 train 4.271433 (lr=4.3273e-04) (hash(x)=144352932)
+ 3610 train 4.239083 (lr=4.3178e-04) (hash(x)=150550670)
+ 3620 train 4.367783 (lr=4.3083e-04) (hash(x)=147170125)
+ 3630 train 4.166487 (lr=4.2987e-04) (hash(x)=139473935)
+ 3640 train 4.158472 (lr=4.2892e-04) (hash(x)=159294424)
+ 3650 train 4.276155 (lr=4.2796e-04) (hash(x)=145943278)
+ 3660 train 4.334130 (lr=4.2700e-04) (hash(x)=141647872)
+ 3670 train 4.305375 (lr=4.2604e-04) (hash(x)=147109351)
+ 3680 train 4.328221 (lr=4.2508e-04) (hash(x)=140683368)
+ 3690 train 4.245663 (lr=4.2411e-04) (hash(x)=156276284)
+ 3700 val loss 4.2802
+ 3700 val perplexity 72.2530
+ 3700 train 4.196733 (lr=4.2315e-04) (hash(x)=149389012)
+ 3710 train 4.326389 (lr=4.2218e-04) (hash(x)=142107929)
+ 3720 train 4.301076 (lr=4.2122e-04) (hash(x)=147828843)
+ 3730 train 4.365184 (lr=4.2025e-04) (hash(x)=141784679)
+ 3740 train 4.265013 (lr=4.1928e-04) (hash(x)=153375464)
+ 3750 train 4.203296 (lr=4.1831e-04) (hash(x)=153716001)
+ 3760 train 4.164290 (lr=4.1734e-04) (hash(x)=146848822)
+ 3770 train 4.123320 (lr=4.1636e-04) (hash(x)=148396782)
+ 3780 train 4.191072 (lr=4.1539e-04) (hash(x)=153563244)
+ 3790 train 4.104450 (lr=4.1441e-04) (hash(x)=160808734)
+ 3800 val loss 4.2704
+ 3800 val perplexity 71.5513
+ 3800 train 4.292404 (lr=4.1343e-04) (hash(x)=146607620)
+ 3810 train 4.258980 (lr=4.1246e-04) (hash(x)=152580677)
+ 3820 train 4.617507 (lr=4.1148e-04) (hash(x)=144860879)
+ 3830 train 4.278432 (lr=4.1050e-04) (hash(x)=162099851)
+ 3840 train 4.239708 (lr=4.0951e-04) (hash(x)=159506855)
+ 3850 train 4.164797 (lr=4.0853e-04) (hash(x)=140022686)
+ 3860 train 4.237893 (lr=4.0755e-04) (hash(x)=152137850)
+ 3870 train 4.356323 (lr=4.0656e-04) (hash(x)=142666785)
+ 3880 train 4.204819 (lr=4.0557e-04) (hash(x)=148285868)
+ 3890 train 4.171728 (lr=4.0459e-04) (hash(x)=147910067)
+ 3900 val loss 4.2705
+ 3900 val perplexity 71.5566
+ 3900 train 4.208207 (lr=4.0360e-04) (hash(x)=143754617)
+ 3910 train 4.207408 (lr=4.0261e-04) (hash(x)=154779949)
+ 3920 train 4.047403 (lr=4.0162e-04) (hash(x)=137824061)
+ 3930 train 4.017090 (lr=4.0063e-04) (hash(x)=136933373)
+ 3940 train 4.094826 (lr=3.9963e-04) (hash(x)=164054337)
+ 3950 train 4.318359 (lr=3.9864e-04) (hash(x)=155752984)
+ 3960 train 4.403209 (lr=3.9764e-04) (hash(x)=160957673)
+ 3970 train 4.272636 (lr=3.9665e-04) (hash(x)=157158690)
+ 3980 train 4.278957 (lr=3.9565e-04) (hash(x)=153282027)
+ 3990 train 4.304569 (lr=3.9465e-04) (hash(x)=152183510)
+ 4000 val loss 4.2443
+ 4000 val perplexity 69.7064
+ 4000 train 4.196175 (lr=3.9365e-04) (hash(x)=156930722)
+ 4010 train 4.187428 (lr=3.9266e-04) (hash(x)=150798740)
+ 4020 train 4.405786 (lr=3.9165e-04) (hash(x)=142632400)
+ 4030 train 4.304651 (lr=3.9065e-04) (hash(x)=173326287)
+ 4040 train 4.197855 (lr=3.8965e-04) (hash(x)=128797443)
+ 4050 train 4.192305 (lr=3.8865e-04) (hash(x)=146937792)
+ 4060 train 4.300669 (lr=3.8764e-04) (hash(x)=164579030)
+ 4070 train 4.162514 (lr=3.8664e-04) (hash(x)=149459820)
+ 4080 train 4.134852 (lr=3.8563e-04) (hash(x)=160159595)
+ 4090 train 4.076524 (lr=3.8463e-04) (hash(x)=141507853)
+ 4100 val loss 4.2426
+ 4100 val perplexity 69.5908
+ 4100 train 4.212006 (lr=3.8362e-04) (hash(x)=147500519)
+ 4110 train 4.189104 (lr=3.8261e-04) (hash(x)=153297081)
+ 4120 train 4.196279 (lr=3.8160e-04) (hash(x)=142560288)
+ 4130 train 4.212025 (lr=3.8059e-04) (hash(x)=153380118)
+ 4140 train 4.117969 (lr=3.7958e-04) (hash(x)=143150720)
+ 4150 train 4.160090 (lr=3.7857e-04) (hash(x)=143459564)
+ 4160 train 4.192156 (lr=3.7756e-04) (hash(x)=153955741)
+ 4170 train 4.297305 (lr=3.7655e-04) (hash(x)=152199253)
+ 4180 train 4.064004 (lr=3.7553e-04) (hash(x)=165957668)
+ 4190 train 4.118344 (lr=3.7452e-04) (hash(x)=144866923)
+ 4200 val loss 4.2466
+ 4200 val perplexity 69.8644
+ 4200 train 4.116647 (lr=3.7351e-04) (hash(x)=143232237)
+ 4210 train 4.033844 (lr=3.7249e-04) (hash(x)=151907163)
+ 4220 train 4.135174 (lr=3.7148e-04) (hash(x)=145917587)
+ 4230 train 3.967760 (lr=3.7046e-04) (hash(x)=147258846)
+ 4240 train 4.124782 (lr=3.6944e-04) (hash(x)=143449843)
+ 4250 train 4.085752 (lr=3.6843e-04) (hash(x)=184883715)
+ 4260 train 4.176322 (lr=3.6741e-04) (hash(x)=156958761)
+ 4270 train 4.250105 (lr=3.6639e-04) (hash(x)=168870544)
+ 4280 train 4.175034 (lr=3.6537e-04) (hash(x)=179446735)
+ 4290 train 4.219192 (lr=3.6435e-04) (hash(x)=145649386)
+ 4300 val loss 4.2332
+ 4300 val perplexity 68.9401
+ 4300 train 4.066499 (lr=3.6333e-04) (hash(x)=146811670)
+ 4310 train 4.336442 (lr=3.6231e-04) (hash(x)=151907075)
+ 4320 train 4.138725 (lr=3.6129e-04) (hash(x)=138628365)
+ 4330 train 4.197901 (lr=3.6027e-04) (hash(x)=151608240)
+ 4340 train 4.128697 (lr=3.5925e-04) (hash(x)=160111319)
+ 4350 train 4.278884 (lr=3.5822e-04) (hash(x)=144958652)
+ 4360 train 4.068399 (lr=3.5720e-04) (hash(x)=149176222)
+ 4370 train 4.125728 (lr=3.5618e-04) (hash(x)=216221371)
+ 4380 train 4.071362 (lr=3.5515e-04) (hash(x)=141760648)
+ 4390 train 4.159255 (lr=3.5413e-04) (hash(x)=151710283)
+ 4400 val loss 4.2200
+ 4400 val perplexity 68.0334
+ 4400 train 4.280020 (lr=3.5311e-04) (hash(x)=158418746)
+ 4410 train 4.136970 (lr=3.5208e-04) (hash(x)=142331301)
+ 4420 train 4.217907 (lr=3.5106e-04) (hash(x)=146507180)
+ 4430 train 4.309099 (lr=3.5003e-04) (hash(x)=166506937)
+ 4440 train 4.245703 (lr=3.4901e-04) (hash(x)=151209447)
+ 4450 train 4.208759 (lr=3.4798e-04) (hash(x)=149186822)
+ 4460 train 4.195794 (lr=3.4695e-04) (hash(x)=144348412)
+ 4470 train 4.081789 (lr=3.4593e-04) (hash(x)=156494284)
+ 4480 train 4.115552 (lr=3.4490e-04) (hash(x)=147238744)
+ 4490 train 4.059175 (lr=3.4387e-04) (hash(x)=150746185)
+ 4500 val loss 4.2153
+ 4500 val perplexity 67.7136
+ 4500 train 4.153756 (lr=3.4285e-04) (hash(x)=156695778)
+ 4510 train 3.995005 (lr=3.4182e-04) (hash(x)=135988958)
+ 4520 train 4.163864 (lr=3.4079e-04) (hash(x)=148272110)
+ 4530 train 4.080733 (lr=3.3977e-04) (hash(x)=136459794)
+ 4540 train 4.115305 (lr=3.3874e-04) (hash(x)=147624954)
+ 4550 train 4.115876 (lr=3.3771e-04) (hash(x)=154947199)
+ 4560 train 4.174077 (lr=3.3668e-04) (hash(x)=147869166)
+ 4570 train 4.023650 (lr=3.3565e-04) (hash(x)=148415463)
+ 4580 train 4.192674 (lr=3.3463e-04) (hash(x)=152104066)
+ 4590 train 4.090989 (lr=3.3360e-04) (hash(x)=147060040)
+ 4600 val loss 4.2122
+ 4600 val perplexity 67.5075
+ 4600 train 4.163352 (lr=3.3257e-04) (hash(x)=147791497)
+ 4610 train 4.362179 (lr=3.3154e-04) (hash(x)=155595556)
+ 4620 train 4.207492 (lr=3.3051e-04) (hash(x)=147178858)
+ 4630 train 4.258422 (lr=3.2949e-04) (hash(x)=152617112)
+ 4640 train 4.267780 (lr=3.2846e-04) (hash(x)=161766869)
+ 4650 train 4.242554 (lr=3.2743e-04) (hash(x)=158075561)
+ 4660 train 4.279838 (lr=3.2640e-04) (hash(x)=145360002)
+ 4670 train 4.162302 (lr=3.2537e-04) (hash(x)=144538791)
+ 4680 train 4.194803 (lr=3.2435e-04) (hash(x)=142711273)
+ 4690 train 4.356268 (lr=3.2332e-04) (hash(x)=193907362)
+ 4700 val loss 4.1921
+ 4700 val perplexity 66.1593
+ 4700 train 4.161912 (lr=3.2229e-04) (hash(x)=155533088)
+ 4710 train 4.120488 (lr=3.2126e-04) (hash(x)=148618528)
+ 4720 train 4.228819 (lr=3.2023e-04) (hash(x)=150502624)
+ 4730 train 4.103030 (lr=3.1921e-04) (hash(x)=189205847)
+ 4740 train 4.159419 (lr=3.1818e-04) (hash(x)=167322800)
+ 4750 train 4.279221 (lr=3.1715e-04) (hash(x)=162169095)
+ 4760 train 3.984187 (lr=3.1613e-04) (hash(x)=143459469)
+ 4770 train 4.055861 (lr=3.1510e-04) (hash(x)=144386593)
+ 4780 train 4.026664 (lr=3.1407e-04) (hash(x)=143721184)
+ 4790 train 4.094540 (lr=3.1305e-04) (hash(x)=146077222)
+ 4800 val loss 4.1973
+ 4800 val perplexity 66.5080
+ 4800 train 4.074190 (lr=3.1202e-04) (hash(x)=138350044)
+ 4810 train 4.074332 (lr=3.1099e-04) (hash(x)=154960112)
+ 4820 train 3.985426 (lr=3.0997e-04) (hash(x)=146950685)
+ 4830 train 3.986783 (lr=3.0894e-04) (hash(x)=140224554)
+ 4840 train 3.984618 (lr=3.0792e-04) (hash(x)=156640495)
+ 4850 train 4.193567 (lr=3.0689e-04) (hash(x)=153409150)
+ 4860 train 4.110546 (lr=3.0587e-04) (hash(x)=151647700)
+ 4870 train 4.507183 (lr=3.0485e-04) (hash(x)=155465603)
+ 4880 train 4.350823 (lr=3.0382e-04) (hash(x)=153404400)
+ 4890 train 4.237038 (lr=3.0280e-04) (hash(x)=170409281)
+ 4900 val loss 4.1829
+ 4900 val perplexity 65.5532
+ 4900 train 4.104501 (lr=3.0178e-04) (hash(x)=143735284)
+ 4910 train 4.239965 (lr=3.0075e-04) (hash(x)=150784329)
+ 4920 train 4.126100 (lr=2.9973e-04) (hash(x)=152353574)
+ 4930 train 4.079868 (lr=2.9871e-04) (hash(x)=152677008)
+ 4940 train 4.157333 (lr=2.9769e-04) (hash(x)=140553861)
+ 4950 train 4.247647 (lr=2.9667e-04) (hash(x)=152869181)
+ 4960 train 4.198486 (lr=2.9565e-04) (hash(x)=153011079)
+ 4970 train 4.151272 (lr=2.9463e-04) (hash(x)=152796617)
+ 4980 train 4.263351 (lr=2.9361e-04) (hash(x)=146868417)
+ 4990 train 4.140583 (lr=2.9259e-04) (hash(x)=140168188)
+ 5000 val loss 4.1718
+ 5000 val perplexity 64.8329
+ 5000 train 4.124722 (lr=2.9157e-04) (hash(x)=154976463)
+ 5010 train 4.190895 (lr=2.9056e-04) (hash(x)=148582690)
+ 5020 train 4.192195 (lr=2.8954e-04) (hash(x)=166715464)
+ 5030 train 4.154720 (lr=2.8852e-04) (hash(x)=150616397)
+ 5040 train 4.201216 (lr=2.8751e-04) (hash(x)=150775548)
+ 5050 train 4.184381 (lr=2.8649e-04) (hash(x)=150575367)
+ 5060 train 4.107418 (lr=2.8548e-04) (hash(x)=152352082)
+ 5070 train 4.150015 (lr=2.8447e-04) (hash(x)=150269272)
+ 5080 train 4.204904 (lr=2.8345e-04) (hash(x)=146499361)
+ 5090 train 3.890438 (lr=2.8244e-04) (hash(x)=148734454)
+ 5100 val loss 4.1734
+ 5100 val perplexity 64.9384
+ 5100 train 4.077798 (lr=2.8143e-04) (hash(x)=149894982)
+ 5110 train 4.130024 (lr=2.8042e-04) (hash(x)=154487751)
+ 5120 train 4.074249 (lr=2.7941e-04) (hash(x)=144169582)
+ 5130 train 4.052025 (lr=2.7840e-04) (hash(x)=152217630)
+ 5140 train 4.084290 (lr=2.7739e-04) (hash(x)=158233831)
+ 5150 train 4.140765 (lr=2.7638e-04) (hash(x)=142746426)
+ 5160 train 4.132466 (lr=2.7537e-04) (hash(x)=139184458)
+ 5170 train 4.219120 (lr=2.7437e-04) (hash(x)=140624857)
+ 5180 train 3.941659 (lr=2.7336e-04) (hash(x)=154107423)
+ 5190 train 3.882357 (lr=2.7236e-04) (hash(x)=144892610)
+ 5200 val loss 4.1729
+ 5200 val perplexity 64.9035
+ 5200 train 3.836998 (lr=2.7135e-04) (hash(x)=159326689)
+ 5210 train 3.999723 (lr=2.7035e-04) (hash(x)=151186017)
+ 5220 train 3.814011 (lr=2.6935e-04) (hash(x)=148996819)
+ 5230 train 3.761153 (lr=2.6835e-04) (hash(x)=150829917)
+ 5240 train 3.874853 (lr=2.6734e-04) (hash(x)=152758972)
+ 5250 train 3.841526 (lr=2.6635e-04) (hash(x)=154345903)
+ 5260 train 3.839329 (lr=2.6535e-04) (hash(x)=150134947)
+ 5270 train 3.933956 (lr=2.6435e-04) (hash(x)=149689310)
+ 5280 train 4.160221 (lr=2.6335e-04) (hash(x)=147662223)
+ 5290 train 4.229419 (lr=2.6236e-04) (hash(x)=165770608)
+ 5300 val loss 4.1613
+ 5300 val perplexity 64.1526
+ 5300 train 4.209962 (lr=2.6136e-04) (hash(x)=159484800)
+ 5310 train 4.250412 (lr=2.6037e-04) (hash(x)=152672536)
+ 5320 train 4.067711 (lr=2.5937e-04) (hash(x)=152728812)
+ 5330 train 4.014211 (lr=2.5838e-04) (hash(x)=152778685)
+ 5340 train 4.104506 (lr=2.5739e-04) (hash(x)=149818849)
+ 5350 train 4.043990 (lr=2.5640e-04) (hash(x)=168239739)
+ 5360 train 4.186938 (lr=2.5541e-04) (hash(x)=139859423)
+ 5370 train 4.012883 (lr=2.5443e-04) (hash(x)=139315327)
+ 5380 train 4.216764 (lr=2.5344e-04) (hash(x)=163068509)
+ 5390 train 4.173079 (lr=2.5245e-04) (hash(x)=147645415)
+ 5400 val loss 4.1419
+ 5400 val perplexity 62.9238
+ 5400 train 4.131252 (lr=2.5147e-04) (hash(x)=140385615)
+ 5410 train 3.990021 (lr=2.5049e-04) (hash(x)=151381862)
+ 5420 train 4.146679 (lr=2.4950e-04) (hash(x)=152445092)
+ 5430 train 4.156535 (lr=2.4852e-04) (hash(x)=142454479)
+ 5440 train 4.111445 (lr=2.4754e-04) (hash(x)=151308771)
+ 5450 train 3.961064 (lr=2.4657e-04) (hash(x)=150886070)
+ 5460 train 4.065333 (lr=2.4559e-04) (hash(x)=146080408)
+ 5470 train 4.092682 (lr=2.4461e-04) (hash(x)=149828857)
+ 5480 train 4.064167 (lr=2.4364e-04) (hash(x)=139922034)
+ 5490 train 4.013615 (lr=2.4266e-04) (hash(x)=150262276)
+ 5500 val loss 4.1429
+ 5500 val perplexity 62.9838
+ 5500 train 3.983670 (lr=2.4169e-04) (hash(x)=148498335)
+ 5510 train 4.095223 (lr=2.4072e-04) (hash(x)=157036224)
+ 5520 train 3.978663 (lr=2.3975e-04) (hash(x)=134093409)
+ 5530 train 4.008155 (lr=2.3878e-04) (hash(x)=153126250)
+ 5540 train 4.013888 (lr=2.3782e-04) (hash(x)=136170985)
+ 5550 train 4.001459 (lr=2.3685e-04) (hash(x)=152841201)
+ 5560 train 4.100555 (lr=2.3589e-04) (hash(x)=147439014)
+ 5570 train 4.017499 (lr=2.3492e-04) (hash(x)=143476132)
+ 5580 train 3.789842 (lr=2.3396e-04) (hash(x)=139503437)
+ 5590 train 3.834293 (lr=2.3300e-04) (hash(x)=131664955)
+ 5600 val loss 4.1513
+ 5600 val perplexity 63.5171
+ 5600 train 3.930974 (lr=2.3204e-04) (hash(x)=151907614)
+ 5610 train 3.818095 (lr=2.3108e-04) (hash(x)=157132674)
+ 5620 train 4.014812 (lr=2.3013e-04) (hash(x)=146793775)
+ 5630 train 3.964301 (lr=2.2917e-04) (hash(x)=147237910)
+ 5640 train 3.873759 (lr=2.2822e-04) (hash(x)=141120024)
+ 5650 train 3.853304 (lr=2.2727e-04) (hash(x)=144275645)
+ 5660 train 3.882977 (lr=2.2632e-04) (hash(x)=159088002)
+ 5670 train 3.939816 (lr=2.2537e-04) (hash(x)=147224479)
+ 5680 train 4.283384 (lr=2.2442e-04) (hash(x)=147208743)
+ 5690 train 4.087255 (lr=2.2348e-04) (hash(x)=154384701)
+ 5700 val loss 4.1429
+ 5700 val perplexity 62.9852
+ 5700 train 4.122369 (lr=2.2253e-04) (hash(x)=155192267)
+ 5710 train 4.061213 (lr=2.2159e-04) (hash(x)=148458753)
+ 5720 train 4.166955 (lr=2.2065e-04) (hash(x)=159947204)
+ 5730 train 4.161260 (lr=2.1971e-04) (hash(x)=150746366)
+ 5740 train 4.148029 (lr=2.1877e-04) (hash(x)=155201884)
+ 5750 train 4.150617 (lr=2.1784e-04) (hash(x)=156432008)
+ 5760 train 4.132886 (lr=2.1690e-04) (hash(x)=185977746)
+ 5770 train 4.132131 (lr=2.1597e-04) (hash(x)=133620000)
+ 5780 train 4.140632 (lr=2.1504e-04) (hash(x)=153778044)
+ 5790 train 3.990527 (lr=2.1411e-04) (hash(x)=147713822)
+ 5800 val loss 4.1225
+ 5800 val perplexity 61.7122
+ 5800 train 4.144154 (lr=2.1318e-04) (hash(x)=153132158)
+ 5810 train 4.095840 (lr=2.1226e-04) (hash(x)=144293895)
+ 5820 train 4.077534 (lr=2.1133e-04) (hash(x)=151500590)
+ 5830 train 4.025883 (lr=2.1041e-04) (hash(x)=139308707)
+ 5840 train 4.143580 (lr=2.0949e-04) (hash(x)=153591278)
+ 5850 train 4.122774 (lr=2.0857e-04) (hash(x)=146077037)
+ 5860 train 4.084943 (lr=2.0765e-04) (hash(x)=149160733)
+ 5870 train 4.069598 (lr=2.0674e-04) (hash(x)=138482476)
+ 5880 train 4.084016 (lr=2.0582e-04) (hash(x)=152173137)
+ 5890 train 4.054162 (lr=2.0491e-04) (hash(x)=142387057)
+ 5900 val loss 4.1294
+ 5900 val perplexity 62.1406
+ 5900 train 4.130868 (lr=2.0400e-04) (hash(x)=161446764)
+ 5910 train 4.006062 (lr=2.0309e-04) (hash(x)=144512138)
+ 5920 train 4.065510 (lr=2.0218e-04) (hash(x)=152555618)
+ 5930 train 4.024787 (lr=2.0128e-04) (hash(x)=146129367)
+ 5940 train 3.964532 (lr=2.0038e-04) (hash(x)=154607527)
+ 5950 train 4.052170 (lr=1.9948e-04) (hash(x)=143827143)
+ 5960 train 4.181569 (lr=1.9858e-04) (hash(x)=151168510)
+ 5970 train 3.971842 (lr=1.9768e-04) (hash(x)=158961574)
+ 5980 train 3.800040 (lr=1.9678e-04) (hash(x)=148757787)
+ 5990 train 4.304749 (lr=1.9589e-04) (hash(x)=161719494)
+ 6000 val loss 4.1289
+ 6000 val perplexity 62.1090
+ 6000 train 3.803596 (lr=1.9500e-04) (hash(x)=151512446)
+ 6010 train 3.928452 (lr=1.9411e-04) (hash(x)=173500735)
+ 6020 train 4.054879 (lr=1.9322e-04) (hash(x)=157621031)
+ 6030 train 3.941328 (lr=1.9234e-04) (hash(x)=142662684)
+ 6040 train 3.926851 (lr=1.9145e-04) (hash(x)=146180940)
+ 6050 train 3.845347 (lr=1.9057e-04) (hash(x)=147654412)
+ 6060 train 3.725557 (lr=1.8969e-04) (hash(x)=156393157)
+ 6070 train 4.119762 (lr=1.8882e-04) (hash(x)=156947300)
+ 6080 train 4.114617 (lr=1.8794e-04) (hash(x)=163108626)
+ 6090 train 4.212381 (lr=1.8707e-04) (hash(x)=158654713)
+ 6100 val loss 4.1114
+ 6100 val perplexity 61.0335
+ 6100 train 3.941209 (lr=1.8620e-04) (hash(x)=188094053)
+ 6110 train 4.104810 (lr=1.8533e-04) (hash(x)=145132600)
+ 6120 train 3.973321 (lr=1.8446e-04) (hash(x)=146999707)
+ 6130 train 4.207595 (lr=1.8359e-04) (hash(x)=145719530)
+ 6140 train 4.146309 (lr=1.8273e-04) (hash(x)=153230722)
+ 6150 train 4.078898 (lr=1.8187e-04) (hash(x)=147455934)
+ 6160 train 4.128797 (lr=1.8101e-04) (hash(x)=146596242)
+ 6170 train 4.223723 (lr=1.8016e-04) (hash(x)=152029855)
+ 6180 train 4.086768 (lr=1.7930e-04) (hash(x)=151480034)
+ 6190 train 4.021400 (lr=1.7845e-04) (hash(x)=147381047)
+ 6200 val loss 4.0979
+ 6200 val perplexity 60.2145
+ 6200 train 4.008654 (lr=1.7760e-04) (hash(x)=149389789)
+ 6210 train 4.056643 (lr=1.7675e-04) (hash(x)=154894138)
+ 6220 train 4.092628 (lr=1.7591e-04) (hash(x)=150074302)
+ 6230 train 3.964304 (lr=1.7506e-04) (hash(x)=142881794)
+ 6240 train 3.946099 (lr=1.7422e-04) (hash(x)=164747461)
+ 6250 train 4.111120 (lr=1.7338e-04) (hash(x)=158378292)
+ 6260 train 4.119335 (lr=1.7255e-04) (hash(x)=147675938)
+ 6270 train 4.062585 (lr=1.7171e-04) (hash(x)=148197228)
+ 6280 train 4.023065 (lr=1.7088e-04) (hash(x)=135296747)
+ 6290 train 4.102140 (lr=1.7005e-04) (hash(x)=156439346)
+ 6300 val loss 4.0971
+ 6300 val perplexity 60.1641
+ 6300 train 3.917470 (lr=1.6923e-04) (hash(x)=138212820)
+ 6310 train 4.217722 (lr=1.6840e-04) (hash(x)=165985336)
+ 6320 train 4.081685 (lr=1.6758e-04) (hash(x)=145921897)
+ 6330 train 4.295026 (lr=1.6676e-04) (hash(x)=145502689)
+ 6340 train 4.025110 (lr=1.6594e-04) (hash(x)=141704937)
+ 6350 train 4.083145 (lr=1.6513e-04) (hash(x)=161789400)
+ 6360 train 3.802051 (lr=1.6431e-04) (hash(x)=145457780)
+ 6370 train 4.056484 (lr=1.6350e-04) (hash(x)=171628530)
+ 6380 train 3.849670 (lr=1.6269e-04) (hash(x)=153467923)
+ 6390 train 4.222523 (lr=1.6189e-04) (hash(x)=158066904)
+ 6400 val loss 4.1026
+ 6400 val perplexity 60.4967
+ 6400 train 3.873353 (lr=1.6108e-04) (hash(x)=146535423)
+ 6410 train 3.737594 (lr=1.6028e-04) (hash(x)=147264155)
+ 6420 train 3.840661 (lr=1.5948e-04) (hash(x)=145595485)
+ 6430 train 3.986471 (lr=1.5869e-04) (hash(x)=146929074)
+ 6440 train 3.911173 (lr=1.5790e-04) (hash(x)=147290878)
+ 6450 train 4.010109 (lr=1.5710e-04) (hash(x)=141154205)
+ 6460 train 3.855596 (lr=1.5632e-04) (hash(x)=154360901)
+ 6470 train 4.191347 (lr=1.5553e-04) (hash(x)=146350558)
+ 6480 train 4.154114 (lr=1.5475e-04) (hash(x)=153927683)
+ 6490 train 4.065804 (lr=1.5397e-04) (hash(x)=149337047)
+ 6500 val loss 4.0905
+ 6500 val perplexity 59.7677
+ 6500 train 3.990298 (lr=1.5319e-04) (hash(x)=145950843)
+ 6510 train 4.055512 (lr=1.5241e-04) (hash(x)=142577653)
+ 6520 train 4.114680 (lr=1.5164e-04) (hash(x)=165060689)
+ 6530 train 3.950322 (lr=1.5087e-04) (hash(x)=148367505)
+ 6540 train 4.050065 (lr=1.5010e-04) (hash(x)=154313651)
+ 6550 train 3.966772 (lr=1.4933e-04) (hash(x)=147477315)
+ 6560 train 4.057713 (lr=1.4857e-04) (hash(x)=139825403)
+ 6570 train 4.073455 (lr=1.4781e-04) (hash(x)=141482518)
+ 6580 train 4.111005 (lr=1.4705e-04) (hash(x)=144968144)
+ 6590 train 4.052938 (lr=1.4630e-04) (hash(x)=154319720)
+ 6600 val loss 4.0872
+ 6600 val perplexity 59.5708
+ 6600 train 4.098924 (lr=1.4555e-04) (hash(x)=141162902)
+ 6610 train 4.064279 (lr=1.4480e-04) (hash(x)=159490449)
+ 6620 train 3.944865 (lr=1.4405e-04) (hash(x)=151189772)
+ 6630 train 3.929213 (lr=1.4331e-04) (hash(x)=148451651)
+ 6640 train 4.048608 (lr=1.4257e-04) (hash(x)=152815550)
+ 6650 train 3.899125 (lr=1.4183e-04) (hash(x)=144316094)
+ 6660 train 4.218923 (lr=1.4109e-04) (hash(x)=143951240)
+ 6670 train 3.841168 (lr=1.4036e-04) (hash(x)=143203472)
+ 6680 train 3.883866 (lr=1.3963e-04) (hash(x)=132063806)
+ 6690 train 3.975191 (lr=1.3890e-04) (hash(x)=140739750)
+ 6700 val loss 4.0906
+ 6700 val perplexity 59.7781
+ 6700 train 4.037490 (lr=1.3817e-04) (hash(x)=153018737)
+ 6710 train 3.898584 (lr=1.3745e-04) (hash(x)=140107197)
+ 6720 train 4.004174 (lr=1.3673e-04) (hash(x)=144396559)
+ 6730 train 3.850376 (lr=1.3602e-04) (hash(x)=148038991)
+ 6740 train 4.063259 (lr=1.3530e-04) (hash(x)=145504746)
+ 6750 train 4.052204 (lr=1.3459e-04) (hash(x)=119579094)
+ 6760 train 4.065996 (lr=1.3388e-04) (hash(x)=161725430)
+ 6770 train 4.082900 (lr=1.3318e-04) (hash(x)=153646966)
+ 6780 train 4.088391 (lr=1.3248e-04) (hash(x)=153838463)
+ 6790 train 4.215812 (lr=1.3178e-04) (hash(x)=157891196)
+ 6800 val loss 4.0746
+ 6800 val perplexity 58.8259
+ 6800 train 3.928339 (lr=1.3108e-04) (hash(x)=155640155)
+ 6810 train 4.126523 (lr=1.3039e-04) (hash(x)=154913809)
+ 6820 train 4.039434 (lr=1.2970e-04) (hash(x)=159412493)
+ 6830 train 3.998991 (lr=1.2901e-04) (hash(x)=144756549)
+ 6840 train 4.210915 (lr=1.2832e-04) (hash(x)=143824638)
+ 6850 train 4.539484 (lr=1.2764e-04) (hash(x)=117180982)
+ 6860 train 4.055289 (lr=1.2696e-04) (hash(x)=141887150)
+ 6870 train 4.125406 (lr=1.2628e-04) (hash(x)=140094872)
+ 6880 train 3.961360 (lr=1.2561e-04) (hash(x)=141479953)
+ 6890 train 4.027465 (lr=1.2494e-04) (hash(x)=153166840)
+ 6900 val loss 4.0723
+ 6900 val perplexity 58.6907
+ 6900 train 4.051608 (lr=1.2427e-04) (hash(x)=153722115)
+ 6910 train 4.156342 (lr=1.2361e-04) (hash(x)=149578602)
+ 6920 train 4.013233 (lr=1.2295e-04) (hash(x)=149925108)
+ 6930 train 3.959399 (lr=1.2229e-04) (hash(x)=148589489)
+ 6940 train 3.865299 (lr=1.2163e-04) (hash(x)=144243794)
+ 6950 train 3.951268 (lr=1.2098e-04) (hash(x)=157743480)
+ 6960 train 3.939175 (lr=1.2033e-04) (hash(x)=139858247)
+ 6970 train 3.865141 (lr=1.1969e-04) (hash(x)=148727536)
+ 6980 train 3.978397 (lr=1.1904e-04) (hash(x)=153382595)
+ 6990 train 4.121367 (lr=1.1840e-04) (hash(x)=147529919)
+ 7000 val loss 4.0668
+ 7000 val perplexity 58.3719
+ 7000 train 4.085890 (lr=1.1777e-04) (hash(x)=146953450)
+ 7010 train 3.999857 (lr=1.1713e-04) (hash(x)=134561015)
+ 7020 train 4.108835 (lr=1.1650e-04) (hash(x)=148433515)
+ 7030 train 3.959126 (lr=1.1587e-04) (hash(x)=140394326)
+ 7040 train 3.971859 (lr=1.1525e-04) (hash(x)=136740763)
+ 7050 train 3.811120 (lr=1.1463e-04) (hash(x)=148637938)
+ 7060 train 3.966332 (lr=1.1401e-04) (hash(x)=147262147)
+ 7070 train 3.942297 (lr=1.1339e-04) (hash(x)=140191039)
+ 7080 train 3.959689 (lr=1.1278e-04) (hash(x)=148223305)
+ 7090 train 3.878488 (lr=1.1217e-04) (hash(x)=139984988)
+ 7100 val loss 4.0709
+ 7100 val perplexity 58.6080
+ 7100 train 3.991115 (lr=1.1157e-04) (hash(x)=137663885)
+ 7110 train 3.870194 (lr=1.1096e-04) (hash(x)=150955021)
+ 7120 train 3.781229 (lr=1.1036e-04) (hash(x)=148585682)
+ 7130 train 3.980319 (lr=1.0977e-04) (hash(x)=146055548)
+ 7140 train 4.023724 (lr=1.0917e-04) (hash(x)=155723541)
+ 7150 train 4.093482 (lr=1.0858e-04) (hash(x)=140465834)
+ 7160 train 4.069212 (lr=1.0800e-04) (hash(x)=163246922)
+ 7170 train 3.923124 (lr=1.0741e-04) (hash(x)=150754166)
+ 7180 train 4.459840 (lr=1.0683e-04) (hash(x)=142844151)
+ 7190 train 4.051715 (lr=1.0626e-04) (hash(x)=145167470)
+ 7200 val loss 4.0558
+ 7200 val perplexity 57.7315
+ 7200 train 4.245859 (lr=1.0568e-04) (hash(x)=146172950)
+ 7210 train 4.069397 (lr=1.0511e-04) (hash(x)=143155287)
+ 7220 train 4.012595 (lr=1.0454e-04) (hash(x)=151976669)
+ 7230 train 4.131927 (lr=1.0398e-04) (hash(x)=145979940)
+ 7240 train 4.099851 (lr=1.0342e-04) (hash(x)=155656034)
+ 7250 train 4.103560 (lr=1.0286e-04) (hash(x)=135610136)
+ 7260 train 4.178649 (lr=1.0231e-04) (hash(x)=148133023)
+ 7270 train 4.054573 (lr=1.0176e-04) (hash(x)=148006681)
+ 7280 train 4.087193 (lr=1.0121e-04) (hash(x)=151397627)
+ 7290 train 4.051458 (lr=1.0066e-04) (hash(x)=141696871)
+ 7300 val loss 4.0521
+ 7300 val perplexity 57.5208
+ 7300 train 3.987123 (lr=1.0012e-04) (hash(x)=150018163)
+ 7310 train 4.026327 (lr=9.9586e-05) (hash(x)=154900050)
+ 7320 train 3.923479 (lr=9.9052e-05) (hash(x)=142608283)
+ 7330 train 4.023843 (lr=9.8521e-05) (hash(x)=148977837)
+ 7340 train 4.039836 (lr=9.7993e-05) (hash(x)=141676907)
+ 7350 train 3.929224 (lr=9.7469e-05) (hash(x)=147846997)
+ 7360 train 4.172251 (lr=9.6948e-05) (hash(x)=143252181)
+ 7370 train 3.990706 (lr=9.6431e-05) (hash(x)=155081717)
+ 7380 train 4.076017 (lr=9.5917e-05) (hash(x)=149678451)
+ 7390 train 3.903874 (lr=9.5406e-05) (hash(x)=159341637)
+ 7400 val loss 4.0528
+ 7400 val perplexity 57.5576
+ 7400 train 4.190771 (lr=9.4899e-05) (hash(x)=145351166)
+ 7410 train 4.010730 (lr=9.4395e-05) (hash(x)=162274009)
+ 7420 train 3.855141 (lr=9.3894e-05) (hash(x)=143254858)
+ 7430 train 3.970327 (lr=9.3397e-05) (hash(x)=143049884)
+ 7440 train 3.891670 (lr=9.2904e-05) (hash(x)=141768538)
+ 7450 train 3.909357 (lr=9.2413e-05) (hash(x)=139895310)
+ 7460 train 3.787262 (lr=9.1927e-05) (hash(x)=155900365)
+ 7470 train 3.772058 (lr=9.1443e-05) (hash(x)=145048215)
+ 7480 train 3.908541 (lr=9.0964e-05) (hash(x)=136992624)
+ 7490 train 4.018570 (lr=9.0487e-05) (hash(x)=141424789)
+ 7500 val loss 4.0561
+ 7500 val perplexity 57.7496
+ 7500 train 3.868097 (lr=9.0014e-05) (hash(x)=145292116)
+ 7510 train 3.908433 (lr=8.9545e-05) (hash(x)=144818029)
+ 7520 train 4.077182 (lr=8.9079e-05) (hash(x)=152903515)
+ 7530 train 4.084540 (lr=8.8617e-05) (hash(x)=156934724)
+ 7540 train 4.309597 (lr=8.8158e-05) (hash(x)=156695926)
+ 7550 train 4.071164 (lr=8.7702e-05) (hash(x)=149115609)
+ 7560 train 4.053979 (lr=8.7251e-05) (hash(x)=143974224)
+ 7570 train 4.006804 (lr=8.6802e-05) (hash(x)=148204424)
+ 7580 train 4.087867 (lr=8.6357e-05) (hash(x)=150177910)
+ 7590 train 4.101072 (lr=8.5916e-05) (hash(x)=140325629)
+ 7600 val loss 4.0446
+ 7600 val perplexity 57.0909
+ 7600 train 3.985204 (lr=8.5478e-05) (hash(x)=150235132)
+ 7610 train 4.110114 (lr=8.5044e-05) (hash(x)=157594837)
+ 7620 train 4.119292 (lr=8.4613e-05) (hash(x)=146696749)
+ 7630 train 4.158076 (lr=8.4186e-05) (hash(x)=148463580)
+ 7640 train 3.973668 (lr=8.3763e-05) (hash(x)=140271046)
+ 7650 train 4.038039 (lr=8.3343e-05) (hash(x)=159952319)
+ 7660 train 4.034402 (lr=8.2926e-05) (hash(x)=146307392)
+ 7670 train 3.989335 (lr=8.2514e-05) (hash(x)=145027988)
+ 7680 train 4.054815 (lr=8.2104e-05) (hash(x)=147729698)
+ 7690 train 4.038677 (lr=8.1699e-05) (hash(x)=143401210)
+ 7700 val loss 4.0396
+ 7700 val perplexity 56.8057
+ 7700 train 3.969559 (lr=8.1297e-05) (hash(x)=154543455)
+ 7710 train 4.043605 (lr=8.0898e-05) (hash(x)=151600456)
+ 7720 train 3.959592 (lr=8.0503e-05) (hash(x)=165913803)
+ 7730 train 3.830664 (lr=8.0112e-05) (hash(x)=144942059)
+ 7740 train 4.057778 (lr=7.9725e-05) (hash(x)=150302103)
+ 7750 train 4.001315 (lr=7.9341e-05) (hash(x)=155782458)
+ 7760 train 4.029814 (lr=7.8960e-05) (hash(x)=147824384)
+ 7770 train 4.042323 (lr=7.8584e-05) (hash(x)=149177657)
+ 7780 train 4.068219 (lr=7.8211e-05) (hash(x)=164446738)
+ 7790 train 3.872403 (lr=7.7841e-05) (hash(x)=146417077)
+ 7800 val loss 4.0418
+ 7800 val perplexity 56.9286
+ 7800 train 4.019204 (lr=7.7476e-05) (hash(x)=142456852)
+ 7810 train 3.718816 (lr=7.7114e-05) (hash(x)=146221807)
+ 7820 train 3.913092 (lr=7.6755e-05) (hash(x)=154136533)
+ 7830 train 3.827279 (lr=7.6400e-05) (hash(x)=147492735)
+ 7840 train 3.978069 (lr=7.6049e-05) (hash(x)=143481291)
+ 7850 train 3.808123 (lr=7.5702e-05) (hash(x)=136749307)
+ 7860 train 3.832468 (lr=7.5358e-05) (hash(x)=128985347)
+ 7870 train 3.735158 (lr=7.5018e-05) (hash(x)=152897363)
+ 7880 train 3.940140 (lr=7.4682e-05) (hash(x)=151460360)
+ 7890 train 3.758469 (lr=7.4350e-05) (hash(x)=137481738)
+ 7900 val loss 4.0428
+ 7900 val perplexity 56.9839
+ 7900 train 3.836514 (lr=7.4021e-05) (hash(x)=147363479)
+ 7910 train 4.058046 (lr=7.3696e-05) (hash(x)=151165744)
+ 7920 train 4.230208 (lr=7.3374e-05) (hash(x)=146214226)
+ 7930 train 4.041663 (lr=7.3056e-05) (hash(x)=154457502)
+ 7940 train 3.949116 (lr=7.2742e-05) (hash(x)=162025201)
+ 7950 train 3.940442 (lr=7.2432e-05) (hash(x)=156488145)
+ 7960 train 4.217037 (lr=7.2126e-05) (hash(x)=154011584)
+ 7970 train 4.064871 (lr=7.1823e-05) (hash(x)=145330320)
+ 7980 train 3.970730 (lr=7.1524e-05) (hash(x)=148216312)
+ 7990 train 3.905138 (lr=7.1228e-05) (hash(x)=156819091)
+ 8000 val loss 4.0332
+ 8000 val perplexity 56.4407
+ 8000 train 4.066167 (lr=7.0937e-05) (hash(x)=156122973)
+ 8010 train 4.008729 (lr=7.0649e-05) (hash(x)=146933088)
+ 8020 train 4.105101 (lr=7.0365e-05) (hash(x)=151650746)
+ 8030 train 4.029701 (lr=7.0085e-05) (hash(x)=140692373)
+ 8040 train 4.217567 (lr=6.9808e-05) (hash(x)=147850094)
+ 8050 train 4.041002 (lr=6.9536e-05) (hash(x)=144454109)
+ 8060 train 3.900548 (lr=6.9267e-05) (hash(x)=152205124)
+ 8070 train 4.119748 (lr=6.9002e-05) (hash(x)=148918066)
+ 8080 train 4.046878 (lr=6.8740e-05) (hash(x)=146481024)
+ 8090 train 4.087018 (lr=6.8483e-05) (hash(x)=168785549)
+ 8100 val loss 4.0309
+ 8100 val perplexity 56.3093
+ 8100 train 4.111370 (lr=6.8229e-05) (hash(x)=156153179)
+ 8110 train 4.061481 (lr=6.7979e-05) (hash(x)=150498601)
+ 8120 train 4.070875 (lr=6.7733e-05) (hash(x)=142002714)
+ 8130 train 3.953250 (lr=6.7490e-05) (hash(x)=138450483)
+ 8140 train 4.037171 (lr=6.7252e-05) (hash(x)=145306351)
+ 8150 train 3.917471 (lr=6.7017e-05) (hash(x)=172502865)
+ 8160 train 4.017513 (lr=6.6786e-05) (hash(x)=156689649)
+ 8170 train 3.935237 (lr=6.6559e-05) (hash(x)=143766932)
+ 8180 train 4.074619 (lr=6.6335e-05) (hash(x)=150361321)
+ 8190 train 3.908869 (lr=6.6116e-05) (hash(x)=152664241)
+ 8200 val loss 4.0305
+ 8200 val perplexity 56.2880
+ 8200 train 4.183567 (lr=6.5900e-05) (hash(x)=146430698)
+ 8210 train 3.867009 (lr=6.5688e-05) (hash(x)=167362078)
+ 8220 train 4.093256 (lr=6.5480e-05) (hash(x)=157650942)
+ 8230 train 3.901181 (lr=6.5276e-05) (hash(x)=142102044)
+ 8240 train 3.941739 (lr=6.5076e-05) (hash(x)=153405900)
+ 8250 train 3.893206 (lr=6.4879e-05) (hash(x)=140222909)
+ 8260 train 4.024216 (lr=6.4687e-05) (hash(x)=137981854)
+ 8270 train 3.940482 (lr=6.4498e-05) (hash(x)=147576272)
+ 8280 train 3.954591 (lr=6.4313e-05) (hash(x)=157988195)
+ 8290 train 4.008123 (lr=6.4132e-05) (hash(x)=145715681)
+ 8300 val loss 4.0318
+ 8300 val perplexity 56.3649
+ 8300 train 3.955391 (lr=6.3954e-05) (hash(x)=143507257)
+ 8310 train 4.012368 (lr=6.3781e-05) (hash(x)=148930064)
+ 8320 train 3.940289 (lr=6.3612e-05) (hash(x)=144134805)
+ 8330 train 4.057148 (lr=6.3446e-05) (hash(x)=148418079)
+ 8340 train 3.786915 (lr=6.3284e-05) (hash(x)=166518807)
+ 8350 train 4.088065 (lr=6.3126e-05) (hash(x)=140042036)
+ 8360 train 3.959472 (lr=6.2972e-05) (hash(x)=151427074)
+ 8370 train 4.044179 (lr=6.2822e-05) (hash(x)=158389185)
+ 8380 train 3.948700 (lr=6.2676e-05) (hash(x)=148193308)
+ 8390 train 4.018093 (lr=6.2533e-05) (hash(x)=144289008)
+ 8400 val loss 4.0231
+ 8400 val perplexity 55.8756
+ 8400 train 3.730502 (lr=6.2395e-05) (hash(x)=166272643)
+ 8410 train 3.945931 (lr=6.2260e-05) (hash(x)=162244433)
+ 8420 train 3.980301 (lr=6.2129e-05) (hash(x)=149271299)
+ 8430 train 4.122830 (lr=6.2002e-05) (hash(x)=154135338)
+ 8440 train 4.166543 (lr=6.1879e-05) (hash(x)=149661838)
+ 8450 train 4.001691 (lr=6.1760e-05) (hash(x)=142205056)
+ 8460 train 3.936829 (lr=6.1645e-05) (hash(x)=166652398)
+ 8470 train 4.108621 (lr=6.1533e-05) (hash(x)=149029710)
+ 8480 train 3.878456 (lr=6.1426e-05) (hash(x)=154217415)
+ 8490 train 3.956630 (lr=6.1322e-05) (hash(x)=151526557)
+ 8500 val loss 4.0245
+ 8500 val perplexity 55.9505
+ 8500 train 3.953250 (lr=6.1223e-05) (hash(x)=143887848)
+ 8510 train 3.933973 (lr=6.1127e-05) (hash(x)=152324645)
+ 8520 train 3.669834 (lr=6.1035e-05) (hash(x)=145124959)
+ 8530 train 3.988543 (lr=6.0947e-05) (hash(x)=186319340)
+ 8540 train 3.998351 (lr=6.0863e-05) (hash(x)=147388339)
+ 8550 train 4.030067 (lr=6.0783e-05) (hash(x)=150118928)
+ 8560 train 4.035618 (lr=6.0706e-05) (hash(x)=153985286)
+ 8570 train 3.982238 (lr=6.0634e-05) (hash(x)=149582799)
+ 8580 train 3.983272 (lr=6.0566e-05) (hash(x)=156867870)
+ 8590 train 3.867692 (lr=6.0501e-05) (hash(x)=148773298)
+ 8600 val loss 4.0264
+ 8600 val perplexity 56.0602
+ 8600 train 3.975746 (lr=6.0440e-05) (hash(x)=156900341)
+ 8610 train 4.127031 (lr=6.0384e-05) (hash(x)=156790089)
+ 8620 train 3.927780 (lr=6.0331e-05) (hash(x)=154680805)
+ 8630 train 3.971957 (lr=6.0282e-05) (hash(x)=138107873)
+ 8640 train 4.058555 (lr=6.0237e-05) (hash(x)=142554243)
+ 8650 train 3.861350 (lr=6.0196e-05) (hash(x)=134360956)
+ 8660 train 3.940917 (lr=6.0159e-05) (hash(x)=128599610)
+ 8670 train 3.970823 (lr=6.0125e-05) (hash(x)=159628945)
+ 8680 train 3.998497 (lr=6.0096e-05) (hash(x)=143161532)
+ 8690 train 4.033208 (lr=6.0070e-05) (hash(x)=140104912)
+ 8700 val loss 4.0187
+ 8700 val perplexity 55.6291
+ 8700 train 4.209576 (lr=6.0049e-05) (hash(x)=146417632)
+ 8710 train 4.181637 (lr=6.0031e-05) (hash(x)=153671695)
+ 8720 train 4.049231 (lr=6.0018e-05) (hash(x)=141680750)
+ 8730 train 3.930593 (lr=6.0008e-05) (hash(x)=147279231)
+ 8740 train 4.216293 (lr=6.0002e-05) (hash(x)=129729371)
+ 8749 val loss 4.0193
+ 8749 val perplexity 55.6630
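Each log line follows the fixed format `<step> train <loss> (lr=<lr>) (hash(x)=<batch hash>)`, with `val loss` / `val perplexity` pairs every 100 steps, and the reported perplexity is exp(val loss): for the final line, exp(4.0193) ≈ 55.66, matching 55.6630. A minimal parsing sketch (file path illustrative, assuming the file has been downloaded locally):

```python
# Minimal sketch: extract the "val loss" lines from log2.txt and confirm
# that the logged perplexity is exp(loss). Requires Python 3.8+ (walrus).
import math
import re

pattern = re.compile(r"^(\d+) val loss ([\d.]+)")
with open("lr6e-4_total_batch_size61440_baseline_seed1339/log2.txt") as f:
    val_losses = [(int(m.group(1)), float(m.group(2)))
                  for line in f if (m := pattern.match(line))]

step, loss = val_losses[-1]        # (8749, 4.0193)
print(step, loss, math.exp(loss))  # exp(4.0193) ~= 55.66, matching the log
```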
lr6e-4_total_batch_size61440_baseline_seed1339/model_08749.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ceb2613f6f378609f0a5aaa260de28c00cb0c688169a9a2036966df5a5dd8b82
+ size 92843394
lr6e-4_total_batch_size61440_baseline_seed1339/optimizer_08749.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9789733031cde5e1b8a9b52a97f37623631b6b70ac5db1debe0274e14ae8edd7
+ size 179406214
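`model_08749.pt` and `optimizer_08749.pt` are the checkpoints at the run's final step (8749 of max_steps 8750). A minimal inspection sketch is below; whether they hold plain state_dicts or wrapped dicts is an assumption, so check the keys before wiring them into a resume path.

```python
# Minimal sketch: peek inside the final-step checkpoints on CPU.
# The internal layout (plain state_dict vs. wrapped dict) is an assumption.
import torch

ckpt = torch.load(
    "lr6e-4_total_batch_size61440_baseline_seed1339/model_08749.pt",
    map_location="cpu",
)
opt = torch.load(
    "lr6e-4_total_batch_size61440_baseline_seed1339/optimizer_08749.pt",
    map_location="cpu",
)
# List a few top-level keys to see how the checkpoint is organized.
print(type(ckpt), list(ckpt)[:5] if isinstance(ckpt, dict) else ckpt)
print(type(opt), list(opt)[:5] if isinstance(opt, dict) else opt)
```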