andrew-healey committed
Commit 2399e01 (verified) · Parent: eac01f3

Upload folder using huggingface_hub

lr6e-4_total_batch_size61440_baseline_seed1341/args.json ADDED
@@ -0,0 +1 @@
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_11/lr6e-4_total_batch_size61440_baseline_seed1341", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_11", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1341, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.0006, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "6e-4_61440", "n_embd": 256}
lr6e-4_total_batch_size61440_baseline_seed1341/dataloader_08749.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:953385078aa3787b69fc6857dfd48b0a2cd2f4d27c6f8892e01211aca53d07f5
+ size 964
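
dataloader_08749.pt (like the model and optimizer checkpoints below) is stored through Git LFS: the three diff lines are the LFS pointer (spec version, sha256 of the real blob, and its size in bytes), not the tensor data itself. A minimal sketch of fetching the real file follows; "user/repo" is a hypothetical placeholder, since this page does not show the repository id.

```python
from huggingface_hub import hf_hub_download

# Minimal sketch: download the real 964-byte blob behind the LFS pointer.
# "user/repo" is a hypothetical placeholder for this repository's id.
path = hf_hub_download(
    repo_id="user/repo",
    filename="lr6e-4_total_batch_size61440_baseline_seed1341/dataloader_08749.pt",
    revision="2399e01",  # pin to this commit (or use the full commit hash)
)
print(path)
```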
lr6e-4_total_batch_size61440_baseline_seed1341/log2.txt ADDED
@@ -0,0 +1,1054 @@
1
+ max_steps: 8750
2
+ 0 val loss 11.3073
3
+ 0 val perplexity 81417.7266
4
+ 0 train 11.312937 (lr=1.2000e-06) (hash(x)=145079536)
5
+ 10 train 10.368252 (lr=1.3200e-05) (hash(x)=142935521)
6
+ 20 train 9.903193 (lr=2.5200e-05) (hash(x)=136726044)
7
+ 30 train 9.625055 (lr=3.7200e-05) (hash(x)=149725277)
8
+ 40 train 9.245327 (lr=4.9200e-05) (hash(x)=137527851)
9
+ 50 train 8.813496 (lr=6.1200e-05) (hash(x)=154745873)
10
+ 60 train 8.451530 (lr=7.3200e-05) (hash(x)=158449312)
11
+ 70 train 8.049711 (lr=8.5200e-05) (hash(x)=158867878)
12
+ 80 train 7.775861 (lr=9.7200e-05) (hash(x)=152693855)
13
+ 90 train 7.624683 (lr=1.0920e-04) (hash(x)=152089620)
14
+ 100 val loss 7.5940
15
+ 100 val perplexity 1986.1552
16
+ 100 train 7.712552 (lr=1.2120e-04) (hash(x)=155800595)
17
+ 110 train 7.585830 (lr=1.3320e-04) (hash(x)=156324852)
18
+ 120 train 7.370051 (lr=1.4520e-04) (hash(x)=148079157)
19
+ 130 train 7.419324 (lr=1.5720e-04) (hash(x)=151260070)
20
+ 140 train 7.308308 (lr=1.6920e-04) (hash(x)=161098412)
21
+ 150 train 7.215111 (lr=1.8120e-04) (hash(x)=148595389)
22
+ 160 train 7.113561 (lr=1.9320e-04) (hash(x)=162558230)
23
+ 170 train 6.841525 (lr=2.0520e-04) (hash(x)=136967959)
24
+ 180 train 7.058576 (lr=2.1720e-04) (hash(x)=160279390)
25
+ 190 train 6.758896 (lr=2.2920e-04) (hash(x)=139072589)
26
+ 200 val loss 6.8114
27
+ 200 val perplexity 908.1339
28
+ 200 train 6.630161 (lr=2.4120e-04) (hash(x)=145606733)
29
+ 210 train 6.715324 (lr=2.5320e-04) (hash(x)=151385952)
30
+ 220 train 6.557688 (lr=2.6520e-04) (hash(x)=141556767)
31
+ 230 train 6.759032 (lr=2.7720e-04) (hash(x)=159727511)
32
+ 240 train 6.365868 (lr=2.8920e-04) (hash(x)=145935602)
33
+ 250 train 6.389328 (lr=3.0120e-04) (hash(x)=138221231)
34
+ 260 train 6.427613 (lr=3.1320e-04) (hash(x)=148897684)
35
+ 270 train 6.943334 (lr=3.2520e-04) (hash(x)=155063217)
36
+ 280 train 6.274597 (lr=3.3720e-04) (hash(x)=147263890)
37
+ 290 train 6.319882 (lr=3.4920e-04) (hash(x)=151660516)
38
+ 300 val loss 6.3536
39
+ 300 val perplexity 574.5654
40
+ 300 train 6.283481 (lr=3.6120e-04) (hash(x)=150367139)
41
+ 310 train 6.020897 (lr=3.7320e-04) (hash(x)=151170832)
42
+ 320 train 6.205254 (lr=3.8520e-04) (hash(x)=138591183)
43
+ 330 train 5.936298 (lr=3.9720e-04) (hash(x)=143603420)
44
+ 340 train 6.074450 (lr=4.0920e-04) (hash(x)=140068134)
45
+ 350 train 6.120420 (lr=4.2120e-04) (hash(x)=155579314)
46
+ 360 train 5.943379 (lr=4.3320e-04) (hash(x)=148496629)
47
+ 370 train 6.032781 (lr=4.4520e-04) (hash(x)=156763563)
48
+ 380 train 5.906313 (lr=4.5720e-04) (hash(x)=160905666)
49
+ 390 train 6.287946 (lr=4.6920e-04) (hash(x)=157064388)
50
+ 400 val loss 6.0206
51
+ 400 val perplexity 411.8191
52
+ 400 train 6.307537 (lr=4.8120e-04) (hash(x)=155747374)
53
+ 410 train 5.967244 (lr=4.9320e-04) (hash(x)=147294192)
54
+ 420 train 5.991858 (lr=5.0520e-04) (hash(x)=144952473)
55
+ 430 train 5.933919 (lr=5.1720e-04) (hash(x)=160908725)
56
+ 440 train 5.977492 (lr=5.2920e-04) (hash(x)=145105827)
57
+ 450 train 6.092792 (lr=5.4120e-04) (hash(x)=159334575)
58
+ 460 train 5.872944 (lr=5.5320e-04) (hash(x)=161983315)
59
+ 470 train 5.769945 (lr=5.6520e-04) (hash(x)=138900475)
60
+ 480 train 5.778133 (lr=5.7720e-04) (hash(x)=152981338)
61
+ 490 train 5.722609 (lr=5.8920e-04) (hash(x)=143353862)
62
+ 500 val loss 5.7599
63
+ 500 val perplexity 317.3160
64
+ 500 train 5.634831 (lr=6.0000e-04) (hash(x)=140604760)
65
+ 510 train 5.554125 (lr=6.0000e-04) (hash(x)=150184398)
66
+ 520 train 5.690992 (lr=5.9999e-04) (hash(x)=141863041)
67
+ 530 train 5.695804 (lr=5.9998e-04) (hash(x)=153148776)
68
+ 540 train 5.605615 (lr=5.9997e-04) (hash(x)=144196873)
69
+ 550 train 5.616800 (lr=5.9995e-04) (hash(x)=146208052)
70
+ 560 train 5.552891 (lr=5.9993e-04) (hash(x)=139601869)
71
+ 570 train 5.671474 (lr=5.9990e-04) (hash(x)=150976466)
72
+ 580 train 5.617141 (lr=5.9987e-04) (hash(x)=150801563)
73
+ 590 train 5.752127 (lr=5.9984e-04) (hash(x)=167531956)
74
+ 600 val loss 5.5874
75
+ 600 val perplexity 267.0494
76
+ 600 train 5.480707 (lr=5.9980e-04) (hash(x)=148404734)
77
+ 610 train 5.441929 (lr=5.9976e-04) (hash(x)=146223362)
78
+ 620 train 5.410588 (lr=5.9972e-04) (hash(x)=147269386)
79
+ 630 train 5.355142 (lr=5.9967e-04) (hash(x)=149819251)
80
+ 640 train 5.675205 (lr=5.9962e-04) (hash(x)=146771854)
81
+ 650 train 5.610947 (lr=5.9956e-04) (hash(x)=155681970)
82
+ 660 train 5.329986 (lr=5.9950e-04) (hash(x)=138901204)
83
+ 670 train 5.169038 (lr=5.9943e-04) (hash(x)=143417964)
84
+ 680 train 5.311356 (lr=5.9937e-04) (hash(x)=141188344)
85
+ 690 train 5.435308 (lr=5.9929e-04) (hash(x)=148101044)
86
+ 700 val loss 5.4478
87
+ 700 val perplexity 232.2390
88
+ 700 train 5.447738 (lr=5.9922e-04) (hash(x)=148115934)
89
+ 710 train 5.489768 (lr=5.9914e-04) (hash(x)=142019907)
90
+ 720 train 5.410671 (lr=5.9905e-04) (hash(x)=144138833)
91
+ 730 train 5.484633 (lr=5.9897e-04) (hash(x)=150694253)
92
+ 740 train 5.314012 (lr=5.9887e-04) (hash(x)=157694921)
93
+ 750 train 5.407532 (lr=5.9878e-04) (hash(x)=157074034)
94
+ 760 train 5.332727 (lr=5.9868e-04) (hash(x)=157110692)
95
+ 770 train 5.248004 (lr=5.9857e-04) (hash(x)=145925624)
96
+ 780 train 5.363815 (lr=5.9847e-04) (hash(x)=156687510)
97
+ 790 train 5.321131 (lr=5.9836e-04) (hash(x)=150458321)
98
+ 800 val loss 5.2872
99
+ 800 val perplexity 197.7977
100
+ 800 train 5.190647 (lr=5.9824e-04) (hash(x)=137464699)
101
+ 810 train 5.217458 (lr=5.9812e-04) (hash(x)=158371555)
102
+ 820 train 5.218845 (lr=5.9800e-04) (hash(x)=149201877)
103
+ 830 train 5.168445 (lr=5.9787e-04) (hash(x)=143564160)
104
+ 840 train 5.298893 (lr=5.9774e-04) (hash(x)=153937595)
105
+ 850 train 5.205542 (lr=5.9761e-04) (hash(x)=166955614)
106
+ 860 train 5.198522 (lr=5.9747e-04) (hash(x)=164264614)
107
+ 870 train 5.056293 (lr=5.9732e-04) (hash(x)=139219447)
108
+ 880 train 5.254245 (lr=5.9718e-04) (hash(x)=151139580)
109
+ 890 train 5.119195 (lr=5.9703e-04) (hash(x)=147154297)
110
+ 900 val loss 5.1865
111
+ 900 val perplexity 178.8493
112
+ 900 train 4.982620 (lr=5.9687e-04) (hash(x)=143886042)
113
+ 910 train 4.931602 (lr=5.9672e-04) (hash(x)=144136925)
114
+ 920 train 5.106339 (lr=5.9655e-04) (hash(x)=139764865)
115
+ 930 train 5.073709 (lr=5.9639e-04) (hash(x)=137258208)
116
+ 940 train 5.146003 (lr=5.9622e-04) (hash(x)=151881247)
117
+ 950 train 5.100431 (lr=5.9605e-04) (hash(x)=193610391)
118
+ 960 train 5.032833 (lr=5.9587e-04) (hash(x)=149364435)
119
+ 970 train 5.070024 (lr=5.9569e-04) (hash(x)=148828909)
120
+ 980 train 4.851474 (lr=5.9550e-04) (hash(x)=143605331)
121
+ 990 train 4.866031 (lr=5.9531e-04) (hash(x)=153415979)
122
+ 1000 val loss 5.1044
123
+ 1000 val perplexity 164.7389
124
+ 1000 train 5.153195 (lr=5.9512e-04) (hash(x)=163799796)
125
+ 1010 train 5.123326 (lr=5.9492e-04) (hash(x)=145937873)
126
+ 1020 train 5.011211 (lr=5.9472e-04) (hash(x)=165459184)
127
+ 1030 train 5.155272 (lr=5.9452e-04) (hash(x)=150932001)
128
+ 1040 train 5.113722 (lr=5.9431e-04) (hash(x)=162567106)
129
+ 1050 train 5.125001 (lr=5.9410e-04) (hash(x)=154107339)
130
+ 1060 train 5.092893 (lr=5.9388e-04) (hash(x)=149509318)
131
+ 1070 train 4.914678 (lr=5.9366e-04) (hash(x)=152383589)
132
+ 1080 train 4.964736 (lr=5.9344e-04) (hash(x)=155926898)
133
+ 1090 train 4.977837 (lr=5.9321e-04) (hash(x)=150988851)
134
+ 1100 val loss 5.0137
135
+ 1100 val perplexity 150.4616
136
+ 1100 train 5.075565 (lr=5.9298e-04) (hash(x)=144592844)
137
+ 1110 train 5.014026 (lr=5.9275e-04) (hash(x)=146758273)
138
+ 1120 train 4.895799 (lr=5.9251e-04) (hash(x)=146142884)
139
+ 1130 train 5.016222 (lr=5.9227e-04) (hash(x)=152078362)
140
+ 1140 train 5.077383 (lr=5.9202e-04) (hash(x)=160186914)
141
+ 1150 train 5.011677 (lr=5.9177e-04) (hash(x)=154535861)
142
+ 1160 train 4.861885 (lr=5.9152e-04) (hash(x)=151399108)
143
+ 1170 train 4.935675 (lr=5.9126e-04) (hash(x)=146833517)
144
+ 1180 train 4.850985 (lr=5.9100e-04) (hash(x)=140850009)
145
+ 1190 train 4.821474 (lr=5.9073e-04) (hash(x)=160947364)
146
+ 1200 val loss 4.9028
147
+ 1200 val perplexity 134.6711
148
+ 1200 train 4.823929 (lr=5.9046e-04) (hash(x)=204706354)
149
+ 1210 train 4.707417 (lr=5.9019e-04) (hash(x)=145764356)
150
+ 1220 train 4.770687 (lr=5.8992e-04) (hash(x)=153116691)
151
+ 1230 train 4.793217 (lr=5.8963e-04) (hash(x)=160544763)
152
+ 1240 train 4.698187 (lr=5.8935e-04) (hash(x)=137389978)
153
+ 1250 train 4.776669 (lr=5.8906e-04) (hash(x)=143783202)
154
+ 1260 train 4.581037 (lr=5.8877e-04) (hash(x)=153470243)
155
+ 1270 train 4.709948 (lr=5.8848e-04) (hash(x)=149055354)
156
+ 1280 train 4.692148 (lr=5.8818e-04) (hash(x)=146874658)
157
+ 1290 train 4.882227 (lr=5.8787e-04) (hash(x)=152527777)
158
+ 1300 val loss 4.8340
159
+ 1300 val perplexity 125.7093
160
+ 1300 train 4.785907 (lr=5.8757e-04) (hash(x)=150862210)
161
+ 1310 train 4.793396 (lr=5.8726e-04) (hash(x)=157185054)
162
+ 1320 train 4.854813 (lr=5.8694e-04) (hash(x)=153733727)
163
+ 1330 train 4.836212 (lr=5.8663e-04) (hash(x)=163696656)
164
+ 1340 train 4.972438 (lr=5.8630e-04) (hash(x)=156931875)
165
+ 1350 train 4.750898 (lr=5.8598e-04) (hash(x)=150753426)
166
+ 1360 train 4.756314 (lr=5.8565e-04) (hash(x)=145642367)
167
+ 1370 train 4.789042 (lr=5.8532e-04) (hash(x)=159217491)
168
+ 1380 train 4.764640 (lr=5.8498e-04) (hash(x)=151499952)
169
+ 1390 train 4.874175 (lr=5.8464e-04) (hash(x)=147137197)
170
+ 1400 val loss 4.8000
171
+ 1400 val perplexity 121.5108
172
+ 1400 train 4.837022 (lr=5.8430e-04) (hash(x)=147766811)
173
+ 1410 train 4.653432 (lr=5.8395e-04) (hash(x)=155728979)
174
+ 1420 train 4.603916 (lr=5.8360e-04) (hash(x)=151858238)
175
+ 1430 train 4.677294 (lr=5.8324e-04) (hash(x)=147007501)
176
+ 1440 train 4.638676 (lr=5.8289e-04) (hash(x)=123449142)
177
+ 1450 train 4.636933 (lr=5.8252e-04) (hash(x)=151177814)
178
+ 1460 train 4.783847 (lr=5.8216e-04) (hash(x)=151804649)
179
+ 1470 train 4.737172 (lr=5.8179e-04) (hash(x)=140614506)
180
+ 1480 train 4.942430 (lr=5.8142e-04) (hash(x)=154590935)
181
+ 1490 train 4.725112 (lr=5.8104e-04) (hash(x)=155969672)
182
+ 1500 val loss 4.7176
183
+ 1500 val perplexity 111.8998
184
+ 1500 train 4.637668 (lr=5.8066e-04) (hash(x)=135925327)
185
+ 1510 train 4.628280 (lr=5.8028e-04) (hash(x)=140488340)
186
+ 1520 train 4.628044 (lr=5.7989e-04) (hash(x)=150707923)
187
+ 1530 train 4.546852 (lr=5.7950e-04) (hash(x)=141081542)
188
+ 1540 train 4.596313 (lr=5.7910e-04) (hash(x)=150136050)
189
+ 1550 train 4.459705 (lr=5.7870e-04) (hash(x)=124842353)
190
+ 1560 train 4.585103 (lr=5.7830e-04) (hash(x)=142320279)
191
+ 1570 train 4.540831 (lr=5.7790e-04) (hash(x)=137593322)
192
+ 1580 train 4.600926 (lr=5.7749e-04) (hash(x)=154607301)
193
+ 1590 train 4.483570 (lr=5.7707e-04) (hash(x)=143089856)
194
+ 1600 val loss 4.7190
195
+ 1600 val perplexity 112.0528
196
+ 1600 train 4.700891 (lr=5.7666e-04) (hash(x)=160440642)
197
+ 1610 train 4.911601 (lr=5.7624e-04) (hash(x)=150875971)
198
+ 1620 train 4.787804 (lr=5.7581e-04) (hash(x)=165443918)
199
+ 1630 train 4.590433 (lr=5.7539e-04) (hash(x)=145853246)
200
+ 1640 train 4.684464 (lr=5.7496e-04) (hash(x)=120920586)
201
+ 1650 train 4.619179 (lr=5.7452e-04) (hash(x)=146732412)
202
+ 1660 train 4.665538 (lr=5.7408e-04) (hash(x)=148900413)
203
+ 1670 train 4.915817 (lr=5.7364e-04) (hash(x)=147090451)
204
+ 1680 train 4.679054 (lr=5.7320e-04) (hash(x)=153099341)
205
+ 1690 train 4.705451 (lr=5.7275e-04) (hash(x)=148556821)
206
+ 1700 val loss 4.6333
207
+ 1700 val perplexity 102.8577
208
+ 1700 train 4.755726 (lr=5.7230e-04) (hash(x)=151184106)
209
+ 1710 train 4.668022 (lr=5.7184e-04) (hash(x)=151304861)
210
+ 1720 train 4.466074 (lr=5.7138e-04) (hash(x)=147047589)
211
+ 1730 train 4.571427 (lr=5.7092e-04) (hash(x)=157613602)
212
+ 1740 train 4.492076 (lr=5.7045e-04) (hash(x)=145631923)
213
+ 1750 train 4.450287 (lr=5.6999e-04) (hash(x)=153416694)
214
+ 1760 train 4.637472 (lr=5.6951e-04) (hash(x)=143038500)
215
+ 1770 train 4.591135 (lr=5.6904e-04) (hash(x)=163545780)
216
+ 1780 train 4.526033 (lr=5.6856e-04) (hash(x)=159712197)
217
+ 1790 train 4.433856 (lr=5.6807e-04) (hash(x)=146293430)
218
+ 1800 val loss 4.5992
219
+ 1800 val perplexity 99.4005
220
+ 1800 train 4.560413 (lr=5.6759e-04) (hash(x)=148100580)
221
+ 1810 train 4.519947 (lr=5.6710e-04) (hash(x)=137858614)
222
+ 1820 train 4.542280 (lr=5.6660e-04) (hash(x)=155765218)
223
+ 1830 train 4.465188 (lr=5.6611e-04) (hash(x)=147554787)
224
+ 1840 train 4.546216 (lr=5.6561e-04) (hash(x)=160001341)
225
+ 1850 train 4.432173 (lr=5.6510e-04) (hash(x)=153059873)
226
+ 1860 train 4.388655 (lr=5.6459e-04) (hash(x)=155656532)
227
+ 1870 train 4.440943 (lr=5.6408e-04) (hash(x)=146515528)
228
+ 1880 train 4.311157 (lr=5.6357e-04) (hash(x)=144385809)
229
+ 1890 train 4.361921 (lr=5.6305e-04) (hash(x)=150375274)
230
+ 1900 val loss 4.6136
231
+ 1900 val perplexity 100.8466
232
+ 1900 train 4.361186 (lr=5.6253e-04) (hash(x)=149434659)
233
+ 1910 train 4.457406 (lr=5.6201e-04) (hash(x)=140811482)
234
+ 1920 train 4.538684 (lr=5.6148e-04) (hash(x)=149776450)
235
+ 1930 train 4.516073 (lr=5.6095e-04) (hash(x)=153611546)
236
+ 1940 train 4.602579 (lr=5.6041e-04) (hash(x)=152628274)
237
+ 1950 train 4.535326 (lr=5.5988e-04) (hash(x)=151183924)
238
+ 1960 train 4.609030 (lr=5.5934e-04) (hash(x)=143507386)
239
+ 1970 train 4.646500 (lr=5.5879e-04) (hash(x)=180301574)
240
+ 1980 train 4.562404 (lr=5.5824e-04) (hash(x)=156383157)
241
+ 1990 train 4.604955 (lr=5.5769e-04) (hash(x)=145911523)
242
+ 2000 val loss 4.5370
243
+ 2000 val perplexity 93.4098
244
+ 2000 train 4.592966 (lr=5.5714e-04) (hash(x)=152285486)
245
+ 2010 train 4.510575 (lr=5.5658e-04) (hash(x)=154531062)
246
+ 2020 train 4.466523 (lr=5.5602e-04) (hash(x)=154403800)
247
+ 2030 train 4.559129 (lr=5.5546e-04) (hash(x)=153109813)
248
+ 2040 train 4.618740 (lr=5.5489e-04) (hash(x)=149156251)
249
+ 2050 train 4.415614 (lr=5.5432e-04) (hash(x)=147613800)
250
+ 2060 train 4.555984 (lr=5.5374e-04) (hash(x)=160283013)
251
+ 2070 train 4.509446 (lr=5.5317e-04) (hash(x)=141632717)
252
+ 2080 train 4.469609 (lr=5.5259e-04) (hash(x)=147292755)
253
+ 2090 train 4.637484 (lr=5.5200e-04) (hash(x)=146447537)
254
+ 2100 val loss 4.5029
255
+ 2100 val perplexity 90.2812
256
+ 2100 train 4.279584 (lr=5.5142e-04) (hash(x)=144294295)
257
+ 2110 train 4.829531 (lr=5.5083e-04) (hash(x)=142838110)
258
+ 2120 train 4.783645 (lr=5.5023e-04) (hash(x)=151878701)
259
+ 2130 train 4.367728 (lr=5.4964e-04) (hash(x)=143929160)
260
+ 2140 train 4.349949 (lr=5.4904e-04) (hash(x)=149368632)
261
+ 2150 train 4.424512 (lr=5.4843e-04) (hash(x)=141460003)
262
+ 2160 train 4.387996 (lr=5.4783e-04) (hash(x)=140793655)
263
+ 2170 train 4.537252 (lr=5.4722e-04) (hash(x)=146336171)
264
+ 2180 train 4.358654 (lr=5.4661e-04) (hash(x)=150200011)
265
+ 2190 train 4.407820 (lr=5.4599e-04) (hash(x)=155337561)
266
+ 2200 val loss 4.5166
267
+ 2200 val perplexity 91.5197
268
+ 2200 train 4.415632 (lr=5.4537e-04) (hash(x)=175030215)
269
+ 2210 train 4.561185 (lr=5.4475e-04) (hash(x)=139653118)
270
+ 2220 train 4.427787 (lr=5.4413e-04) (hash(x)=151690464)
271
+ 2230 train 4.169754 (lr=5.4350e-04) (hash(x)=164526288)
272
+ 2240 train 4.279044 (lr=5.4287e-04) (hash(x)=141536420)
273
+ 2250 train 4.489822 (lr=5.4223e-04) (hash(x)=167362519)
274
+ 2260 train 4.239832 (lr=5.4160e-04) (hash(x)=142323410)
275
+ 2270 train 4.654835 (lr=5.4096e-04) (hash(x)=176298114)
276
+ 2280 train 4.339916 (lr=5.4031e-04) (hash(x)=149454362)
277
+ 2290 train 4.460354 (lr=5.3967e-04) (hash(x)=172920991)
278
+ 2300 val loss 4.4944
279
+ 2300 val perplexity 89.5110
280
+ 2300 train 4.432306 (lr=5.3902e-04) (hash(x)=150831428)
281
+ 2310 train 4.298184 (lr=5.3837e-04) (hash(x)=141773102)
282
+ 2320 train 4.238669 (lr=5.3771e-04) (hash(x)=146520072)
283
+ 2330 train 4.445845 (lr=5.3705e-04) (hash(x)=174782103)
284
+ 2340 train 4.545360 (lr=5.3639e-04) (hash(x)=171102155)
285
+ 2350 train 4.382959 (lr=5.3573e-04) (hash(x)=152910404)
286
+ 2360 train 4.378536 (lr=5.3506e-04) (hash(x)=147122637)
287
+ 2370 train 4.312759 (lr=5.3439e-04) (hash(x)=152558061)
288
+ 2380 train 4.385630 (lr=5.3372e-04) (hash(x)=146957950)
289
+ 2390 train 4.428930 (lr=5.3304e-04) (hash(x)=156879460)
290
+ 2400 val loss 4.4428
291
+ 2400 val perplexity 85.0095
292
+ 2400 train 4.586795 (lr=5.3236e-04) (hash(x)=140808297)
293
+ 2410 train 4.313769 (lr=5.3168e-04) (hash(x)=164207329)
294
+ 2420 train 4.518151 (lr=5.3099e-04) (hash(x)=150119566)
295
+ 2430 train 4.366627 (lr=5.3030e-04) (hash(x)=148059202)
296
+ 2440 train 4.443774 (lr=5.2961e-04) (hash(x)=144913823)
297
+ 2450 train 4.606112 (lr=5.2892e-04) (hash(x)=155469321)
298
+ 2460 train 4.643090 (lr=5.2822e-04) (hash(x)=147971519)
299
+ 2470 train 4.351307 (lr=5.2752e-04) (hash(x)=151115240)
300
+ 2480 train 4.353699 (lr=5.2682e-04) (hash(x)=158904683)
301
+ 2490 train 4.358671 (lr=5.2612e-04) (hash(x)=153639698)
302
+ 2500 val loss 4.4394
303
+ 2500 val perplexity 84.7268
304
+ 2500 train 4.399701 (lr=5.2541e-04) (hash(x)=153160275)
305
+ 2510 train 4.225253 (lr=5.2470e-04) (hash(x)=166439539)
306
+ 2520 train 4.480737 (lr=5.2398e-04) (hash(x)=141364112)
307
+ 2530 train 4.434113 (lr=5.2327e-04) (hash(x)=147911679)
308
+ 2540 train 4.391127 (lr=5.2255e-04) (hash(x)=152607847)
309
+ 2550 train 4.323769 (lr=5.2183e-04) (hash(x)=149178874)
310
+ 2560 train 4.477662 (lr=5.2110e-04) (hash(x)=158403024)
311
+ 2570 train 4.347175 (lr=5.2037e-04) (hash(x)=148252901)
312
+ 2580 train 4.372376 (lr=5.1964e-04) (hash(x)=156933994)
313
+ 2590 train 4.320841 (lr=5.1891e-04) (hash(x)=147869995)
314
+ 2600 val loss 4.4240
315
+ 2600 val perplexity 83.4309
316
+ 2600 train 4.324703 (lr=5.1817e-04) (hash(x)=133990623)
317
+ 2610 train 4.403966 (lr=5.1743e-04) (hash(x)=155745526)
318
+ 2620 train 4.314090 (lr=5.1669e-04) (hash(x)=145640933)
319
+ 2630 train 4.391921 (lr=5.1595e-04) (hash(x)=144600130)
320
+ 2640 train 4.218779 (lr=5.1520e-04) (hash(x)=143319960)
321
+ 2650 train 4.158856 (lr=5.1445e-04) (hash(x)=144841781)
322
+ 2660 train 4.415002 (lr=5.1370e-04) (hash(x)=142306571)
323
+ 2670 train 4.339683 (lr=5.1295e-04) (hash(x)=150824939)
324
+ 2680 train 4.390085 (lr=5.1219e-04) (hash(x)=144954685)
325
+ 2690 train 4.179630 (lr=5.1143e-04) (hash(x)=144071317)
326
+ 2700 val loss 4.4350
327
+ 2700 val perplexity 84.3479
328
+ 2700 train 4.325372 (lr=5.1067e-04) (hash(x)=142860944)
329
+ 2710 train 4.310826 (lr=5.0990e-04) (hash(x)=150879676)
330
+ 2720 train 4.192308 (lr=5.0913e-04) (hash(x)=158434126)
331
+ 2730 train 4.467624 (lr=5.0836e-04) (hash(x)=149786230)
332
+ 2740 train 4.475477 (lr=5.0759e-04) (hash(x)=148769334)
333
+ 2750 train 4.403395 (lr=5.0681e-04) (hash(x)=151565702)
334
+ 2760 train 4.544497 (lr=5.0603e-04) (hash(x)=162471158)
335
+ 2770 train 4.437764 (lr=5.0525e-04) (hash(x)=168950803)
336
+ 2780 train 4.493452 (lr=5.0447e-04) (hash(x)=149815020)
337
+ 2790 train 4.432085 (lr=5.0368e-04) (hash(x)=161235555)
338
+ 2800 val loss 4.3949
339
+ 2800 val perplexity 81.0404
340
+ 2800 train 4.327058 (lr=5.0290e-04) (hash(x)=137959511)
341
+ 2810 train 4.476348 (lr=5.0210e-04) (hash(x)=148420397)
342
+ 2820 train 4.372009 (lr=5.0131e-04) (hash(x)=151862744)
343
+ 2830 train 4.461427 (lr=5.0052e-04) (hash(x)=153223303)
344
+ 2840 train 4.408423 (lr=4.9972e-04) (hash(x)=149794760)
345
+ 2850 train 4.477295 (lr=4.9892e-04) (hash(x)=153517085)
346
+ 2860 train 4.459608 (lr=4.9811e-04) (hash(x)=152044927)
347
+ 2870 train 4.642197 (lr=4.9731e-04) (hash(x)=139708749)
348
+ 2880 train 4.367671 (lr=4.9650e-04) (hash(x)=152675893)
349
+ 2890 train 4.401156 (lr=4.9569e-04) (hash(x)=153950772)
350
+ 2900 val loss 4.3681
351
+ 2900 val perplexity 78.8948
352
+ 2900 train 4.326475 (lr=4.9487e-04) (hash(x)=147009873)
353
+ 2910 train 4.429042 (lr=4.9406e-04) (hash(x)=147497701)
354
+ 2920 train 4.540160 (lr=4.9324e-04) (hash(x)=149615491)
355
+ 2930 train 4.538839 (lr=4.9242e-04) (hash(x)=152588554)
356
+ 2940 train 4.364810 (lr=4.9160e-04) (hash(x)=147080266)
357
+ 2950 train 4.578605 (lr=4.9077e-04) (hash(x)=149607682)
358
+ 2960 train 4.316250 (lr=4.8995e-04) (hash(x)=156855690)
359
+ 2970 train 4.300284 (lr=4.8912e-04) (hash(x)=147759452)
360
+ 2980 train 4.233245 (lr=4.8829e-04) (hash(x)=168906251)
361
+ 2990 train 4.368851 (lr=4.8745e-04) (hash(x)=159612982)
362
+ 3000 val loss 4.3641
363
+ 3000 val perplexity 78.5791
364
+ 3000 train 4.223536 (lr=4.8662e-04) (hash(x)=158264841)
365
+ 3010 train 4.230308 (lr=4.8578e-04) (hash(x)=151971710)
366
+ 3020 train 4.304003 (lr=4.8494e-04) (hash(x)=148961929)
367
+ 3030 train 4.093127 (lr=4.8409e-04) (hash(x)=144951447)
368
+ 3040 train 4.144810 (lr=4.8325e-04) (hash(x)=137727737)
369
+ 3050 train 4.166666 (lr=4.8240e-04) (hash(x)=145598216)
370
+ 3060 train 4.172855 (lr=4.8155e-04) (hash(x)=139250122)
371
+ 3070 train 4.230353 (lr=4.8070e-04) (hash(x)=158020889)
372
+ 3080 train 4.062794 (lr=4.7984e-04) (hash(x)=147345272)
373
+ 3090 train 4.092636 (lr=4.7899e-04) (hash(x)=148006111)
374
+ 3100 val loss 4.3681
375
+ 3100 val perplexity 78.8975
376
+ 3100 train 4.223536 (lr=4.7813e-04) (hash(x)=139232251)
377
+ 3110 train 4.042999 (lr=4.7727e-04) (hash(x)=164654684)
378
+ 3120 train 4.270670 (lr=4.7641e-04) (hash(x)=153975753)
379
+ 3130 train 4.423926 (lr=4.7554e-04) (hash(x)=154784772)
380
+ 3140 train 4.279055 (lr=4.7467e-04) (hash(x)=156671619)
381
+ 3150 train 4.266232 (lr=4.7380e-04) (hash(x)=158188012)
382
+ 3160 train 4.362569 (lr=4.7293e-04) (hash(x)=152266024)
383
+ 3170 train 4.337701 (lr=4.7206e-04) (hash(x)=145418617)
384
+ 3180 train 4.387589 (lr=4.7118e-04) (hash(x)=149947903)
385
+ 3190 train 4.254520 (lr=4.7031e-04) (hash(x)=156664225)
386
+ 3200 val loss 4.3322
387
+ 3200 val perplexity 76.1127
388
+ 3200 train 4.375142 (lr=4.6943e-04) (hash(x)=153436104)
389
+ 3210 train 4.458385 (lr=4.6855e-04) (hash(x)=163550923)
390
+ 3220 train 4.215328 (lr=4.6766e-04) (hash(x)=144583183)
391
+ 3230 train 4.300598 (lr=4.6678e-04) (hash(x)=165547680)
392
+ 3240 train 4.590939 (lr=4.6589e-04) (hash(x)=152195267)
393
+ 3250 train 4.358457 (lr=4.6500e-04) (hash(x)=150040234)
394
+ 3260 train 4.351537 (lr=4.6411e-04) (hash(x)=143788229)
395
+ 3270 train 4.211358 (lr=4.6322e-04) (hash(x)=154472495)
396
+ 3280 train 4.213633 (lr=4.6232e-04) (hash(x)=162620947)
397
+ 3290 train 4.331361 (lr=4.6142e-04) (hash(x)=157731379)
398
+ 3300 val loss 4.3279
399
+ 3300 val perplexity 75.7850
400
+ 3300 train 4.190994 (lr=4.6052e-04) (hash(x)=149681831)
401
+ 3310 train 4.261853 (lr=4.5962e-04) (hash(x)=147216223)
402
+ 3320 train 4.192186 (lr=4.5872e-04) (hash(x)=146478959)
403
+ 3330 train 4.299156 (lr=4.5782e-04) (hash(x)=161840239)
404
+ 3340 train 4.277519 (lr=4.5691e-04) (hash(x)=141748686)
405
+ 3350 train 4.104204 (lr=4.5600e-04) (hash(x)=134782096)
406
+ 3360 train 4.313343 (lr=4.5509e-04) (hash(x)=164103503)
407
+ 3370 train 4.151342 (lr=4.5418e-04) (hash(x)=144658716)
408
+ 3380 train 4.077283 (lr=4.5326e-04) (hash(x)=144343513)
409
+ 3390 train 4.158581 (lr=4.5235e-04) (hash(x)=145365816)
410
+ 3400 val loss 4.3260
411
+ 3400 val perplexity 75.6434
412
+ 3400 train 4.174162 (lr=4.5143e-04) (hash(x)=168425516)
413
+ 3410 train 4.136054 (lr=4.5051e-04) (hash(x)=139704073)
414
+ 3420 train 4.257899 (lr=4.4959e-04) (hash(x)=156592513)
415
+ 3430 train 4.243215 (lr=4.4867e-04) (hash(x)=136773063)
416
+ 3440 train 4.248217 (lr=4.4774e-04) (hash(x)=162468494)
417
+ 3450 train 4.303308 (lr=4.4682e-04) (hash(x)=143829320)
418
+ 3460 train 4.359573 (lr=4.4589e-04) (hash(x)=158562331)
419
+ 3470 train 4.361857 (lr=4.4496e-04) (hash(x)=151882487)
420
+ 3480 train 4.350006 (lr=4.4403e-04) (hash(x)=150575558)
421
+ 3490 train 4.244628 (lr=4.4310e-04) (hash(x)=147407741)
422
+ 3500 val loss 4.2936
423
+ 3500 val perplexity 73.2325
424
+ 3500 train 4.344105 (lr=4.4216e-04) (hash(x)=163104338)
425
+ 3510 train 4.262851 (lr=4.4123e-04) (hash(x)=157461421)
426
+ 3520 train 4.231164 (lr=4.4029e-04) (hash(x)=151459070)
427
+ 3530 train 4.160058 (lr=4.3935e-04) (hash(x)=150777169)
428
+ 3540 train 4.316420 (lr=4.3841e-04) (hash(x)=159138904)
429
+ 3550 train 4.227777 (lr=4.3747e-04) (hash(x)=157853732)
430
+ 3560 train 4.194306 (lr=4.3652e-04) (hash(x)=152552809)
431
+ 3570 train 4.249184 (lr=4.3558e-04) (hash(x)=145219770)
432
+ 3580 train 4.193099 (lr=4.3463e-04) (hash(x)=147325185)
433
+ 3590 train 4.296300 (lr=4.3368e-04) (hash(x)=146746257)
434
+ 3600 val loss 4.2866
435
+ 3600 val perplexity 72.7178
436
+ 3600 train 4.140559 (lr=4.3273e-04) (hash(x)=165109772)
437
+ 3610 train 4.093875 (lr=4.3178e-04) (hash(x)=142561694)
438
+ 3620 train 4.248956 (lr=4.3083e-04) (hash(x)=153512606)
439
+ 3630 train 4.099589 (lr=4.2987e-04) (hash(x)=140485474)
440
+ 3640 train 4.221697 (lr=4.2892e-04) (hash(x)=142084056)
441
+ 3650 train 4.261376 (lr=4.2796e-04) (hash(x)=143729044)
442
+ 3660 train 4.177237 (lr=4.2700e-04) (hash(x)=129606055)
443
+ 3670 train 4.203621 (lr=4.2604e-04) (hash(x)=149878209)
444
+ 3680 train 4.024297 (lr=4.2508e-04) (hash(x)=147988422)
445
+ 3690 train 4.125041 (lr=4.2411e-04) (hash(x)=134245780)
446
+ 3700 val loss 4.3092
447
+ 3700 val perplexity 74.3820
448
+ 3700 train 4.043869 (lr=4.2315e-04) (hash(x)=153420306)
449
+ 3710 train 4.211823 (lr=4.2218e-04) (hash(x)=151618359)
450
+ 3720 train 4.117893 (lr=4.2122e-04) (hash(x)=146201479)
451
+ 3730 train 4.161988 (lr=4.2025e-04) (hash(x)=173571327)
452
+ 3740 train 4.294584 (lr=4.1928e-04) (hash(x)=141793500)
453
+ 3750 train 4.385358 (lr=4.1831e-04) (hash(x)=184628568)
454
+ 3760 train 4.347248 (lr=4.1734e-04) (hash(x)=143308741)
455
+ 3770 train 4.464727 (lr=4.1636e-04) (hash(x)=151283512)
456
+ 3780 train 4.270392 (lr=4.1539e-04) (hash(x)=143336516)
457
+ 3790 train 4.286648 (lr=4.1441e-04) (hash(x)=148124486)
458
+ 3800 val loss 4.2683
459
+ 3800 val perplexity 71.4031
460
+ 3800 train 4.177742 (lr=4.1343e-04) (hash(x)=160168863)
461
+ 3810 train 4.204525 (lr=4.1246e-04) (hash(x)=155072251)
462
+ 3820 train 4.396286 (lr=4.1148e-04) (hash(x)=153837417)
463
+ 3830 train 4.457480 (lr=4.1050e-04) (hash(x)=130032011)
464
+ 3840 train 4.156067 (lr=4.0951e-04) (hash(x)=146270962)
465
+ 3850 train 4.188394 (lr=4.0853e-04) (hash(x)=146936934)
466
+ 3860 train 4.107306 (lr=4.0755e-04) (hash(x)=165785187)
467
+ 3870 train 4.306210 (lr=4.0656e-04) (hash(x)=145393482)
468
+ 3880 train 4.243460 (lr=4.0557e-04) (hash(x)=148819533)
469
+ 3890 train 4.118350 (lr=4.0459e-04) (hash(x)=146870411)
470
+ 3900 val loss 4.2647
471
+ 3900 val perplexity 71.1412
472
+ 3900 train 4.136272 (lr=4.0360e-04) (hash(x)=153906073)
473
+ 3910 train 4.232559 (lr=4.0261e-04) (hash(x)=135632500)
474
+ 3920 train 4.248977 (lr=4.0162e-04) (hash(x)=147393037)
475
+ 3930 train 4.397795 (lr=4.0063e-04) (hash(x)=150847811)
476
+ 3940 train 4.175187 (lr=3.9963e-04) (hash(x)=150253123)
477
+ 3950 train 4.060656 (lr=3.9864e-04) (hash(x)=162609448)
478
+ 3960 train 4.064752 (lr=3.9764e-04) (hash(x)=150901940)
479
+ 3970 train 4.099853 (lr=3.9665e-04) (hash(x)=149698197)
480
+ 3980 train 4.193648 (lr=3.9565e-04) (hash(x)=156055141)
481
+ 3990 train 4.159819 (lr=3.9465e-04) (hash(x)=163154074)
482
+ 4000 val loss 4.2616
483
+ 4000 val perplexity 70.9223
484
+ 4000 train 4.216847 (lr=3.9365e-04) (hash(x)=151055067)
485
+ 4010 train 4.162566 (lr=3.9266e-04) (hash(x)=149313232)
486
+ 4020 train 4.202842 (lr=3.9165e-04) (hash(x)=144350585)
487
+ 4030 train 4.208403 (lr=3.9065e-04) (hash(x)=143139724)
488
+ 4040 train 4.278615 (lr=3.8965e-04) (hash(x)=137534879)
489
+ 4050 train 4.155915 (lr=3.8865e-04) (hash(x)=147037092)
490
+ 4060 train 4.190468 (lr=3.8764e-04) (hash(x)=149915654)
491
+ 4070 train 4.136100 (lr=3.8664e-04) (hash(x)=148002990)
492
+ 4080 train 4.126393 (lr=3.8563e-04) (hash(x)=141860091)
493
+ 4090 train 4.266189 (lr=3.8463e-04) (hash(x)=152510717)
494
+ 4100 val loss 4.2502
495
+ 4100 val perplexity 70.1216
496
+ 4100 train 4.156748 (lr=3.8362e-04) (hash(x)=149629830)
497
+ 4110 train 4.141899 (lr=3.8261e-04) (hash(x)=143677674)
498
+ 4120 train 4.260850 (lr=3.8160e-04) (hash(x)=143870235)
499
+ 4130 train 4.229340 (lr=3.8059e-04) (hash(x)=156172240)
500
+ 4140 train 4.092404 (lr=3.7958e-04) (hash(x)=142768662)
501
+ 4150 train 4.134804 (lr=3.7857e-04) (hash(x)=151256841)
502
+ 4160 train 4.219779 (lr=3.7756e-04) (hash(x)=144941747)
503
+ 4170 train 4.118941 (lr=3.7655e-04) (hash(x)=143588738)
504
+ 4180 train 4.082337 (lr=3.7553e-04) (hash(x)=145117633)
505
+ 4190 train 4.250305 (lr=3.7452e-04) (hash(x)=158244222)
506
+ 4200 val loss 4.2388
507
+ 4200 val perplexity 69.3226
508
+ 4200 train 4.151377 (lr=3.7351e-04) (hash(x)=143101381)
509
+ 4210 train 4.154030 (lr=3.7249e-04) (hash(x)=148704069)
510
+ 4220 train 4.082290 (lr=3.7148e-04) (hash(x)=165334845)
511
+ 4230 train 4.123566 (lr=3.7046e-04) (hash(x)=147150527)
512
+ 4240 train 4.113924 (lr=3.6944e-04) (hash(x)=150118606)
513
+ 4250 train 4.079200 (lr=3.6843e-04) (hash(x)=142159597)
514
+ 4260 train 4.114458 (lr=3.6741e-04) (hash(x)=152185092)
515
+ 4270 train 4.122750 (lr=3.6639e-04) (hash(x)=148870173)
516
+ 4280 train 4.141642 (lr=3.6537e-04) (hash(x)=150977664)
517
+ 4290 train 4.219389 (lr=3.6435e-04) (hash(x)=165032394)
518
+ 4300 val loss 4.2392
519
+ 4300 val perplexity 69.3521
520
+ 4300 train 4.224743 (lr=3.6333e-04) (hash(x)=149712044)
521
+ 4310 train 4.144774 (lr=3.6231e-04) (hash(x)=151956500)
522
+ 4320 train 4.332068 (lr=3.6129e-04) (hash(x)=136107484)
523
+ 4330 train 4.409291 (lr=3.6027e-04) (hash(x)=160783980)
524
+ 4340 train 4.287406 (lr=3.5925e-04) (hash(x)=151772228)
525
+ 4350 train 4.095201 (lr=3.5822e-04) (hash(x)=176468048)
526
+ 4360 train 4.177223 (lr=3.5720e-04) (hash(x)=144588632)
527
+ 4370 train 4.208097 (lr=3.5618e-04) (hash(x)=156374655)
528
+ 4380 train 4.310132 (lr=3.5515e-04) (hash(x)=148502191)
529
+ 4390 train 4.402782 (lr=3.5413e-04) (hash(x)=152391280)
530
+ 4400 val loss 4.2231
531
+ 4400 val perplexity 68.2476
532
+ 4400 train 4.125841 (lr=3.5311e-04) (hash(x)=153446449)
533
+ 4410 train 4.208029 (lr=3.5208e-04) (hash(x)=167668129)
534
+ 4420 train 4.145264 (lr=3.5106e-04) (hash(x)=148598803)
535
+ 4430 train 4.210530 (lr=3.5003e-04) (hash(x)=161720292)
536
+ 4440 train 4.045743 (lr=3.4901e-04) (hash(x)=144133776)
537
+ 4450 train 4.154221 (lr=3.4798e-04) (hash(x)=151057987)
538
+ 4460 train 4.180992 (lr=3.4695e-04) (hash(x)=151971972)
539
+ 4470 train 4.038364 (lr=3.4593e-04) (hash(x)=140885903)
540
+ 4480 train 4.065793 (lr=3.4490e-04) (hash(x)=138198700)
541
+ 4490 train 4.161359 (lr=3.4387e-04) (hash(x)=150424198)
542
+ 4500 val loss 4.2142
543
+ 4500 val perplexity 67.6377
544
+ 4500 train 4.162060 (lr=3.4285e-04) (hash(x)=146086947)
545
+ 4510 train 4.152469 (lr=3.4182e-04) (hash(x)=145752336)
546
+ 4520 train 4.095489 (lr=3.4079e-04) (hash(x)=145005726)
547
+ 4530 train 4.099135 (lr=3.3977e-04) (hash(x)=144116626)
548
+ 4540 train 4.152851 (lr=3.3874e-04) (hash(x)=164507978)
549
+ 4550 train 4.035840 (lr=3.3771e-04) (hash(x)=154512699)
550
+ 4560 train 4.132565 (lr=3.3668e-04) (hash(x)=149920787)
551
+ 4570 train 4.037919 (lr=3.3565e-04) (hash(x)=155720526)
552
+ 4580 train 4.112408 (lr=3.3463e-04) (hash(x)=149918053)
553
+ 4590 train 4.260189 (lr=3.3360e-04) (hash(x)=170567219)
554
+ 4600 val loss 4.2155
555
+ 4600 val perplexity 67.7305
556
+ 4600 train 4.100365 (lr=3.3257e-04) (hash(x)=153800173)
557
+ 4610 train 4.075234 (lr=3.3154e-04) (hash(x)=151762309)
558
+ 4620 train 4.158895 (lr=3.3051e-04) (hash(x)=138620238)
559
+ 4630 train 4.088101 (lr=3.2949e-04) (hash(x)=155847676)
560
+ 4640 train 4.217817 (lr=3.2846e-04) (hash(x)=159145173)
561
+ 4650 train 4.266030 (lr=3.2743e-04) (hash(x)=163662103)
562
+ 4660 train 4.149409 (lr=3.2640e-04) (hash(x)=168862572)
563
+ 4670 train 5.295560 (lr=3.2537e-04) (hash(x)=128395338)
564
+ 4680 train 4.059878 (lr=3.2435e-04) (hash(x)=149975049)
565
+ 4690 train 4.089979 (lr=3.2332e-04) (hash(x)=156604294)
566
+ 4700 val loss 4.1979
567
+ 4700 val perplexity 66.5486
568
+ 4700 train 4.365446 (lr=3.2229e-04) (hash(x)=155962726)
569
+ 4710 train 4.090021 (lr=3.2126e-04) (hash(x)=160931268)
570
+ 4720 train 4.173439 (lr=3.2023e-04) (hash(x)=173485074)
571
+ 4730 train 4.277673 (lr=3.1921e-04) (hash(x)=146001945)
572
+ 4740 train 4.195193 (lr=3.1818e-04) (hash(x)=148305446)
573
+ 4750 train 4.152917 (lr=3.1715e-04) (hash(x)=131655842)
574
+ 4760 train 4.099238 (lr=3.1613e-04) (hash(x)=145775165)
575
+ 4770 train 4.034110 (lr=3.1510e-04) (hash(x)=146613282)
576
+ 4780 train 4.044159 (lr=3.1407e-04) (hash(x)=137882160)
577
+ 4790 train 4.211039 (lr=3.1305e-04) (hash(x)=157177531)
578
+ 4800 val loss 4.1937
579
+ 4800 val perplexity 66.2681
580
+ 4800 train 4.513865 (lr=3.1202e-04) (hash(x)=142045616)
581
+ 4810 train 4.360612 (lr=3.1099e-04) (hash(x)=153739117)
582
+ 4820 train 3.996757 (lr=3.0997e-04) (hash(x)=159336444)
583
+ 4830 train 4.191407 (lr=3.0894e-04) (hash(x)=162617526)
584
+ 4840 train 4.232656 (lr=3.0792e-04) (hash(x)=141336820)
585
+ 4850 train 4.155486 (lr=3.0689e-04) (hash(x)=146718291)
586
+ 4860 train 4.113620 (lr=3.0587e-04) (hash(x)=159542679)
587
+ 4870 train 4.103887 (lr=3.0485e-04) (hash(x)=149318029)
588
+ 4880 train 4.108254 (lr=3.0382e-04) (hash(x)=150182605)
589
+ 4890 train 4.083578 (lr=3.0280e-04) (hash(x)=139657075)
590
+ 4900 val loss 4.1868
591
+ 4900 val perplexity 65.8129
592
+ 4900 train 4.218506 (lr=3.0178e-04) (hash(x)=143418248)
593
+ 4910 train 4.285641 (lr=3.0075e-04) (hash(x)=153395021)
594
+ 4920 train 4.190434 (lr=2.9973e-04) (hash(x)=144977141)
595
+ 4930 train 4.199266 (lr=2.9871e-04) (hash(x)=143569016)
596
+ 4940 train 4.163790 (lr=2.9769e-04) (hash(x)=145702219)
597
+ 4950 train 4.153457 (lr=2.9667e-04) (hash(x)=150669399)
598
+ 4960 train 4.146342 (lr=2.9565e-04) (hash(x)=147697301)
599
+ 4970 train 4.063084 (lr=2.9463e-04) (hash(x)=158617139)
600
+ 4980 train 4.172106 (lr=2.9361e-04) (hash(x)=158937457)
601
+ 4990 train 4.075258 (lr=2.9259e-04) (hash(x)=155540364)
602
+ 5000 val loss 4.1836
603
+ 5000 val perplexity 65.6038
604
+ 5000 train 4.121487 (lr=2.9157e-04) (hash(x)=145789790)
605
+ 5010 train 4.142146 (lr=2.9056e-04) (hash(x)=142919148)
606
+ 5020 train 4.026702 (lr=2.8954e-04) (hash(x)=149129606)
607
+ 5030 train 4.265334 (lr=2.8852e-04) (hash(x)=143060789)
608
+ 5040 train 4.089305 (lr=2.8751e-04) (hash(x)=145733956)
609
+ 5050 train 3.915463 (lr=2.8649e-04) (hash(x)=139750793)
610
+ 5060 train 4.198373 (lr=2.8548e-04) (hash(x)=148027808)
611
+ 5070 train 4.019546 (lr=2.8447e-04) (hash(x)=149119770)
612
+ 5080 train 4.054741 (lr=2.8345e-04) (hash(x)=145741944)
613
+ 5090 train 4.124260 (lr=2.8244e-04) (hash(x)=144783744)
614
+ 5100 val loss 4.1842
615
+ 5100 val perplexity 65.6379
616
+ 5100 train 4.152290 (lr=2.8143e-04) (hash(x)=137795633)
617
+ 5110 train 4.047652 (lr=2.8042e-04) (hash(x)=145995538)
618
+ 5120 train 4.053296 (lr=2.7941e-04) (hash(x)=143019933)
619
+ 5130 train 4.035030 (lr=2.7840e-04) (hash(x)=143746069)
620
+ 5140 train 3.826921 (lr=2.7739e-04) (hash(x)=152212469)
621
+ 5150 train 4.129814 (lr=2.7638e-04) (hash(x)=152608669)
622
+ 5160 train 4.164292 (lr=2.7537e-04) (hash(x)=150750381)
623
+ 5170 train 4.103247 (lr=2.7437e-04) (hash(x)=133198485)
624
+ 5180 train 4.171725 (lr=2.7336e-04) (hash(x)=142233420)
625
+ 5190 train 4.005584 (lr=2.7236e-04) (hash(x)=147366941)
626
+ 5200 val loss 4.1704
627
+ 5200 val perplexity 64.7407
628
+ 5200 train 4.258163 (lr=2.7135e-04) (hash(x)=148907132)
629
+ 5210 train 4.313727 (lr=2.7035e-04) (hash(x)=154827138)
630
+ 5220 train 4.291863 (lr=2.6935e-04) (hash(x)=148717408)
631
+ 5230 train 4.458136 (lr=2.6835e-04) (hash(x)=147711017)
632
+ 5240 train 4.123367 (lr=2.6734e-04) (hash(x)=141796353)
633
+ 5250 train 4.045487 (lr=2.6635e-04) (hash(x)=143529037)
634
+ 5260 train 4.050275 (lr=2.6535e-04) (hash(x)=144773706)
635
+ 5270 train 4.106303 (lr=2.6435e-04) (hash(x)=148894329)
636
+ 5280 train 4.067476 (lr=2.6335e-04) (hash(x)=153761544)
637
+ 5290 train 4.092900 (lr=2.6236e-04) (hash(x)=150927827)
638
+ 5300 val loss 4.1689
639
+ 5300 val perplexity 64.6428
640
+ 5300 train 3.991432 (lr=2.6136e-04) (hash(x)=152343580)
641
+ 5310 train 3.932438 (lr=2.6037e-04) (hash(x)=146690249)
642
+ 5320 train 4.094585 (lr=2.5937e-04) (hash(x)=155991065)
643
+ 5330 train 4.086306 (lr=2.5838e-04) (hash(x)=151437785)
644
+ 5340 train 4.057670 (lr=2.5739e-04) (hash(x)=158840015)
645
+ 5350 train 4.004462 (lr=2.5640e-04) (hash(x)=155080378)
646
+ 5360 train 4.059451 (lr=2.5541e-04) (hash(x)=150513270)
647
+ 5370 train 3.954507 (lr=2.5443e-04) (hash(x)=147729861)
648
+ 5380 train 3.987256 (lr=2.5344e-04) (hash(x)=149870080)
649
+ 5390 train 3.817827 (lr=2.5245e-04) (hash(x)=137577816)
650
+ 5400 val loss 4.1606
651
+ 5400 val perplexity 64.1130
652
+ 5400 train 4.007288 (lr=2.5147e-04) (hash(x)=148578264)
653
+ 5410 train 4.126563 (lr=2.5049e-04) (hash(x)=149338196)
654
+ 5420 train 4.017113 (lr=2.4950e-04) (hash(x)=149581125)
655
+ 5430 train 4.059157 (lr=2.4852e-04) (hash(x)=146340931)
656
+ 5440 train 3.960557 (lr=2.4754e-04) (hash(x)=127884657)
657
+ 5450 train 4.133589 (lr=2.4657e-04) (hash(x)=142632516)
658
+ 5460 train 4.118123 (lr=2.4559e-04) (hash(x)=146519487)
659
+ 5470 train 4.204058 (lr=2.4461e-04) (hash(x)=136409211)
660
+ 5480 train 4.117671 (lr=2.4364e-04) (hash(x)=158622762)
661
+ 5490 train 4.056888 (lr=2.4266e-04) (hash(x)=154703333)
662
+ 5500 val loss 4.1463
663
+ 5500 val perplexity 63.1980
664
+ 5500 train 4.206609 (lr=2.4169e-04) (hash(x)=145635833)
665
+ 5510 train 4.185198 (lr=2.4072e-04) (hash(x)=158781769)
666
+ 5520 train 4.162925 (lr=2.3975e-04) (hash(x)=142550540)
667
+ 5530 train 4.171016 (lr=2.3878e-04) (hash(x)=152024805)
668
+ 5540 train 3.977600 (lr=2.3782e-04) (hash(x)=145812646)
669
+ 5550 train 4.057363 (lr=2.3685e-04) (hash(x)=144941659)
670
+ 5560 train 4.027712 (lr=2.3589e-04) (hash(x)=145887398)
671
+ 5570 train 4.037846 (lr=2.3492e-04) (hash(x)=144988642)
672
+ 5580 train 4.047574 (lr=2.3396e-04) (hash(x)=146366431)
673
+ 5590 train 3.986114 (lr=2.3300e-04) (hash(x)=143301433)
674
+ 5600 val loss 4.1443
675
+ 5600 val perplexity 63.0730
676
+ 5600 train 4.161259 (lr=2.3204e-04) (hash(x)=156337844)
677
+ 5610 train 4.132920 (lr=2.3108e-04) (hash(x)=146546252)
678
+ 5620 train 4.057903 (lr=2.3013e-04) (hash(x)=137610278)
679
+ 5630 train 4.081373 (lr=2.2917e-04) (hash(x)=148847402)
680
+ 5640 train 4.038005 (lr=2.2822e-04) (hash(x)=148805052)
681
+ 5650 train 3.952793 (lr=2.2727e-04) (hash(x)=144882220)
682
+ 5660 train 4.031598 (lr=2.2632e-04) (hash(x)=147894128)
683
+ 5670 train 3.953664 (lr=2.2537e-04) (hash(x)=144855036)
684
+ 5680 train 3.937950 (lr=2.2442e-04) (hash(x)=148400728)
685
+ 5690 train 3.924554 (lr=2.2348e-04) (hash(x)=138377186)
686
+ 5700 val loss 4.1459
687
+ 5700 val perplexity 63.1775
688
+ 5700 train 3.957550 (lr=2.2253e-04) (hash(x)=147168506)
689
+ 5710 train 4.088017 (lr=2.2159e-04) (hash(x)=150486217)
690
+ 5720 train 4.050665 (lr=2.2065e-04) (hash(x)=148658778)
691
+ 5730 train 3.851579 (lr=2.1971e-04) (hash(x)=148402206)
692
+ 5740 train 3.907508 (lr=2.1877e-04) (hash(x)=144432268)
693
+ 5750 train 4.276690 (lr=2.1784e-04) (hash(x)=153770308)
694
+ 5760 train 4.027662 (lr=2.1690e-04) (hash(x)=150546820)
695
+ 5770 train 4.210451 (lr=2.1597e-04) (hash(x)=140176711)
696
+ 5780 train 4.096617 (lr=2.1504e-04) (hash(x)=164276928)
697
+ 5790 train 3.996442 (lr=2.1411e-04) (hash(x)=153418756)
698
+ 5800 val loss 4.1327
699
+ 5800 val perplexity 62.3450
700
+ 5800 train 4.097345 (lr=2.1318e-04) (hash(x)=159566920)
701
+ 5810 train 4.026237 (lr=2.1226e-04) (hash(x)=161266875)
702
+ 5820 train 4.045896 (lr=2.1133e-04) (hash(x)=156298754)
703
+ 5830 train 4.180385 (lr=2.1041e-04) (hash(x)=139302200)
704
+ 5840 train 4.084721 (lr=2.0949e-04) (hash(x)=126108545)
705
+ 5850 train 3.998365 (lr=2.0857e-04) (hash(x)=147526120)
706
+ 5860 train 4.084702 (lr=2.0765e-04) (hash(x)=150242466)
707
+ 5870 train 4.052094 (lr=2.0674e-04) (hash(x)=139851741)
708
+ 5880 train 3.973731 (lr=2.0582e-04) (hash(x)=159464779)
709
+ 5890 train 4.121731 (lr=2.0491e-04) (hash(x)=148069134)
710
+ 5900 val loss 4.1276
711
+ 5900 val perplexity 62.0295
712
+ 5900 train 4.110882 (lr=2.0400e-04) (hash(x)=158273929)
713
+ 5910 train 3.809114 (lr=2.0309e-04) (hash(x)=150013727)
714
+ 5920 train 4.080310 (lr=2.0218e-04) (hash(x)=148803117)
715
+ 5930 train 4.121289 (lr=2.0128e-04) (hash(x)=156729045)
716
+ 5940 train 4.027078 (lr=2.0038e-04) (hash(x)=150475415)
717
+ 5950 train 3.914383 (lr=1.9948e-04) (hash(x)=153585864)
718
+ 5960 train 3.916728 (lr=1.9858e-04) (hash(x)=145189335)
719
+ 5970 train 3.931371 (lr=1.9768e-04) (hash(x)=145673396)
720
+ 5980 train 4.064659 (lr=1.9678e-04) (hash(x)=138413986)
721
+ 5990 train 3.860515 (lr=1.9589e-04) (hash(x)=140270894)
722
+ 6000 val loss 4.1299
723
+ 6000 val perplexity 62.1728
724
+ 6000 train 4.056568 (lr=1.9500e-04) (hash(x)=156649749)
725
+ 6010 train 4.169707 (lr=1.9411e-04) (hash(x)=154762134)
726
+ 6020 train 4.166695 (lr=1.9322e-04) (hash(x)=156088385)
727
+ 6030 train 4.024520 (lr=1.9234e-04) (hash(x)=152521323)
728
+ 6040 train 3.939079 (lr=1.9145e-04) (hash(x)=148376918)
729
+ 6050 train 4.106024 (lr=1.9057e-04) (hash(x)=155673243)
730
+ 6060 train 4.077501 (lr=1.8969e-04) (hash(x)=142344218)
731
+ 6070 train 4.219913 (lr=1.8882e-04) (hash(x)=151751923)
732
+ 6080 train 4.031391 (lr=1.8794e-04) (hash(x)=156311576)
733
+ 6090 train 3.986192 (lr=1.8707e-04) (hash(x)=145058290)
734
+ 6100 val loss 4.1172
735
+ 6100 val perplexity 61.3866
736
+ 6100 train 4.068705 (lr=1.8620e-04) (hash(x)=146812388)
737
+ 6110 train 4.118849 (lr=1.8533e-04) (hash(x)=138440385)
738
+ 6120 train 4.127853 (lr=1.8446e-04) (hash(x)=162764713)
739
+ 6130 train 4.020015 (lr=1.8359e-04) (hash(x)=156456134)
740
+ 6140 train 3.948278 (lr=1.8273e-04) (hash(x)=136402679)
741
+ 6150 train 3.953569 (lr=1.8187e-04) (hash(x)=153380357)
742
+ 6160 train 3.695956 (lr=1.8101e-04) (hash(x)=170571967)
743
+ 6170 train 3.999976 (lr=1.8016e-04) (hash(x)=150984242)
744
+ 6180 train 4.117376 (lr=1.7930e-04) (hash(x)=143187168)
745
+ 6190 train 4.112566 (lr=1.7845e-04) (hash(x)=142999298)
746
+ 6200 val loss 4.1164
747
+ 6200 val perplexity 61.3371
748
+ 6200 train 4.030761 (lr=1.7760e-04) (hash(x)=143522146)
749
+ 6210 train 4.029361 (lr=1.7675e-04) (hash(x)=141961270)
750
+ 6220 train 4.057020 (lr=1.7591e-04) (hash(x)=158572673)
751
+ 6230 train 4.017491 (lr=1.7506e-04) (hash(x)=142949296)
752
+ 6240 train 3.866611 (lr=1.7422e-04) (hash(x)=128564671)
753
+ 6250 train 3.960763 (lr=1.7338e-04) (hash(x)=147515208)
754
+ 6260 train 3.914749 (lr=1.7255e-04) (hash(x)=151825788)
755
+ 6270 train 3.890643 (lr=1.7171e-04) (hash(x)=136625971)
756
+ 6280 train 3.947548 (lr=1.7088e-04) (hash(x)=146904276)
757
+ 6290 train 3.854913 (lr=1.7005e-04) (hash(x)=151876945)
758
+ 6300 val loss 4.1149
759
+ 6300 val perplexity 61.2467
760
+ 6300 train 3.894540 (lr=1.6923e-04) (hash(x)=150124474)
761
+ 6310 train 3.863656 (lr=1.6840e-04) (hash(x)=139897420)
762
+ 6320 train 4.003336 (lr=1.6758e-04) (hash(x)=136670912)
763
+ 6330 train 3.882281 (lr=1.6676e-04) (hash(x)=140302490)
764
+ 6340 train 3.822499 (lr=1.6594e-04) (hash(x)=145564419)
765
+ 6350 train 4.165613 (lr=1.6513e-04) (hash(x)=164988308)
766
+ 6360 train 4.143365 (lr=1.6431e-04) (hash(x)=139721549)
767
+ 6370 train 4.075988 (lr=1.6350e-04) (hash(x)=154183530)
768
+ 6380 train 4.072599 (lr=1.6269e-04) (hash(x)=143895743)
769
+ 6390 train 4.113451 (lr=1.6189e-04) (hash(x)=153091571)
770
+ 6400 val loss 4.1077
771
+ 6400 val perplexity 60.8069
772
+ 6400 train 4.082397 (lr=1.6108e-04) (hash(x)=141242117)
773
+ 6410 train 4.157856 (lr=1.6028e-04) (hash(x)=146638943)
774
+ 6420 train 4.060442 (lr=1.5948e-04) (hash(x)=147429468)
775
+ 6430 train 4.153398 (lr=1.5869e-04) (hash(x)=147104391)
776
+ 6440 train 4.006453 (lr=1.5790e-04) (hash(x)=147003825)
777
+ 6450 train 3.968657 (lr=1.5710e-04) (hash(x)=150910430)
778
+ 6460 train 3.915046 (lr=1.5632e-04) (hash(x)=165853376)
779
+ 6470 train 4.078721 (lr=1.5553e-04) (hash(x)=145957630)
780
+ 6480 train 3.976671 (lr=1.5475e-04) (hash(x)=155685880)
781
+ 6490 train 4.213078 (lr=1.5397e-04) (hash(x)=150307407)
782
+ 6500 val loss 4.0959
783
+ 6500 val perplexity 60.0939
784
+ 6500 train 3.868759 (lr=1.5319e-04) (hash(x)=143529762)
785
+ 6510 train 4.132121 (lr=1.5241e-04) (hash(x)=145652949)
786
+ 6520 train 4.013954 (lr=1.5164e-04) (hash(x)=142033417)
787
+ 6530 train 4.020709 (lr=1.5087e-04) (hash(x)=131869635)
788
+ 6540 train 4.065619 (lr=1.5010e-04) (hash(x)=151253771)
789
+ 6550 train 4.112511 (lr=1.4933e-04) (hash(x)=164286071)
790
+ 6560 train 3.994016 (lr=1.4857e-04) (hash(x)=148872674)
791
+ 6570 train 4.006173 (lr=1.4781e-04) (hash(x)=147653143)
792
+ 6580 train 4.008394 (lr=1.4705e-04) (hash(x)=151684539)
793
+ 6590 train 4.014357 (lr=1.4630e-04) (hash(x)=143354218)
794
+ 6600 val loss 4.0923
795
+ 6600 val perplexity 59.8763
796
+ 6600 train 3.994524 (lr=1.4555e-04) (hash(x)=136948374)
797
+ 6610 train 3.988180 (lr=1.4480e-04) (hash(x)=149846260)
798
+ 6620 train 3.882590 (lr=1.4405e-04) (hash(x)=151615259)
799
+ 6630 train 3.835550 (lr=1.4331e-04) (hash(x)=153129395)
800
+ 6640 train 4.025148 (lr=1.4257e-04) (hash(x)=133054060)
801
+ 6650 train 4.055900 (lr=1.4183e-04) (hash(x)=149911476)
802
+ 6660 train 4.021441 (lr=1.4109e-04) (hash(x)=150548384)
803
+ 6670 train 3.879672 (lr=1.4036e-04) (hash(x)=154950888)
804
+ 6680 train 3.745126 (lr=1.3963e-04) (hash(x)=145532409)
805
+ 6690 train 3.945278 (lr=1.3890e-04) (hash(x)=151197362)
806
+ 6700 val loss 4.0975
807
+ 6700 val perplexity 60.1884
808
+ 6700 train 3.864486 (lr=1.3817e-04) (hash(x)=146268592)
809
+ 6710 train 3.838888 (lr=1.3745e-04) (hash(x)=154495757)
810
+ 6720 train 3.946452 (lr=1.3673e-04) (hash(x)=168472165)
811
+ 6730 train 3.921844 (lr=1.3602e-04) (hash(x)=150287847)
812
+ 6740 train 3.874869 (lr=1.3530e-04) (hash(x)=151465011)
813
+ 6750 train 3.814865 (lr=1.3459e-04) (hash(x)=151898013)
814
+ 6760 train 3.960382 (lr=1.3388e-04) (hash(x)=145836413)
815
+ 6770 train 4.213591 (lr=1.3318e-04) (hash(x)=146057728)
816
+ 6780 train 4.091931 (lr=1.3248e-04) (hash(x)=160113674)
817
+ 6790 train 4.104212 (lr=1.3178e-04) (hash(x)=149844509)
818
+ 6800 val loss 4.0829
819
+ 6800 val perplexity 59.3199
820
+ 6800 train 4.065522 (lr=1.3108e-04) (hash(x)=152676836)
821
+ 6810 train 4.137956 (lr=1.3039e-04) (hash(x)=147510869)
822
+ 6820 train 4.208323 (lr=1.2970e-04) (hash(x)=149778657)
823
+ 6830 train 4.141599 (lr=1.2901e-04) (hash(x)=143851885)
824
+ 6840 train 4.145150 (lr=1.2832e-04) (hash(x)=158011137)
825
+ 6850 train 4.206882 (lr=1.2764e-04) (hash(x)=171197810)
826
+ 6860 train 4.121864 (lr=1.2696e-04) (hash(x)=151171060)
827
+ 6870 train 4.062595 (lr=1.2628e-04) (hash(x)=135613468)
828
+ 6880 train 4.101982 (lr=1.2561e-04) (hash(x)=153789364)
829
+ 6890 train 4.044775 (lr=1.2494e-04) (hash(x)=128928397)
830
+ 6900 val loss 4.0726
831
+ 6900 val perplexity 58.7084
832
+ 6900 train 4.098212 (lr=1.2427e-04) (hash(x)=134657776)
833
+ 6910 train 4.046336 (lr=1.2361e-04) (hash(x)=170095944)
834
+ 6920 train 4.305051 (lr=1.2295e-04) (hash(x)=147408535)
835
+ 6930 train 4.072970 (lr=1.2229e-04) (hash(x)=141187674)
836
+ 6940 train 3.956513 (lr=1.2163e-04) (hash(x)=157752429)
837
+ 6950 train 4.151587 (lr=1.2098e-04) (hash(x)=149793159)
838
+ 6960 train 4.026319 (lr=1.2033e-04) (hash(x)=135786360)
839
+ 6970 train 3.935618 (lr=1.1969e-04) (hash(x)=144372214)
840
+ 6980 train 4.046692 (lr=1.1904e-04) (hash(x)=147592321)
841
+ 6990 train 3.984569 (lr=1.1840e-04) (hash(x)=135072395)
842
+ 7000 val loss 4.0753
843
+ 7000 val perplexity 58.8684
844
+ 7000 train 3.970591 (lr=1.1777e-04) (hash(x)=166721861)
845
+ 7010 train 4.053738 (lr=1.1713e-04) (hash(x)=157177130)
846
+ 7020 train 3.946535 (lr=1.1650e-04) (hash(x)=154916555)
847
+ 7030 train 4.073021 (lr=1.1587e-04) (hash(x)=139686863)
848
+ 7040 train 4.031479 (lr=1.1525e-04) (hash(x)=139606744)
849
+ 7050 train 4.082181 (lr=1.1463e-04) (hash(x)=149487898)
850
+ 7060 train 3.985038 (lr=1.1401e-04) (hash(x)=162536366)
851
+ 7070 train 4.025688 (lr=1.1339e-04) (hash(x)=144765016)
852
+ 7080 train 4.006957 (lr=1.1278e-04) (hash(x)=146001679)
853
+ 7090 train 3.801087 (lr=1.1217e-04) (hash(x)=153352255)
854
+ 7100 val loss 4.0725
855
+ 7100 val perplexity 58.7055
856
+ 7100 train 3.832635 (lr=1.1157e-04) (hash(x)=135496702)
857
+ 7110 train 4.013935 (lr=1.1096e-04) (hash(x)=155057574)
858
+ 7120 train 3.964860 (lr=1.1036e-04) (hash(x)=145862613)
859
+ 7130 train 3.672335 (lr=1.0977e-04) (hash(x)=160806933)
860
+ 7140 train 3.745825 (lr=1.0917e-04) (hash(x)=154582601)
861
+ 7150 train 3.840306 (lr=1.0858e-04) (hash(x)=151854329)
862
+ 7160 train 4.015784 (lr=1.0800e-04) (hash(x)=153875998)
863
+ 7170 train 3.814543 (lr=1.0741e-04) (hash(x)=143448354)
864
+ 7180 train 3.839606 (lr=1.0683e-04) (hash(x)=141619703)
865
+ 7190 train 4.101428 (lr=1.0626e-04) (hash(x)=154769375)
866
+ 7200 val loss 4.0706
867
+ 7200 val perplexity 58.5926
868
+ 7200 train 4.199346 (lr=1.0568e-04) (hash(x)=155567461)
869
+ 7210 train 4.144293 (lr=1.0511e-04) (hash(x)=160242796)
870
+ 7220 train 4.179840 (lr=1.0454e-04) (hash(x)=146362204)
871
+ 7230 train 3.912757 (lr=1.0398e-04) (hash(x)=155673641)
872
+ 7240 train 4.125019 (lr=1.0342e-04) (hash(x)=163454733)
873
+ 7250 train 4.125295 (lr=1.0286e-04) (hash(x)=166254410)
874
+ 7260 train 3.972138 (lr=1.0231e-04) (hash(x)=149576049)
875
+ 7270 train 4.029835 (lr=1.0176e-04) (hash(x)=159937867)
876
+ 7280 train 4.173132 (lr=1.0121e-04) (hash(x)=151335403)
877
+ 7290 train 4.001433 (lr=1.0066e-04) (hash(x)=148867536)
878
+ 7300 val loss 4.0570
879
+ 7300 val perplexity 57.7994
880
+ 7300 train 3.953234 (lr=1.0012e-04) (hash(x)=142803829)
881
+ 7310 train 4.109261 (lr=9.9586e-05) (hash(x)=149482208)
882
+ 7320 train 3.898044 (lr=9.9052e-05) (hash(x)=144226264)
883
+ 7330 train 3.943022 (lr=9.8521e-05) (hash(x)=189898865)
884
+ 7340 train 4.107451 (lr=9.7993e-05) (hash(x)=153884999)
885
+ 7350 train 4.015296 (lr=9.7469e-05) (hash(x)=149106483)
886
+ 7360 train 4.041057 (lr=9.6948e-05) (hash(x)=146033939)
887
+ 7370 train 4.038353 (lr=9.6431e-05) (hash(x)=133850095)
888
+ 7380 train 4.047404 (lr=9.5917e-05) (hash(x)=155008783)
889
+ 7390 train 4.053233 (lr=9.5406e-05) (hash(x)=158169816)
890
+ 7400 val loss 4.0590
891
+ 7400 val perplexity 57.9187
892
+ 7400 train 3.988124 (lr=9.4899e-05) (hash(x)=145294178)
893
+ 7410 train 3.912531 (lr=9.4395e-05) (hash(x)=156501995)
894
+ 7420 train 4.015022 (lr=9.3894e-05) (hash(x)=154883953)
895
+ 7430 train 4.025871 (lr=9.3397e-05) (hash(x)=153861637)
896
+ 7440 train 3.978514 (lr=9.2904e-05) (hash(x)=143884732)
897
+ 7450 train 4.088097 (lr=9.2413e-05) (hash(x)=147629077)
898
+ 7460 train 3.982697 (lr=9.1927e-05) (hash(x)=147092200)
899
+ 7470 train 3.851269 (lr=9.1443e-05) (hash(x)=153667163)
900
+ 7480 train 3.911943 (lr=9.0964e-05) (hash(x)=155400781)
901
+ 7490 train 4.162216 (lr=9.0487e-05) (hash(x)=158198995)
902
+ 7500 val loss 4.0602
903
+ 7500 val perplexity 57.9852
904
+ 7500 train 3.868596 (lr=9.0014e-05) (hash(x)=150573713)
905
+ 7510 train 3.936697 (lr=8.9545e-05) (hash(x)=159059956)
906
+ 7520 train 3.760239 (lr=8.9079e-05) (hash(x)=148811846)
907
+ 7530 train 3.912411 (lr=8.8617e-05) (hash(x)=143727205)
908
+ 7540 train 3.803520 (lr=8.8158e-05) (hash(x)=154980561)
909
+ 7550 train 3.921439 (lr=8.7702e-05) (hash(x)=151143108)
910
+ 7560 train 3.879455 (lr=8.7251e-05) (hash(x)=133379076)
911
+ 7570 train 3.820880 (lr=8.6802e-05) (hash(x)=148338995)
912
+ 7580 train 3.895715 (lr=8.6357e-05) (hash(x)=156733806)
913
+ 7590 train 4.013241 (lr=8.5916e-05) (hash(x)=159299389)
914
+ 7600 val loss 4.0578
915
+ 7600 val perplexity 57.8479
916
+ 7600 train 4.046965 (lr=8.5478e-05) (hash(x)=142771511)
917
+ 7610 train 4.140900 (lr=8.5044e-05) (hash(x)=157959626)
918
+ 7620 train 4.164261 (lr=8.4613e-05) (hash(x)=156257811)
919
+ 7630 train 4.200882 (lr=8.4186e-05) (hash(x)=142730196)
920
+ 7640 train 4.043718 (lr=8.3763e-05) (hash(x)=147923184)
921
+ 7650 train 3.937852 (lr=8.3343e-05) (hash(x)=154290536)
922
+ 7660 train 3.951697 (lr=8.2926e-05) (hash(x)=156433550)
923
+ 7670 train 4.099843 (lr=8.2514e-05) (hash(x)=146611945)
924
+ 7680 train 3.924201 (lr=8.2104e-05) (hash(x)=153745377)
925
+ 7690 train 4.021882 (lr=8.1699e-05) (hash(x)=150833491)
926
+ 7700 val loss 4.0488
927
+ 7700 val perplexity 57.3271
928
+ 7700 train 4.033277 (lr=8.1297e-05) (hash(x)=143602175)
929
+ 7710 train 3.921625 (lr=8.0898e-05) (hash(x)=137035204)
930
+ 7720 train 4.341174 (lr=8.0503e-05) (hash(x)=157409397)
931
+ 7730 train 3.972918 (lr=8.0112e-05) (hash(x)=180056426)
932
+ 7740 train 4.125537 (lr=7.9725e-05) (hash(x)=167791182)
933
+ 7750 train 3.916767 (lr=7.9341e-05) (hash(x)=144902753)
934
+ 7760 train 4.020420 (lr=7.8960e-05) (hash(x)=142048628)
935
+ 7770 train 4.226341 (lr=7.8584e-05) (hash(x)=150623561)
936
+ 7780 train 3.828993 (lr=7.8211e-05) (hash(x)=133068514)
937
+ 7790 train 4.054572 (lr=7.7841e-05) (hash(x)=144372942)
938
+ 7800 val loss 4.0476
939
+ 7800 val perplexity 57.2592
940
+ 7800 train 4.217091 (lr=7.7476e-05) (hash(x)=152379862)
941
+ 7810 train 3.915911 (lr=7.7114e-05) (hash(x)=149929258)
942
+ 7820 train 4.086800 (lr=7.6755e-05) (hash(x)=147738863)
943
+ 7830 train 4.091173 (lr=7.6400e-05) (hash(x)=150885420)
944
+ 7840 train 4.100220 (lr=7.6049e-05) (hash(x)=151751961)
945
+ 7850 train 4.007580 (lr=7.5702e-05) (hash(x)=141061924)
946
+ 7860 train 4.068019 (lr=7.5358e-05) (hash(x)=165946945)
947
+ 7870 train 3.953148 (lr=7.5018e-05) (hash(x)=162222193)
948
+ 7880 train 4.074504 (lr=7.4682e-05) (hash(x)=156110032)
949
+ 7890 train 3.831291 (lr=7.4350e-05) (hash(x)=155496634)
950
+ 7900 val loss 4.0466
951
+ 7900 val perplexity 57.2030
952
+ 7900 train 3.787856 (lr=7.4021e-05) (hash(x)=146655921)
953
+ 7910 train 3.769818 (lr=7.3696e-05) (hash(x)=154169360)
954
+ 7920 train 3.945996 (lr=7.3374e-05) (hash(x)=164630987)
955
+ 7930 train 3.868266 (lr=7.3056e-05) (hash(x)=157591350)
956
+ 7940 train 3.845605 (lr=7.2742e-05) (hash(x)=147986483)
957
+ 7950 train 3.681630 (lr=7.2432e-05) (hash(x)=153958875)
958
+ 7960 train 3.968261 (lr=7.2126e-05) (hash(x)=149307886)
959
+ 7970 train 3.803093 (lr=7.1823e-05) (hash(x)=143190550)
960
+ 7980 train 3.812559 (lr=7.1524e-05) (hash(x)=157802449)
961
+ 7990 train 3.831206 (lr=7.1228e-05) (hash(x)=167181278)
962
+ 8000 val loss 4.0514
963
+ 8000 val perplexity 57.4791
964
+ 8000 train 4.193656 (lr=7.0937e-05) (hash(x)=148262482)
965
+ 8010 train 4.034598 (lr=7.0649e-05) (hash(x)=151119149)
966
+ 8020 train 3.980860 (lr=7.0365e-05) (hash(x)=149748120)
967
+ 8030 train 3.968684 (lr=7.0085e-05) (hash(x)=152462863)
968
+ 8040 train 4.102503 (lr=6.9808e-05) (hash(x)=148714170)
969
+ 8050 train 4.152256 (lr=6.9536e-05) (hash(x)=150219940)
970
+ 8060 train 4.048285 (lr=6.9267e-05) (hash(x)=154859690)
971
+ 8070 train 4.008626 (lr=6.9002e-05) (hash(x)=156635675)
972
+ 8080 train 4.161001 (lr=6.8740e-05) (hash(x)=164430040)
973
+ 8090 train 3.975182 (lr=6.8483e-05) (hash(x)=149003088)
974
+ 8100 val loss 4.0363
975
+ 8100 val perplexity 56.6147
976
+ 8100 train 4.078068 (lr=6.8229e-05) (hash(x)=147683655)
977
+ 8110 train 3.944183 (lr=6.7979e-05) (hash(x)=142849380)
978
+ 8120 train 3.977230 (lr=6.7733e-05) (hash(x)=147408722)
979
+ 8130 train 4.023798 (lr=6.7490e-05) (hash(x)=154095861)
980
+ 8140 train 4.068021 (lr=6.7252e-05) (hash(x)=154770296)
981
+ 8150 train 4.065643 (lr=6.7017e-05) (hash(x)=150175912)
982
+ 8160 train 4.058464 (lr=6.6786e-05) (hash(x)=146846694)
983
+ 8170 train 3.451766 (lr=6.6559e-05) (hash(x)=161621402)
984
+ 8180 train 4.241355 (lr=6.6335e-05) (hash(x)=151058128)
985
+ 8190 train 4.180078 (lr=6.6116e-05) (hash(x)=157265995)
986
+ 8200 val loss 4.0398
987
+ 8200 val perplexity 56.8169
988
+ 8200 train 4.227847 (lr=6.5900e-05) (hash(x)=157312987)
989
+ 8210 train 4.046945 (lr=6.5688e-05) (hash(x)=158755166)
990
+ 8220 train 3.906518 (lr=6.5480e-05) (hash(x)=137031704)
991
+ 8230 train 3.870870 (lr=6.5276e-05) (hash(x)=133909075)
992
+ 8240 train 4.064303 (lr=6.5076e-05) (hash(x)=152756441)
993
+ 8250 train 4.005052 (lr=6.4879e-05) (hash(x)=161956442)
994
+ 8260 train 4.044327 (lr=6.4687e-05) (hash(x)=146994534)
995
+ 8270 train 4.106925 (lr=6.4498e-05) (hash(x)=154925653)
996
+ 8280 train 4.093291 (lr=6.4313e-05) (hash(x)=141129942)
997
+ 8290 train 3.991355 (lr=6.4132e-05) (hash(x)=146178879)
998
+ 8300 val loss 4.0391
999
+ 8300 val perplexity 56.7766
1000
+ 8300 train 3.955336 (lr=6.3954e-05) (hash(x)=141107543)
1001
+ 8310 train 3.978231 (lr=6.3781e-05) (hash(x)=153322855)
1002
+ 8320 train 4.062834 (lr=6.3612e-05) (hash(x)=151197408)
1003
+ 8330 train 4.235330 (lr=6.3446e-05) (hash(x)=155927851)
1004
+ 8340 train 4.151821 (lr=6.3284e-05) (hash(x)=162484336)
1005
+ 8350 train 4.035906 (lr=6.3126e-05) (hash(x)=144280686)
1006
+ 8360 train 3.989989 (lr=6.2972e-05) (hash(x)=147652229)
1007
+ 8370 train 4.127994 (lr=6.2822e-05) (hash(x)=150071753)
1008
+ 8380 train 3.981030 (lr=6.2676e-05) (hash(x)=146798938)
1009
+ 8390 train 4.007610 (lr=6.2533e-05) (hash(x)=149596226)
1010
+ 8400 val loss 4.0308
1011
+ 8400 val perplexity 56.3058
1012
+ 8400 train 4.083535 (lr=6.2395e-05) (hash(x)=141323024)
1013
+ 8410 train 4.378178 (lr=6.2260e-05) (hash(x)=154823144)
1014
+ 8420 train 3.987642 (lr=6.2129e-05) (hash(x)=151554301)
1015
+ 8430 train 4.007751 (lr=6.2002e-05) (hash(x)=153707717)
1016
+ 8440 train 4.005811 (lr=6.1879e-05) (hash(x)=156091926)
1017
+ 8450 train 4.034133 (lr=6.1760e-05) (hash(x)=147111043)
1018
+ 8460 train 4.144534 (lr=6.1645e-05) (hash(x)=135068736)
1019
+ 8470 train 4.063171 (lr=6.1533e-05) (hash(x)=142255415)
1020
+ 8480 train 4.069883 (lr=6.1426e-05) (hash(x)=138060402)
1021
+ 8490 train 3.970289 (lr=6.1322e-05) (hash(x)=154789940)
1022
+ 8500 val loss 4.0297
1023
+ 8500 val perplexity 56.2454
1024
+ 8500 train 3.955011 (lr=6.1223e-05) (hash(x)=150696521)
1025
+ 8510 train 3.867834 (lr=6.1127e-05) (hash(x)=139242896)
1026
+ 8520 train 3.949209 (lr=6.1035e-05) (hash(x)=148066730)
1027
+ 8530 train 4.064819 (lr=6.0947e-05) (hash(x)=148649882)
1028
+ 8540 train 3.870282 (lr=6.0863e-05) (hash(x)=148449712)
1029
+ 8550 train 4.044040 (lr=6.0783e-05) (hash(x)=157155657)
1030
+ 8560 train 3.956589 (lr=6.0706e-05) (hash(x)=139492416)
1031
+ 8570 train 3.866205 (lr=6.0634e-05) (hash(x)=157567533)
1032
+ 8580 train 3.932952 (lr=6.0566e-05) (hash(x)=143032656)
1033
+ 8590 train 4.060696 (lr=6.0501e-05) (hash(x)=150899581)
1034
+ 8600 val loss 4.0329
1035
+ 8600 val perplexity 56.4264
1036
+ 8600 train 3.946125 (lr=6.0440e-05) (hash(x)=162288191)
1037
+ 8610 train 4.099604 (lr=6.0384e-05) (hash(x)=148361485)
1038
+ 8620 train 3.993620 (lr=6.0331e-05) (hash(x)=151740051)
1039
+ 8630 train 3.943242 (lr=6.0282e-05) (hash(x)=154125251)
1040
+ 8640 train 4.009730 (lr=6.0237e-05) (hash(x)=145857675)
1041
+ 8650 train 3.883564 (lr=6.0196e-05) (hash(x)=148185775)
1042
+ 8660 train 3.916621 (lr=6.0159e-05) (hash(x)=149778450)
1043
+ 8670 train 4.194302 (lr=6.0125e-05) (hash(x)=148123274)
1044
+ 8680 train 3.970802 (lr=6.0096e-05) (hash(x)=129372639)
1045
+ 8690 train 4.050332 (lr=6.0070e-05) (hash(x)=145050683)
1046
+ 8700 val loss 4.0264
1047
+ 8700 val perplexity 56.0598
1048
+ 8700 train 3.969222 (lr=6.0049e-05) (hash(x)=152860941)
1049
+ 8710 train 4.070393 (lr=6.0031e-05) (hash(x)=156971498)
1050
+ 8720 train 4.093506 (lr=6.0018e-05) (hash(x)=155513041)
1051
+ 8730 train 4.023675 (lr=6.0008e-05) (hash(x)=167451760)
1052
+ 8740 train 4.096678 (lr=6.0002e-05) (hash(x)=158768428)
1053
+ 8749 val loss 4.0271
1054
+ 8749 val perplexity 56.0961
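
Each line of log2.txt has one of three shapes: `<step> train <loss> (lr=<lr>) (hash(x)=<hash>)` every 10 steps, and `<step> val loss <x>` / `<step> val perplexity <x>` every 100 steps, where perplexity is exp(loss) (e.g. exp(4.0271) ≈ 56.098, matching the final pair up to print rounding). A minimal parsing sketch, assuming the file is read as plain text:

```python
import math
import re

# Minimal sketch: pull the validation curve out of log2.txt and confirm
# that each logged perplexity equals exp(val loss) up to print rounding.
val_re = re.compile(r"^(\d+) val (loss|perplexity) ([\d.]+)")

val_loss, val_ppl = {}, {}
with open("lr6e-4_total_batch_size61440_baseline_seed1341/log2.txt") as f:
    for line in f:
        m = val_re.match(line)
        if m:
            step, metric, value = int(m[1]), m[2], float(m[3])
            (val_loss if metric == "loss" else val_ppl)[step] = value

for step, loss in sorted(val_loss.items()):
    assert abs(math.exp(loss) - val_ppl[step]) / val_ppl[step] < 1e-3

print(f"{math.exp(4.0271):.4f}")  # ~56.098 vs the logged 56.0961 at step 8749
```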
lr6e-4_total_batch_size61440_baseline_seed1341/model_08749.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5fc8a228e4e562ebed91b936709f9a0775963a455b693132f065891f87ab0417
+ size 92843394
lr6e-4_total_batch_size61440_baseline_seed1341/optimizer_08749.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1e4887856c6e4888769043080f658679397f7c0e63fb3ade5968e232823a1d80
+ size 179406214
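
The two checkpoint sizes are internally consistent: 92,843,394 bytes is roughly 23.2M fp32 parameters, and the optimizer file is about 1.93x the model file, close to the 2x expected if the optimizer keeps two fp32 moment buffers per parameter. This assumes an Adam-style optimizer and fp32 tensors, neither of which is recorded in args.json, so treat the arithmetic below as a plausibility check rather than ground truth.

```python
# Plausibility check on checkpoint sizes (assumes fp32 tensors and an
# Adam-style optimizer with two moment buffers; neither is recorded here).
model_bytes = 92_843_394
optimizer_bytes = 179_406_214

print(model_bytes / 4 / 1e6)          # ~23.21 -> roughly 23M parameters
print(optimizer_bytes / model_bytes)  # ~1.93, close to Adam's 2x moments
```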