andrew-healey committed on
Commit 58f4fe8 · verified · 1 Parent(s): a629449

Upload folder using huggingface_hub

lr16e-4_total_batch_size61440_baseline_seed1338/args.json ADDED
@@ -0,0 +1 @@
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_11/lr16e-4_total_batch_size61440_baseline_seed1338", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_11", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1338, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.0016, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "16e-4_61440", "n_embd": 256}
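The config pins a 4-head, 256-dim model with selective attention and μP scaling, trained for 8,750 steps with a 500-step warmup. As a minimal sketch (assuming total_batch_size counts tokens per optimizer update, which is consistent with batch_size × seq_len dividing it evenly), the implied gradient-accumulation factor can be read straight off the file:

```python
import json

# Load the recorded run arguments from this upload.
with open("lr16e-4_total_batch_size61440_baseline_seed1338/args.json") as f:
    args = json.load(f)

# Assumption: total_batch_size is measured in tokens per optimizer step.
tokens_per_micro_batch = args["batch_size"] * args["seq_len"]     # 120 * 256 = 30720
accum_steps = args["total_batch_size"] // tokens_per_micro_batch  # 61440 // 30720 = 2
print(f"{accum_steps} gradient-accumulation step(s) per update")
```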
lr16e-4_total_batch_size61440_baseline_seed1338/dataloader_08749.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:953385078aa3787b69fc6857dfd48b0a2cd2f4d27c6f8892e01211aca53d07f5
+ size 964
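These three lines are a Git LFS pointer, not the tensor payload itself; the real 964-byte file lives in LFS storage keyed by the sha256 oid. A hedged sketch of fetching the actual artifact with huggingface_hub (the repo_id below is a placeholder for wherever this commit was pushed):

```python
from huggingface_hub import hf_hub_download

# hf_hub_download follows the LFS pointer and returns a local path
# to the real file. repo_id is hypothetical; substitute the target repo.
path = hf_hub_download(
    repo_id="<user-or-org>/<repo>",  # placeholder
    filename="lr16e-4_total_batch_size61440_baseline_seed1338/dataloader_08749.pt",
)
print(path)
```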
lr16e-4_total_batch_size61440_baseline_seed1338/log2.txt ADDED
@@ -0,0 +1,1054 @@
+ max_steps: 8750
+ 0 val loss 11.2202
+ 0 val perplexity 74626.3672
+ 0 train 11.227304 (lr=3.2000e-06) (hash(x)=150327452)
+ 10 train 9.972887 (lr=3.5200e-05) (hash(x)=165603215)
+ 20 train 9.516108 (lr=6.7200e-05) (hash(x)=142061326)
+ 30 train 8.889233 (lr=9.9200e-05) (hash(x)=147690730)
+ 40 train 8.316031 (lr=1.3120e-04) (hash(x)=160872542)
+ 50 train 7.841997 (lr=1.6320e-04) (hash(x)=166441190)
+ 60 train 7.660695 (lr=1.9520e-04) (hash(x)=140536766)
+ 70 train 7.590111 (lr=2.2720e-04) (hash(x)=146052944)
+ 80 train 7.435937 (lr=2.5920e-04) (hash(x)=140542391)
+ 90 train 7.297568 (lr=2.9120e-04) (hash(x)=147098660)
+ 100 val loss 7.2863
+ 100 val perplexity 1460.1827
+ 100 train 7.253595 (lr=3.2320e-04) (hash(x)=166780046)
+ 110 train 7.002010 (lr=3.5520e-04) (hash(x)=143376584)
+ 120 train 6.918857 (lr=3.8720e-04) (hash(x)=145337630)
+ 130 train 6.894269 (lr=4.1920e-04) (hash(x)=147524744)
+ 140 train 6.784772 (lr=4.5120e-04) (hash(x)=155587442)
+ 150 train 6.662381 (lr=4.8320e-04) (hash(x)=159835303)
+ 160 train 6.510073 (lr=5.1520e-04) (hash(x)=142171630)
+ 170 train 6.412191 (lr=5.4720e-04) (hash(x)=136930463)
+ 180 train 6.380286 (lr=5.7920e-04) (hash(x)=153579148)
+ 190 train 6.282069 (lr=6.1120e-04) (hash(x)=140352771)
+ 200 val loss 6.4800
+ 200 val perplexity 651.9868
+ 200 train 6.185512 (lr=6.4320e-04) (hash(x)=155040610)
+ 210 train 6.127870 (lr=6.7520e-04) (hash(x)=141273862)
+ 220 train 6.341401 (lr=7.0720e-04) (hash(x)=165326618)
+ 230 train 6.084839 (lr=7.3920e-04) (hash(x)=150122120)
+ 240 train 5.873354 (lr=7.7120e-04) (hash(x)=143024679)
+ 250 train 5.909622 (lr=8.0320e-04) (hash(x)=130190460)
+ 260 train 5.988489 (lr=8.3520e-04) (hash(x)=151890219)
+ 270 train 5.762166 (lr=8.6720e-04) (hash(x)=143685291)
+ 280 train 6.092319 (lr=8.9920e-04) (hash(x)=137108194)
+ 290 train 6.251884 (lr=9.3120e-04) (hash(x)=157920866)
+ 300 val loss 6.0527
+ 300 val perplexity 425.2684
+ 300 train 5.970470 (lr=9.6320e-04) (hash(x)=155504036)
+ 310 train 6.061251 (lr=9.9520e-04) (hash(x)=144551186)
+ 320 train 6.050638 (lr=1.0272e-03) (hash(x)=161180086)
+ 330 train 5.821877 (lr=1.0592e-03) (hash(x)=141651471)
+ 340 train 5.936610 (lr=1.0912e-03) (hash(x)=157490741)
+ 350 train 5.932688 (lr=1.1232e-03) (hash(x)=137347213)
+ 360 train 6.248074 (lr=1.1552e-03) (hash(x)=171273659)
+ 370 train 5.799923 (lr=1.1872e-03) (hash(x)=152118871)
+ 380 train 5.777218 (lr=1.2192e-03) (hash(x)=161338097)
+ 390 train 5.865106 (lr=1.2512e-03) (hash(x)=152404537)
+ 400 val loss 5.7248
+ 400 val perplexity 306.3653
+ 400 train 5.542346 (lr=1.2832e-03) (hash(x)=143823248)
+ 410 train 5.826689 (lr=1.3152e-03) (hash(x)=161686752)
+ 420 train 5.554622 (lr=1.3472e-03) (hash(x)=155391312)
+ 430 train 5.478610 (lr=1.3792e-03) (hash(x)=142145507)
+ 440 train 5.631017 (lr=1.4112e-03) (hash(x)=121931091)
+ 450 train 5.542422 (lr=1.4432e-03) (hash(x)=156260416)
+ 460 train 5.681055 (lr=1.4752e-03) (hash(x)=169781623)
+ 470 train 5.491864 (lr=1.5072e-03) (hash(x)=150131545)
+ 480 train 5.513600 (lr=1.5392e-03) (hash(x)=150443184)
+ 490 train 5.529697 (lr=1.5712e-03) (hash(x)=149368198)
+ 500 val loss 5.5410
+ 500 val perplexity 254.9404
+ 500 train 5.415002 (lr=1.6000e-03) (hash(x)=143734685)
+ 510 train 5.464277 (lr=1.6000e-03) (hash(x)=148943891)
+ 520 train 5.470827 (lr=1.6000e-03) (hash(x)=149070354)
+ 530 train 5.442743 (lr=1.6000e-03) (hash(x)=153601219)
+ 540 train 5.434228 (lr=1.5999e-03) (hash(x)=174970654)
+ 550 train 5.372306 (lr=1.5999e-03) (hash(x)=160013925)
+ 560 train 5.221977 (lr=1.5998e-03) (hash(x)=152096988)
+ 570 train 5.385816 (lr=1.5997e-03) (hash(x)=157203924)
+ 580 train 5.122974 (lr=1.5997e-03) (hash(x)=138582113)
+ 590 train 5.134297 (lr=1.5996e-03) (hash(x)=157451775)
+ 600 val loss 5.3683
+ 600 val perplexity 214.4945
+ 600 train 5.057099 (lr=1.5995e-03) (hash(x)=150678249)
+ 610 train 5.099609 (lr=1.5994e-03) (hash(x)=150248072)
+ 620 train 5.006677 (lr=1.5992e-03) (hash(x)=148286909)
+ 630 train 4.983775 (lr=1.5991e-03) (hash(x)=141197178)
+ 640 train 5.039190 (lr=1.5990e-03) (hash(x)=144988391)
+ 650 train 4.953558 (lr=1.5988e-03) (hash(x)=149073315)
+ 660 train 4.898599 (lr=1.5987e-03) (hash(x)=145239943)
+ 670 train 4.920674 (lr=1.5985e-03) (hash(x)=146657426)
+ 680 train 5.173290 (lr=1.5983e-03) (hash(x)=142323429)
+ 690 train 5.177485 (lr=1.5981e-03) (hash(x)=142724342)
+ 700 val loss 5.2021
+ 700 val perplexity 181.6552
+ 700 train 5.132995 (lr=1.5979e-03) (hash(x)=175802021)
+ 710 train 5.136622 (lr=1.5977e-03) (hash(x)=148902690)
+ 720 train 5.205983 (lr=1.5975e-03) (hash(x)=155151320)
+ 730 train 5.063040 (lr=1.5972e-03) (hash(x)=142078914)
+ 740 train 5.207072 (lr=1.5970e-03) (hash(x)=155634484)
+ 750 train 5.160653 (lr=1.5967e-03) (hash(x)=171034639)
+ 760 train 5.134158 (lr=1.5965e-03) (hash(x)=142470148)
+ 770 train 5.081149 (lr=1.5962e-03) (hash(x)=156024448)
+ 780 train 5.006321 (lr=1.5959e-03) (hash(x)=151369417)
+ 790 train 4.945127 (lr=1.5956e-03) (hash(x)=139260056)
+ 800 val loss 5.0218
+ 800 val perplexity 151.6873
+ 800 train 5.110589 (lr=1.5953e-03) (hash(x)=158681215)
+ 810 train 4.927437 (lr=1.5950e-03) (hash(x)=144884161)
+ 820 train 4.976940 (lr=1.5947e-03) (hash(x)=146977514)
+ 830 train 4.885663 (lr=1.5943e-03) (hash(x)=148983636)
+ 840 train 4.900819 (lr=1.5940e-03) (hash(x)=157824542)
+ 850 train 4.898818 (lr=1.5936e-03) (hash(x)=152116061)
+ 860 train 4.979419 (lr=1.5932e-03) (hash(x)=153743703)
+ 870 train 5.127135 (lr=1.5929e-03) (hash(x)=149913335)
+ 880 train 4.886528 (lr=1.5925e-03) (hash(x)=151096434)
+ 890 train 4.853988 (lr=1.5921e-03) (hash(x)=145848702)
+ 900 val loss 4.9168
+ 900 val perplexity 136.5624
+ 900 train 4.855925 (lr=1.5917e-03) (hash(x)=146108145)
+ 910 train 4.826305 (lr=1.5912e-03) (hash(x)=146903806)
+ 920 train 4.766856 (lr=1.5908e-03) (hash(x)=149225019)
+ 930 train 4.708419 (lr=1.5904e-03) (hash(x)=146110422)
+ 940 train 4.792430 (lr=1.5899e-03) (hash(x)=161333614)
+ 950 train 4.686063 (lr=1.5895e-03) (hash(x)=147598108)
+ 960 train 4.840932 (lr=1.5890e-03) (hash(x)=158133101)
+ 970 train 4.638521 (lr=1.5885e-03) (hash(x)=152199591)
+ 980 train 4.610433 (lr=1.5880e-03) (hash(x)=152359036)
+ 990 train 4.676208 (lr=1.5875e-03) (hash(x)=145166335)
+ 1000 val loss 4.8698
+ 1000 val perplexity 130.2967
+ 1000 train 4.575156 (lr=1.5870e-03) (hash(x)=154996086)
+ 1010 train 4.521421 (lr=1.5865e-03) (hash(x)=149406975)
+ 1020 train 4.632843 (lr=1.5859e-03) (hash(x)=144618270)
+ 1030 train 4.479891 (lr=1.5854e-03) (hash(x)=148306536)
+ 1040 train 4.515413 (lr=1.5848e-03) (hash(x)=151643553)
+ 1050 train 4.486822 (lr=1.5843e-03) (hash(x)=153396183)
+ 1060 train 4.479833 (lr=1.5837e-03) (hash(x)=157501787)
+ 1070 train 4.679214 (lr=1.5831e-03) (hash(x)=153277507)
+ 1080 train 4.764113 (lr=1.5825e-03) (hash(x)=156232389)
+ 1090 train 4.708676 (lr=1.5819e-03) (hash(x)=150673143)
+ 1100 val loss 4.7879
+ 1100 val perplexity 120.0531
+ 1100 train 4.764720 (lr=1.5813e-03) (hash(x)=153885445)
+ 1110 train 4.780037 (lr=1.5807e-03) (hash(x)=143951073)
+ 1120 train 5.204698 (lr=1.5800e-03) (hash(x)=151626904)
+ 1130 train 4.754672 (lr=1.5794e-03) (hash(x)=144013074)
+ 1140 train 4.704170 (lr=1.5787e-03) (hash(x)=149948988)
+ 1150 train 4.767950 (lr=1.5781e-03) (hash(x)=159666385)
+ 1160 train 4.747581 (lr=1.5774e-03) (hash(x)=146889587)
+ 1170 train 4.798748 (lr=1.5767e-03) (hash(x)=151746917)
+ 1180 train 4.819768 (lr=1.5760e-03) (hash(x)=154396948)
+ 1190 train 4.773929 (lr=1.5753e-03) (hash(x)=148201222)
+ 1200 val loss 4.7447
+ 1200 val perplexity 114.9768
+ 1200 train 4.761877 (lr=1.5746e-03) (hash(x)=142353087)
+ 1210 train 4.759091 (lr=1.5738e-03) (hash(x)=146779775)
+ 1220 train 4.813843 (lr=1.5731e-03) (hash(x)=155864740)
+ 1230 train 4.870390 (lr=1.5724e-03) (hash(x)=129826485)
+ 1240 train 4.734400 (lr=1.5716e-03) (hash(x)=108599491)
+ 1250 train 4.609966 (lr=1.5708e-03) (hash(x)=146491718)
+ 1260 train 4.682870 (lr=1.5701e-03) (hash(x)=141897382)
+ 1270 train 4.649850 (lr=1.5693e-03) (hash(x)=144830380)
+ 1280 train 4.720098 (lr=1.5685e-03) (hash(x)=157677763)
+ 1290 train 4.548703 (lr=1.5677e-03) (hash(x)=143611746)
+ 1300 val loss 4.6979
+ 1300 val perplexity 109.7178
+ 1300 train 4.578378 (lr=1.5668e-03) (hash(x)=150750353)
+ 1310 train 4.797231 (lr=1.5660e-03) (hash(x)=152850907)
+ 1320 train 4.731252 (lr=1.5652e-03) (hash(x)=148575395)
+ 1330 train 4.472525 (lr=1.5643e-03) (hash(x)=157113001)
+ 1340 train 4.523620 (lr=1.5635e-03) (hash(x)=137400723)
+ 1350 train 4.577045 (lr=1.5626e-03) (hash(x)=129849193)
+ 1360 train 4.576400 (lr=1.5617e-03) (hash(x)=144176797)
+ 1370 train 4.398354 (lr=1.5608e-03) (hash(x)=144550831)
+ 1380 train 4.370352 (lr=1.5600e-03) (hash(x)=144171821)
+ 1390 train 4.316359 (lr=1.5590e-03) (hash(x)=146110449)
+ 1400 val loss 4.6679
+ 1400 val perplexity 106.4792
+ 1400 train 4.398892 (lr=1.5581e-03) (hash(x)=152767913)
+ 1410 train 4.393617 (lr=1.5572e-03) (hash(x)=148110947)
+ 1420 train 4.274868 (lr=1.5563e-03) (hash(x)=153477905)
+ 1430 train 4.170779 (lr=1.5553e-03) (hash(x)=147227598)
+ 1440 train 4.393749 (lr=1.5544e-03) (hash(x)=146280368)
+ 1450 train 4.356233 (lr=1.5534e-03) (hash(x)=146531140)
+ 1460 train 4.197500 (lr=1.5524e-03) (hash(x)=151573873)
+ 1470 train 4.688970 (lr=1.5514e-03) (hash(x)=155559275)
+ 1480 train 4.561090 (lr=1.5504e-03) (hash(x)=140584826)
+ 1490 train 4.556269 (lr=1.5494e-03) (hash(x)=165070637)
+ 1500 val loss 4.6568
+ 1500 val perplexity 105.3020
+ 1500 train 4.676044 (lr=1.5484e-03) (hash(x)=151562048)
+ 1510 train 4.439283 (lr=1.5474e-03) (hash(x)=147770607)
+ 1520 train 4.525732 (lr=1.5464e-03) (hash(x)=156486153)
+ 1530 train 4.493170 (lr=1.5453e-03) (hash(x)=140244564)
+ 1540 train 4.543811 (lr=1.5443e-03) (hash(x)=151627549)
+ 1550 train 4.582613 (lr=1.5432e-03) (hash(x)=146001424)
+ 1560 train 4.663486 (lr=1.5421e-03) (hash(x)=184266264)
+ 1570 train 4.563400 (lr=1.5411e-03) (hash(x)=145002091)
+ 1580 train 4.447846 (lr=1.5400e-03) (hash(x)=132216826)
+ 1590 train 4.625302 (lr=1.5389e-03) (hash(x)=160975703)
+ 1600 val loss 4.6095
+ 1600 val perplexity 100.4312
+ 1600 train 4.768918 (lr=1.5378e-03) (hash(x)=166486165)
+ 1610 train 4.649122 (lr=1.5366e-03) (hash(x)=149601833)
+ 1620 train 4.524448 (lr=1.5355e-03) (hash(x)=157815188)
+ 1630 train 4.545942 (lr=1.5344e-03) (hash(x)=144234679)
+ 1640 train 4.669394 (lr=1.5332e-03) (hash(x)=160054560)
+ 1650 train 4.499795 (lr=1.5321e-03) (hash(x)=141061333)
+ 1660 train 4.612688 (lr=1.5309e-03) (hash(x)=148064219)
+ 1670 train 4.450078 (lr=1.5297e-03) (hash(x)=162799669)
+ 1680 train 4.497554 (lr=1.5285e-03) (hash(x)=147474277)
+ 1690 train 4.570117 (lr=1.5273e-03) (hash(x)=142369299)
+ 1700 val loss 4.5872
+ 1700 val perplexity 98.2144
+ 1700 train 4.347134 (lr=1.5261e-03) (hash(x)=130835396)
+ 1710 train 4.680794 (lr=1.5249e-03) (hash(x)=153173892)
+ 1720 train 4.328899 (lr=1.5237e-03) (hash(x)=148451191)
+ 1730 train 4.446330 (lr=1.5225e-03) (hash(x)=145047239)
+ 1740 train 4.353974 (lr=1.5212e-03) (hash(x)=147187109)
+ 1750 train 4.466566 (lr=1.5200e-03) (hash(x)=149493155)
+ 1760 train 4.421374 (lr=1.5187e-03) (hash(x)=147647531)
+ 1770 train 4.658438 (lr=1.5174e-03) (hash(x)=153677279)
+ 1780 train 4.485175 (lr=1.5162e-03) (hash(x)=144314015)
+ 1790 train 4.497335 (lr=1.5149e-03) (hash(x)=176962883)
+ 1800 val loss 4.5479
+ 1800 val perplexity 94.4331
+ 1800 train 4.589813 (lr=1.5136e-03) (hash(x)=158851816)
+ 1810 train 4.169217 (lr=1.5123e-03) (hash(x)=175415447)
+ 1820 train 4.478657 (lr=1.5109e-03) (hash(x)=169829733)
+ 1830 train 4.457378 (lr=1.5096e-03) (hash(x)=154116106)
+ 1840 train 4.301032 (lr=1.5083e-03) (hash(x)=150168406)
+ 1850 train 4.298365 (lr=1.5069e-03) (hash(x)=145782493)
+ 1860 train 4.277584 (lr=1.5056e-03) (hash(x)=158111664)
+ 1870 train 4.230505 (lr=1.5042e-03) (hash(x)=154837061)
+ 1880 train 4.171486 (lr=1.5029e-03) (hash(x)=151363745)
+ 1890 train 4.289242 (lr=1.5015e-03) (hash(x)=161283557)
+ 1900 val loss 4.5416
+ 1900 val perplexity 93.8427
+ 1900 train 4.545915 (lr=1.5001e-03) (hash(x)=153313879)
+ 1910 train 4.368105 (lr=1.4987e-03) (hash(x)=152272272)
+ 1920 train 4.429684 (lr=1.4973e-03) (hash(x)=153648837)
+ 1930 train 4.535322 (lr=1.4959e-03) (hash(x)=150161694)
+ 1940 train 4.408885 (lr=1.4944e-03) (hash(x)=151235093)
+ 1950 train 4.465991 (lr=1.4930e-03) (hash(x)=140582058)
+ 1960 train 4.410006 (lr=1.4916e-03) (hash(x)=125616059)
+ 1970 train 4.540657 (lr=1.4901e-03) (hash(x)=154353968)
+ 1980 train 4.551030 (lr=1.4886e-03) (hash(x)=156063143)
+ 1990 train 4.569766 (lr=1.4872e-03) (hash(x)=160927289)
+ 2000 val loss 4.4888
+ 2000 val perplexity 89.0169
+ 2000 train 4.478562 (lr=1.4857e-03) (hash(x)=158245023)
+ 2010 train 4.530198 (lr=1.4842e-03) (hash(x)=157096661)
+ 2020 train 4.759308 (lr=1.4827e-03) (hash(x)=144994302)
+ 2030 train 4.443616 (lr=1.4812e-03) (hash(x)=145742929)
+ 2040 train 4.458299 (lr=1.4797e-03) (hash(x)=156958528)
+ 2050 train 4.961064 (lr=1.4782e-03) (hash(x)=135313020)
+ 2060 train 4.608280 (lr=1.4767e-03) (hash(x)=147309332)
+ 2070 train 4.398744 (lr=1.4751e-03) (hash(x)=161720962)
+ 2080 train 4.581003 (lr=1.4736e-03) (hash(x)=167204793)
+ 2090 train 4.482615 (lr=1.4720e-03) (hash(x)=141325510)
+ 2100 val loss 4.4758
+ 2100 val perplexity 87.8612
+ 2100 train 4.308885 (lr=1.4704e-03) (hash(x)=157204896)
+ 2110 train 4.265119 (lr=1.4689e-03) (hash(x)=143077993)
+ 2120 train 4.461938 (lr=1.4673e-03) (hash(x)=159032689)
+ 2130 train 4.346468 (lr=1.4657e-03) (hash(x)=149513855)
+ 2140 train 4.264449 (lr=1.4641e-03) (hash(x)=151678267)
+ 2150 train 4.366063 (lr=1.4625e-03) (hash(x)=141309246)
+ 2160 train 4.459674 (lr=1.4609e-03) (hash(x)=153935732)
+ 2170 train 4.357332 (lr=1.4593e-03) (hash(x)=150668875)
+ 2180 train 4.364537 (lr=1.4576e-03) (hash(x)=142033123)
+ 2190 train 4.404786 (lr=1.4560e-03) (hash(x)=157015026)
+ 2200 val loss 4.4633
+ 2200 val perplexity 86.7719
+ 2200 train 4.286598 (lr=1.4543e-03) (hash(x)=137541932)
+ 2210 train 4.206499 (lr=1.4527e-03) (hash(x)=144424504)
+ 2220 train 4.132062 (lr=1.4510e-03) (hash(x)=151015810)
+ 2230 train 4.201881 (lr=1.4493e-03) (hash(x)=160707806)
+ 2240 train 4.213866 (lr=1.4476e-03) (hash(x)=151874110)
+ 2250 train 4.186008 (lr=1.4460e-03) (hash(x)=157341469)
+ 2260 train 4.008515 (lr=1.4443e-03) (hash(x)=144563224)
+ 2270 train 4.164164 (lr=1.4426e-03) (hash(x)=144878459)
+ 2280 train 4.225405 (lr=1.4408e-03) (hash(x)=158331565)
+ 2290 train 4.266317 (lr=1.4391e-03) (hash(x)=146875132)
+ 2300 val loss 4.4551
+ 2300 val perplexity 86.0660
+ 2300 train 4.395679 (lr=1.4374e-03) (hash(x)=150149692)
+ 2310 train 4.392414 (lr=1.4356e-03) (hash(x)=141206538)
+ 2320 train 4.442695 (lr=1.4339e-03) (hash(x)=136219115)
+ 2330 train 4.462418 (lr=1.4321e-03) (hash(x)=148496500)
+ 2340 train 4.452066 (lr=1.4304e-03) (hash(x)=161388511)
+ 2350 train 4.302850 (lr=1.4286e-03) (hash(x)=149433185)
+ 2360 train 4.513514 (lr=1.4268e-03) (hash(x)=154659203)
+ 2370 train 4.504569 (lr=1.4250e-03) (hash(x)=145696996)
+ 2380 train 4.313843 (lr=1.4232e-03) (hash(x)=139915095)
+ 2390 train 4.441415 (lr=1.4214e-03) (hash(x)=150567584)
+ 2400 val loss 4.4359
+ 2400 val perplexity 84.4303
+ 2400 train 4.358191 (lr=1.4196e-03) (hash(x)=151730720)
+ 2410 train 4.392675 (lr=1.4178e-03) (hash(x)=153136544)
+ 2420 train 4.352077 (lr=1.4160e-03) (hash(x)=151004677)
+ 2430 train 4.373085 (lr=1.4141e-03) (hash(x)=167118859)
+ 2440 train 4.391276 (lr=1.4123e-03) (hash(x)=145433453)
+ 2450 train 4.421721 (lr=1.4105e-03) (hash(x)=150913293)
+ 2460 train 4.386439 (lr=1.4086e-03) (hash(x)=145674639)
+ 2470 train 4.488542 (lr=1.4067e-03) (hash(x)=151022749)
+ 2480 train 4.337260 (lr=1.4049e-03) (hash(x)=136621037)
+ 2490 train 4.388394 (lr=1.4030e-03) (hash(x)=158100672)
+ 2500 val loss 4.4006
+ 2500 val perplexity 81.5001
+ 2500 train 4.407022 (lr=1.4011e-03) (hash(x)=143406752)
+ 2510 train 4.334888 (lr=1.3992e-03) (hash(x)=142664084)
+ 2520 train 4.389374 (lr=1.3973e-03) (hash(x)=132174378)
+ 2530 train 4.396482 (lr=1.3954e-03) (hash(x)=140760293)
+ 2540 train 4.341993 (lr=1.3935e-03) (hash(x)=154863398)
+ 2550 train 4.286408 (lr=1.3915e-03) (hash(x)=153042049)
+ 2560 train 4.222927 (lr=1.3896e-03) (hash(x)=145339170)
+ 2570 train 4.374972 (lr=1.3877e-03) (hash(x)=157656357)
+ 2580 train 4.356512 (lr=1.3857e-03) (hash(x)=147899462)
+ 2590 train 4.340437 (lr=1.3838e-03) (hash(x)=142812892)
+ 2600 val loss 4.4148
+ 2600 val perplexity 82.6684
+ 2600 train 4.291786 (lr=1.3818e-03) (hash(x)=157272496)
+ 2610 train 4.080462 (lr=1.3798e-03) (hash(x)=148923594)
+ 2620 train 4.008453 (lr=1.3778e-03) (hash(x)=164654807)
+ 2630 train 4.200662 (lr=1.3759e-03) (hash(x)=136419327)
+ 2640 train 4.165953 (lr=1.3739e-03) (hash(x)=153420840)
+ 2650 train 3.997154 (lr=1.3719e-03) (hash(x)=153693827)
+ 2660 train 4.156643 (lr=1.3699e-03) (hash(x)=145574165)
+ 2670 train 4.207622 (lr=1.3679e-03) (hash(x)=153894843)
+ 2680 train 4.050721 (lr=1.3658e-03) (hash(x)=132234446)
+ 2690 train 4.129662 (lr=1.3638e-03) (hash(x)=153430212)
+ 2700 val loss 4.4035
+ 2700 val perplexity 81.7395
+ 2700 train 4.306398 (lr=1.3618e-03) (hash(x)=155342327)
+ 2710 train 4.310942 (lr=1.3597e-03) (hash(x)=145797023)
+ 2720 train 4.461793 (lr=1.3577e-03) (hash(x)=149220870)
+ 2730 train 4.378531 (lr=1.3556e-03) (hash(x)=134069562)
+ 2740 train 4.375551 (lr=1.3536e-03) (hash(x)=141560499)
+ 2750 train 4.383879 (lr=1.3515e-03) (hash(x)=146000524)
+ 2760 train 4.283324 (lr=1.3494e-03) (hash(x)=164710665)
+ 2770 train 4.177800 (lr=1.3473e-03) (hash(x)=151199094)
+ 2780 train 4.370714 (lr=1.3453e-03) (hash(x)=138625404)
+ 2790 train 4.624011 (lr=1.3432e-03) (hash(x)=162766112)
+ 2800 val loss 4.3711
+ 2800 val perplexity 79.1271
+ 2800 train 4.334440 (lr=1.3411e-03) (hash(x)=140626679)
+ 2810 train 4.426750 (lr=1.3389e-03) (hash(x)=144248544)
+ 2820 train 4.334456 (lr=1.3368e-03) (hash(x)=139378481)
+ 2830 train 4.266303 (lr=1.3347e-03) (hash(x)=150173403)
+ 2840 train 4.557656 (lr=1.3326e-03) (hash(x)=160168705)
+ 2850 train 4.361000 (lr=1.3304e-03) (hash(x)=149480059)
+ 2860 train 4.403877 (lr=1.3283e-03) (hash(x)=155559096)
+ 2870 train 4.329821 (lr=1.3262e-03) (hash(x)=158825683)
+ 2880 train 4.509315 (lr=1.3240e-03) (hash(x)=152129048)
+ 2890 train 4.173676 (lr=1.3218e-03) (hash(x)=157866544)
+ 2900 val loss 4.3501
+ 2900 val perplexity 77.4855
+ 2900 train 4.303938 (lr=1.3197e-03) (hash(x)=144953350)
+ 2910 train 4.510077 (lr=1.3175e-03) (hash(x)=154103497)
+ 2920 train 4.228721 (lr=1.3153e-03) (hash(x)=144467854)
+ 2930 train 4.153110 (lr=1.3131e-03) (hash(x)=153784048)
+ 2940 train 4.451362 (lr=1.3109e-03) (hash(x)=154774534)
+ 2950 train 4.310647 (lr=1.3087e-03) (hash(x)=143988507)
+ 2960 train 4.189782 (lr=1.3065e-03) (hash(x)=159552857)
+ 2970 train 4.479873 (lr=1.3043e-03) (hash(x)=175539144)
+ 2980 train 4.367302 (lr=1.3021e-03) (hash(x)=158172686)
+ 2990 train 4.237266 (lr=1.2999e-03) (hash(x)=147553039)
+ 3000 val loss 4.3623
+ 3000 val perplexity 78.4352
+ 3000 train 4.169420 (lr=1.2976e-03) (hash(x)=172449837)
+ 3010 train 4.059199 (lr=1.2954e-03) (hash(x)=153285123)
+ 3020 train 4.232521 (lr=1.2932e-03) (hash(x)=145307252)
+ 3030 train 4.291190 (lr=1.2909e-03) (hash(x)=147739766)
+ 3040 train 4.006360 (lr=1.2887e-03) (hash(x)=154414400)
+ 3050 train 4.086408 (lr=1.2864e-03) (hash(x)=150696487)
+ 3060 train 4.097227 (lr=1.2841e-03) (hash(x)=154543441)
+ 3070 train 3.878911 (lr=1.2819e-03) (hash(x)=170733242)
+ 3080 train 4.238189 (lr=1.2796e-03) (hash(x)=161914669)
+ 3090 train 4.116949 (lr=1.2773e-03) (hash(x)=156125491)
+ 3100 val loss 4.3764
+ 3100 val perplexity 79.5494
+ 3100 train 4.004492 (lr=1.2750e-03) (hash(x)=141710086)
+ 3110 train 4.326313 (lr=1.2727e-03) (hash(x)=154547635)
+ 3120 train 4.165356 (lr=1.2704e-03) (hash(x)=151747171)
+ 3130 train 4.444594 (lr=1.2681e-03) (hash(x)=160311353)
+ 3140 train 4.560120 (lr=1.2658e-03) (hash(x)=157233821)
+ 3150 train 4.367349 (lr=1.2635e-03) (hash(x)=153016810)
+ 3160 train 4.342836 (lr=1.2612e-03) (hash(x)=157289875)
+ 3170 train 4.344308 (lr=1.2588e-03) (hash(x)=157459484)
+ 3180 train 4.269902 (lr=1.2565e-03) (hash(x)=162037435)
+ 3190 train 4.366226 (lr=1.2542e-03) (hash(x)=159360169)
+ 3200 val loss 4.3234
+ 3200 val perplexity 75.4459
+ 3200 train 4.249028 (lr=1.2518e-03) (hash(x)=151299772)
+ 3210 train 4.478005 (lr=1.2495e-03) (hash(x)=183865364)
+ 3220 train 4.220253 (lr=1.2471e-03) (hash(x)=139622592)
+ 3230 train 4.173029 (lr=1.2447e-03) (hash(x)=147102655)
+ 3240 train 4.228549 (lr=1.2424e-03) (hash(x)=161870696)
+ 3250 train 4.386982 (lr=1.2400e-03) (hash(x)=152692231)
+ 3260 train 4.126943 (lr=1.2376e-03) (hash(x)=149990134)
+ 3270 train 4.102612 (lr=1.2352e-03) (hash(x)=134848432)
+ 3280 train 4.164372 (lr=1.2329e-03) (hash(x)=138519418)
+ 3290 train 4.301248 (lr=1.2305e-03) (hash(x)=139301249)
+ 3300 val loss 4.3291
+ 3300 val perplexity 75.8753
+ 3300 train 4.136382 (lr=1.2281e-03) (hash(x)=146473110)
+ 3310 train 4.200676 (lr=1.2257e-03) (hash(x)=144833685)
+ 3320 train 4.105077 (lr=1.2233e-03) (hash(x)=148953545)
+ 3330 train 4.336022 (lr=1.2208e-03) (hash(x)=161302371)
+ 3340 train 4.266917 (lr=1.2184e-03) (hash(x)=144215411)
+ 3350 train 4.460447 (lr=1.2160e-03) (hash(x)=157713664)
+ 3360 train 4.319351 (lr=1.2136e-03) (hash(x)=150813928)
+ 3370 train 4.290013 (lr=1.2111e-03) (hash(x)=150889463)
+ 3380 train 4.340569 (lr=1.2087e-03) (hash(x)=156631928)
+ 3390 train 4.372313 (lr=1.2063e-03) (hash(x)=136001579)
+ 3400 val loss 4.3047
+ 3400 val perplexity 74.0480
+ 3400 train 4.451314 (lr=1.2038e-03) (hash(x)=153954157)
+ 3410 train 4.228926 (lr=1.2014e-03) (hash(x)=150012477)
+ 3420 train 4.591652 (lr=1.1989e-03) (hash(x)=160878899)
+ 3430 train 4.628926 (lr=1.1964e-03) (hash(x)=128131106)
+ 3440 train 4.542327 (lr=1.1940e-03) (hash(x)=149611866)
+ 3450 train 4.182538 (lr=1.1915e-03) (hash(x)=166942460)
+ 3460 train 4.334873 (lr=1.1890e-03) (hash(x)=150498414)
+ 3470 train 4.481339 (lr=1.1866e-03) (hash(x)=144342279)
+ 3480 train 4.310098 (lr=1.1841e-03) (hash(x)=148891764)
+ 3490 train 4.276525 (lr=1.1816e-03) (hash(x)=157308188)
+ 3500 val loss 4.2948
+ 3500 val perplexity 73.3210
+ 3500 train 4.191946 (lr=1.1791e-03) (hash(x)=153717336)
+ 3510 train 4.216006 (lr=1.1766e-03) (hash(x)=156083924)
+ 3520 train 4.196649 (lr=1.1741e-03) (hash(x)=144234966)
+ 3530 train 4.293768 (lr=1.1716e-03) (hash(x)=150184381)
+ 3540 train 4.182832 (lr=1.1691e-03) (hash(x)=143011198)
+ 3550 train 4.316899 (lr=1.1666e-03) (hash(x)=148764613)
+ 3560 train 4.082632 (lr=1.1641e-03) (hash(x)=150625329)
+ 3570 train 4.192494 (lr=1.1615e-03) (hash(x)=143981302)
+ 3580 train 4.248079 (lr=1.1590e-03) (hash(x)=154989703)
+ 3590 train 4.374173 (lr=1.1565e-03) (hash(x)=146602526)
+ 3600 val loss 4.2907
+ 3600 val perplexity 73.0205
+ 3600 train 4.075150 (lr=1.1539e-03) (hash(x)=144965161)
+ 3610 train 4.062934 (lr=1.1514e-03) (hash(x)=159452243)
+ 3620 train 4.055789 (lr=1.1489e-03) (hash(x)=144904300)
+ 3630 train 3.966146 (lr=1.1463e-03) (hash(x)=127790212)
+ 3640 train 4.208603 (lr=1.1438e-03) (hash(x)=150863158)
+ 3650 train 4.169408 (lr=1.1412e-03) (hash(x)=145691635)
+ 3660 train 4.148298 (lr=1.1387e-03) (hash(x)=150631264)
+ 3670 train 4.130219 (lr=1.1361e-03) (hash(x)=148646259)
+ 3680 train 4.098463 (lr=1.1335e-03) (hash(x)=139317386)
+ 3690 train 4.200757 (lr=1.1310e-03) (hash(x)=142206916)
+ 3700 val loss 4.3036
+ 3700 val perplexity 73.9631
+ 3700 train 4.207170 (lr=1.1284e-03) (hash(x)=125969741)
+ 3710 train 4.054093 (lr=1.1258e-03) (hash(x)=157183915)
+ 3720 train 4.044124 (lr=1.1232e-03) (hash(x)=150434369)
+ 3730 train 4.239077 (lr=1.1207e-03) (hash(x)=160911722)
+ 3740 train 4.135977 (lr=1.1181e-03) (hash(x)=163306921)
+ 3750 train 4.245700 (lr=1.1155e-03) (hash(x)=149156876)
+ 3760 train 4.380814 (lr=1.1129e-03) (hash(x)=156015970)
+ 3770 train 4.333178 (lr=1.1103e-03) (hash(x)=148222322)
+ 3780 train 4.437116 (lr=1.1077e-03) (hash(x)=172040174)
+ 3790 train 4.341127 (lr=1.1051e-03) (hash(x)=151593070)
+ 3800 val loss 4.2733
+ 3800 val perplexity 71.7614
+ 3800 train 4.090341 (lr=1.1025e-03) (hash(x)=155070487)
+ 3810 train 4.286554 (lr=1.0999e-03) (hash(x)=137073164)
+ 3820 train 4.182597 (lr=1.0973e-03) (hash(x)=147761510)
+ 3830 train 4.108144 (lr=1.0947e-03) (hash(x)=154763021)
+ 3840 train 4.155719 (lr=1.0920e-03) (hash(x)=149622435)
+ 3850 train 4.262024 (lr=1.0894e-03) (hash(x)=150779423)
+ 3860 train 4.230407 (lr=1.0868e-03) (hash(x)=139445120)
+ 3870 train 4.214289 (lr=1.0842e-03) (hash(x)=151884947)
+ 3880 train 4.223768 (lr=1.0815e-03) (hash(x)=139795201)
+ 3890 train 4.259877 (lr=1.0789e-03) (hash(x)=155589956)
+ 3900 val loss 4.2648
+ 3900 val perplexity 71.1531
+ 3900 train 4.189574 (lr=1.0763e-03) (hash(x)=149444644)
+ 3910 train 4.174083 (lr=1.0736e-03) (hash(x)=148538323)
+ 3920 train 4.298960 (lr=1.0710e-03) (hash(x)=144026925)
+ 3930 train 4.184155 (lr=1.0683e-03) (hash(x)=155311200)
+ 3940 train 4.165025 (lr=1.0657e-03) (hash(x)=142410034)
+ 3950 train 4.145735 (lr=1.0630e-03) (hash(x)=145986075)
+ 3960 train 4.048140 (lr=1.0604e-03) (hash(x)=156197753)
+ 3970 train 4.199309 (lr=1.0577e-03) (hash(x)=146981564)
+ 3980 train 4.505698 (lr=1.0551e-03) (hash(x)=145361976)
+ 3990 train 4.235699 (lr=1.0524e-03) (hash(x)=149344475)
+ 4000 val loss 4.2603
+ 4000 val perplexity 70.8284
+ 4000 train 4.119695 (lr=1.0497e-03) (hash(x)=151663033)
+ 4010 train 4.003411 (lr=1.0471e-03) (hash(x)=140915499)
+ 4020 train 4.223140 (lr=1.0444e-03) (hash(x)=159979134)
+ 4030 train 4.146472 (lr=1.0417e-03) (hash(x)=163081433)
+ 4040 train 4.033156 (lr=1.0391e-03) (hash(x)=144890231)
+ 4050 train 4.187871 (lr=1.0364e-03) (hash(x)=138242368)
+ 4060 train 4.092355 (lr=1.0337e-03) (hash(x)=144438406)
+ 4070 train 3.972995 (lr=1.0310e-03) (hash(x)=159294906)
+ 4080 train 4.032986 (lr=1.0284e-03) (hash(x)=143676301)
+ 4090 train 4.259402 (lr=1.0257e-03) (hash(x)=147875296)
+ 4100 val loss 4.2758
+ 4100 val perplexity 71.9386
+ 4100 train 4.321385 (lr=1.0230e-03) (hash(x)=143688282)
+ 4110 train 4.094481 (lr=1.0203e-03) (hash(x)=150086777)
+ 4120 train 4.045703 (lr=1.0176e-03) (hash(x)=154624391)
+ 4130 train 4.227767 (lr=1.0149e-03) (hash(x)=141163032)
+ 4140 train 4.249865 (lr=1.0122e-03) (hash(x)=151038180)
+ 4150 train 4.342282 (lr=1.0095e-03) (hash(x)=144718421)
+ 4160 train 4.245982 (lr=1.0068e-03) (hash(x)=154277459)
+ 4170 train 4.410131 (lr=1.0041e-03) (hash(x)=143765173)
+ 4180 train 4.271582 (lr=1.0014e-03) (hash(x)=161013425)
+ 4190 train 4.234535 (lr=9.9872e-04) (hash(x)=161666854)
+ 4200 val loss 4.2399
+ 4200 val perplexity 69.3994
+ 4200 train 4.147298 (lr=9.9602e-04) (hash(x)=163361651)
+ 4210 train 4.481609 (lr=9.9331e-04) (hash(x)=153491528)
+ 4220 train 4.275284 (lr=9.9060e-04) (hash(x)=164410385)
+ 4230 train 4.233717 (lr=9.8789e-04) (hash(x)=175191816)
+ 4240 train 4.096622 (lr=9.8518e-04) (hash(x)=133924818)
+ 4250 train 4.320632 (lr=9.8247e-04) (hash(x)=152932845)
+ 4260 train 4.194882 (lr=9.7975e-04) (hash(x)=141987318)
+ 4270 train 4.291714 (lr=9.7704e-04) (hash(x)=142521279)
+ 4280 train 4.158590 (lr=9.7432e-04) (hash(x)=147707669)
+ 4290 train 4.206725 (lr=9.7160e-04) (hash(x)=152245539)
+ 4300 val loss 4.2259
+ 4300 val perplexity 68.4342
+ 4300 train 4.101810 (lr=9.6888e-04) (hash(x)=153619361)
+ 4310 train 4.311889 (lr=9.6616e-04) (hash(x)=142614473)
+ 4320 train 4.143874 (lr=9.6344e-04) (hash(x)=142689858)
+ 4330 train 4.160812 (lr=9.6071e-04) (hash(x)=140837891)
+ 4340 train 4.149849 (lr=9.5799e-04) (hash(x)=131790760)
+ 4350 train 4.305469 (lr=9.5526e-04) (hash(x)=147186292)
+ 4360 train 4.121147 (lr=9.5253e-04) (hash(x)=148140562)
+ 4370 train 4.228862 (lr=9.4980e-04) (hash(x)=156409913)
+ 4380 train 4.142556 (lr=9.4708e-04) (hash(x)=145126034)
+ 4390 train 4.092573 (lr=9.4435e-04) (hash(x)=147368423)
+ 4400 val loss 4.2313
+ 4400 val perplexity 68.8075
+ 4400 train 4.566331 (lr=9.4161e-04) (hash(x)=168527064)
+ 4410 train 4.084151 (lr=9.3888e-04) (hash(x)=150152773)
+ 4420 train 4.176479 (lr=9.3615e-04) (hash(x)=155472304)
+ 4430 train 4.103662 (lr=9.3342e-04) (hash(x)=147299265)
+ 4440 train 4.058784 (lr=9.3068e-04) (hash(x)=140659522)
+ 4450 train 4.127119 (lr=9.2795e-04) (hash(x)=147169111)
+ 4460 train 4.197248 (lr=9.2521e-04) (hash(x)=144098557)
+ 4470 train 3.949067 (lr=9.2247e-04) (hash(x)=161957507)
+ 4480 train 3.980549 (lr=9.1974e-04) (hash(x)=154064204)
+ 4490 train 4.115980 (lr=9.1700e-04) (hash(x)=153999958)
+ 4500 val loss 4.2432
+ 4500 val perplexity 69.6283
+ 4500 train 4.261584 (lr=9.1426e-04) (hash(x)=125588037)
+ 4510 train 4.072170 (lr=9.1152e-04) (hash(x)=142522186)
+ 4520 train 3.913814 (lr=9.0878e-04) (hash(x)=152736893)
+ 4530 train 4.145648 (lr=9.0604e-04) (hash(x)=148091275)
+ 4540 train 4.266252 (lr=9.0330e-04) (hash(x)=147663019)
+ 4550 train 4.337443 (lr=9.0056e-04) (hash(x)=151491857)
+ 4560 train 4.227212 (lr=8.9782e-04) (hash(x)=161757513)
+ 4570 train 4.212447 (lr=8.9508e-04) (hash(x)=165448260)
+ 4580 train 4.347905 (lr=8.9234e-04) (hash(x)=151620292)
+ 4590 train 4.432113 (lr=8.8960e-04) (hash(x)=161877852)
+ 4600 val loss 4.2045
+ 4600 val perplexity 66.9851
+ 4600 train 4.179208 (lr=8.8685e-04) (hash(x)=143710941)
+ 4610 train 4.281816 (lr=8.8411e-04) (hash(x)=152244871)
+ 4620 train 4.229055 (lr=8.8137e-04) (hash(x)=161268167)
+ 4630 train 4.217800 (lr=8.7863e-04) (hash(x)=153904677)
+ 4640 train 4.176593 (lr=8.7589e-04) (hash(x)=152593758)
+ 4650 train 4.328807 (lr=8.7315e-04) (hash(x)=159047122)
+ 4660 train 4.396957 (lr=8.7040e-04) (hash(x)=167370244)
+ 4670 train 4.167033 (lr=8.6766e-04) (hash(x)=140357684)
+ 4680 train 4.321832 (lr=8.6492e-04) (hash(x)=140547853)
+ 4690 train 4.502204 (lr=8.6218e-04) (hash(x)=143000949)
+ 4700 val loss 4.1974
+ 4700 val perplexity 66.5150
+ 4700 train 4.136136 (lr=8.5944e-04) (hash(x)=150952742)
+ 4710 train 4.180377 (lr=8.5670e-04) (hash(x)=157964406)
+ 4720 train 4.073139 (lr=8.5396e-04) (hash(x)=147653608)
+ 4730 train 4.148001 (lr=8.5122e-04) (hash(x)=165545764)
+ 4740 train 4.129364 (lr=8.4848e-04) (hash(x)=146744631)
+ 4750 train 4.216397 (lr=8.4574e-04) (hash(x)=153635164)
+ 4760 train 4.266573 (lr=8.4300e-04) (hash(x)=153217508)
+ 4770 train 4.300474 (lr=8.4026e-04) (hash(x)=169622743)
+ 4780 train 4.139132 (lr=8.3753e-04) (hash(x)=138018297)
+ 4790 train 4.010749 (lr=8.3479e-04) (hash(x)=156067303)
+ 4800 val loss 4.1974
+ 4800 val perplexity 66.5157
+ 4800 train 4.177917 (lr=8.3205e-04) (hash(x)=145323659)
+ 4810 train 4.061078 (lr=8.2932e-04) (hash(x)=152587772)
+ 4820 train 4.288577 (lr=8.2658e-04) (hash(x)=148481485)
+ 4830 train 4.063444 (lr=8.2385e-04) (hash(x)=154219664)
+ 4840 train 3.943269 (lr=8.2112e-04) (hash(x)=156853645)
+ 4850 train 4.032576 (lr=8.1839e-04) (hash(x)=152329702)
+ 4860 train 4.023046 (lr=8.1565e-04) (hash(x)=136684779)
+ 4870 train 4.098526 (lr=8.1292e-04) (hash(x)=143260367)
+ 4880 train 4.052032 (lr=8.1020e-04) (hash(x)=150910947)
+ 4890 train 4.073494 (lr=8.0747e-04) (hash(x)=179249925)
+ 4900 val loss 4.2033
+ 4900 val perplexity 66.9095
+ 4900 train 4.226156 (lr=8.0474e-04) (hash(x)=153151397)
+ 4910 train 4.180113 (lr=8.0201e-04) (hash(x)=144101891)
+ 4920 train 4.068473 (lr=7.9929e-04) (hash(x)=144994952)
+ 4930 train 4.357684 (lr=7.9656e-04) (hash(x)=155390878)
+ 4940 train 4.141274 (lr=7.9384e-04) (hash(x)=141359221)
+ 4950 train 4.046164 (lr=7.9112e-04) (hash(x)=151570011)
+ 4960 train 4.258111 (lr=7.8840e-04) (hash(x)=146918855)
+ 4970 train 4.196177 (lr=7.8568e-04) (hash(x)=148647362)
+ 4980 train 4.148799 (lr=7.8296e-04) (hash(x)=159753562)
+ 4990 train 4.101168 (lr=7.8025e-04) (hash(x)=147683447)
+ 5000 val loss 4.1947
+ 5000 val perplexity 66.3354
+ 5000 train 4.243451 (lr=7.7753e-04) (hash(x)=143182059)
+ 5010 train 4.047309 (lr=7.7482e-04) (hash(x)=138657407)
+ 5020 train 4.154434 (lr=7.7211e-04) (hash(x)=151830497)
+ 5030 train 4.155403 (lr=7.6940e-04) (hash(x)=147727694)
+ 5040 train 3.997037 (lr=7.6669e-04) (hash(x)=151957014)
+ 5050 train 4.262687 (lr=7.6398e-04) (hash(x)=147828693)
+ 5060 train 4.329477 (lr=7.6128e-04) (hash(x)=158589700)
+ 5070 train 4.199682 (lr=7.5858e-04) (hash(x)=155662163)
+ 5080 train 4.315152 (lr=7.5587e-04) (hash(x)=157797383)
+ 5090 train 4.180853 (lr=7.5317e-04) (hash(x)=153848957)
+ 5100 val loss 4.1798
+ 5100 val perplexity 65.3550
+ 5100 train 4.129765 (lr=7.5048e-04) (hash(x)=170083586)
+ 5110 train 4.050482 (lr=7.4778e-04) (hash(x)=153800138)
+ 5120 train 4.156245 (lr=7.4509e-04) (hash(x)=153526228)
+ 5130 train 4.160551 (lr=7.4239e-04) (hash(x)=158650342)
+ 5140 train 4.155452 (lr=7.3970e-04) (hash(x)=153163034)
+ 5150 train 4.145086 (lr=7.3701e-04) (hash(x)=155281294)
+ 5160 train 4.131432 (lr=7.3433e-04) (hash(x)=147731627)
+ 5170 train 4.110987 (lr=7.3164e-04) (hash(x)=146388976)
+ 5180 train 4.323846 (lr=7.2896e-04) (hash(x)=140323065)
+ 5190 train 4.224886 (lr=7.2628e-04) (hash(x)=150406523)
+ 5200 val loss 4.1739
+ 5200 val perplexity 64.9655
+ 5200 train 4.028774 (lr=7.2361e-04) (hash(x)=149363919)
+ 5210 train 4.174520 (lr=7.2093e-04) (hash(x)=164390214)
+ 5220 train 4.127351 (lr=7.1826e-04) (hash(x)=153652984)
+ 5230 train 4.102473 (lr=7.1559e-04) (hash(x)=139542770)
+ 5240 train 3.983514 (lr=7.1292e-04) (hash(x)=140115288)
+ 5250 train 4.111379 (lr=7.1025e-04) (hash(x)=142158900)
+ 5260 train 4.036632 (lr=7.0759e-04) (hash(x)=156019449)
+ 5270 train 4.145755 (lr=7.0493e-04) (hash(x)=150776850)
+ 5280 train 4.105478 (lr=7.0227e-04) (hash(x)=151358692)
+ 5290 train 4.068083 (lr=6.9962e-04) (hash(x)=137112863)
+ 5300 val loss 4.1712
+ 5300 val perplexity 64.7923
+ 5300 train 4.000528 (lr=6.9696e-04) (hash(x)=152033784)
+ 5310 train 4.046142 (lr=6.9431e-04) (hash(x)=147785712)
+ 5320 train 4.177117 (lr=6.9166e-04) (hash(x)=156426339)
+ 5330 train 4.128112 (lr=6.8902e-04) (hash(x)=161907693)
+ 5340 train 4.197877 (lr=6.8638e-04) (hash(x)=156414094)
+ 5350 train 4.217412 (lr=6.8374e-04) (hash(x)=141712428)
+ 5360 train 4.170516 (lr=6.8110e-04) (hash(x)=156873601)
+ 5370 train 4.150231 (lr=6.7847e-04) (hash(x)=149863781)
+ 5380 train 4.151899 (lr=6.7584e-04) (hash(x)=148183971)
+ 5390 train 4.112661 (lr=6.7321e-04) (hash(x)=151542334)
+ 5400 val loss 4.1455
+ 5400 val perplexity 63.1507
+ 5400 train 4.391319 (lr=6.7059e-04) (hash(x)=154614289)
+ 5410 train 4.132742 (lr=6.6796e-04) (hash(x)=169124671)
+ 5420 train 4.159767 (lr=6.6534e-04) (hash(x)=159963738)
+ 5430 train 4.120434 (lr=6.6273e-04) (hash(x)=155162691)
+ 5440 train 4.155738 (lr=6.6012e-04) (hash(x)=135981908)
+ 5450 train 4.238271 (lr=6.5751e-04) (hash(x)=145055580)
+ 5460 train 4.097481 (lr=6.5490e-04) (hash(x)=150367825)
+ 5470 train 4.188226 (lr=6.5230e-04) (hash(x)=151034173)
+ 5480 train 4.097782 (lr=6.4970e-04) (hash(x)=155581766)
+ 5490 train 4.126717 (lr=6.4710e-04) (hash(x)=136973993)
+ 5500 val loss 4.1405
+ 5500 val perplexity 62.8314
+ 5500 train 4.296653 (lr=6.4451e-04) (hash(x)=157745174)
+ 5510 train 4.249143 (lr=6.4192e-04) (hash(x)=144628894)
+ 5520 train 4.134798 (lr=6.3934e-04) (hash(x)=135056684)
+ 5530 train 4.121049 (lr=6.3675e-04) (hash(x)=143418640)
+ 5540 train 4.101325 (lr=6.3417e-04) (hash(x)=144464984)
+ 5550 train 4.306342 (lr=6.3160e-04) (hash(x)=155888745)
+ 5560 train 3.965852 (lr=6.2903e-04) (hash(x)=149122683)
+ 5570 train 4.009026 (lr=6.2646e-04) (hash(x)=154573954)
+ 5580 train 4.113039 (lr=6.2390e-04) (hash(x)=149339361)
+ 5590 train 4.013528 (lr=6.2134e-04) (hash(x)=154738453)
+ 5600 val loss 4.1444
+ 5600 val perplexity 63.0819
+ 5600 train 4.127007 (lr=6.1878e-04) (hash(x)=147693222)
+ 5610 train 3.892473 (lr=6.1623e-04) (hash(x)=142140192)
+ 5620 train 4.034860 (lr=6.1368e-04) (hash(x)=148677209)
+ 5630 train 4.101720 (lr=6.1113e-04) (hash(x)=143495554)
+ 5640 train 4.184603 (lr=6.0859e-04) (hash(x)=153202964)
+ 5650 train 4.110519 (lr=6.0605e-04) (hash(x)=168724867)
+ 5660 train 4.073341 (lr=6.0352e-04) (hash(x)=142816746)
+ 5670 train 4.196189 (lr=6.0099e-04) (hash(x)=164119551)
+ 5680 train 4.067638 (lr=5.9846e-04) (hash(x)=157239121)
+ 5690 train 4.340961 (lr=5.9594e-04) (hash(x)=144662329)
+ 5700 val loss 4.1286
+ 5700 val perplexity 62.0893
+ 5700 train 4.060287 (lr=5.9342e-04) (hash(x)=149784627)
+ 5710 train 4.032685 (lr=5.9091e-04) (hash(x)=152821162)
+ 5720 train 4.245067 (lr=5.8840e-04) (hash(x)=158497530)
+ 5730 train 4.234516 (lr=5.8590e-04) (hash(x)=154583914)
+ 5740 train 4.054588 (lr=5.8340e-04) (hash(x)=142027172)
+ 5750 train 4.299486 (lr=5.8090e-04) (hash(x)=119227402)
+ 5760 train 4.145566 (lr=5.7841e-04) (hash(x)=153248187)
+ 5770 train 4.017678 (lr=5.7592e-04) (hash(x)=147951994)
+ 5780 train 4.028544 (lr=5.7344e-04) (hash(x)=138935284)
+ 5790 train 4.121736 (lr=5.7096e-04) (hash(x)=151786681)
+ 5800 val loss 4.1437
+ 5800 val perplexity 63.0349
+ 5800 train 4.161643 (lr=5.6849e-04) (hash(x)=158620729)
+ 5810 train 4.098246 (lr=5.6602e-04) (hash(x)=178115789)
+ 5820 train 4.026398 (lr=5.6355e-04) (hash(x)=149092790)
+ 5830 train 4.203728 (lr=5.6109e-04) (hash(x)=148269833)
+ 5840 train 4.133464 (lr=5.5864e-04) (hash(x)=163714738)
+ 5850 train 4.114130 (lr=5.5618e-04) (hash(x)=144111890)
+ 5860 train 4.074070 (lr=5.5374e-04) (hash(x)=170702475)
+ 5870 train 4.019765 (lr=5.5130e-04) (hash(x)=155970835)
+ 5880 train 4.024945 (lr=5.4886e-04) (hash(x)=146695463)
+ 5890 train 4.089661 (lr=5.4643e-04) (hash(x)=151603187)
+ 5900 val loss 4.1251
+ 5900 val perplexity 61.8761
+ 5900 train 4.110221 (lr=5.4400e-04) (hash(x)=159763910)
+ 5910 train 4.143404 (lr=5.4158e-04) (hash(x)=149555230)
+ 5920 train 4.153804 (lr=5.3916e-04) (hash(x)=138048333)
+ 5930 train 4.048625 (lr=5.3675e-04) (hash(x)=148668256)
+ 5940 train 4.190791 (lr=5.3434e-04) (hash(x)=148352543)
+ 5950 train 4.205280 (lr=5.3194e-04) (hash(x)=163457488)
+ 5960 train 4.276459 (lr=5.2954e-04) (hash(x)=155306804)
+ 5970 train 4.122219 (lr=5.2715e-04) (hash(x)=146576143)
+ 5980 train 4.137489 (lr=5.2476e-04) (hash(x)=144668956)
+ 5990 train 4.097038 (lr=5.2238e-04) (hash(x)=156098617)
+ 6000 val loss 4.1088
+ 6000 val perplexity 60.8706
+ 6000 train 4.073006 (lr=5.2000e-04) (hash(x)=147640561)
+ 6010 train 4.144456 (lr=5.1763e-04) (hash(x)=161223074)
+ 6020 train 4.129782 (lr=5.1526e-04) (hash(x)=156308454)
+ 6030 train 4.109142 (lr=5.1290e-04) (hash(x)=142553975)
+ 6040 train 4.074658 (lr=5.1054e-04) (hash(x)=142039829)
+ 6050 train 4.146600 (lr=5.0819e-04) (hash(x)=156329296)
+ 6060 train 4.076528 (lr=5.0585e-04) (hash(x)=148505537)
+ 6070 train 3.968623 (lr=5.0351e-04) (hash(x)=151801272)
+ 6080 train 4.054537 (lr=5.0117e-04) (hash(x)=159908287)
+ 6090 train 4.044281 (lr=4.9885e-04) (hash(x)=148714721)
+ 6100 val loss 4.1038
+ 6100 val perplexity 60.5726
+ 6100 train 4.109980 (lr=4.9652e-04) (hash(x)=156613394)
+ 6110 train 3.967459 (lr=4.9420e-04) (hash(x)=143286705)
+ 6120 train 4.178342 (lr=4.9189e-04) (hash(x)=150191141)
+ 6130 train 4.055149 (lr=4.8959e-04) (hash(x)=149855624)
+ 6140 train 4.037532 (lr=4.8729e-04) (hash(x)=157312372)
+ 6150 train 4.163954 (lr=4.8499e-04) (hash(x)=148262730)
+ 6160 train 4.080248 (lr=4.8270e-04) (hash(x)=147032785)
+ 6170 train 4.004763 (lr=4.8042e-04) (hash(x)=161213064)
+ 6180 train 3.880142 (lr=4.7814e-04) (hash(x)=144538373)
+ 6190 train 3.936374 (lr=4.7587e-04) (hash(x)=144688493)
+ 6200 val loss 4.1019
+ 6200 val perplexity 60.4531
+ 6200 train 4.010356 (lr=4.7360e-04) (hash(x)=186221290)
+ 6210 train 4.014867 (lr=4.7134e-04) (hash(x)=139529352)
+ 6220 train 4.179955 (lr=4.6909e-04) (hash(x)=138081469)
+ 6230 train 4.053678 (lr=4.6684e-04) (hash(x)=152979409)
+ 6240 train 4.036180 (lr=4.6460e-04) (hash(x)=157848424)
+ 6250 train 3.924152 (lr=4.6236e-04) (hash(x)=150770915)
+ 6260 train 4.024043 (lr=4.6013e-04) (hash(x)=153769379)
+ 6270 train 3.955381 (lr=4.5790e-04) (hash(x)=153318361)
+ 6280 train 3.948435 (lr=4.5569e-04) (hash(x)=143366327)
+ 6290 train 4.116281 (lr=4.5347e-04) (hash(x)=160684845)
+ 6300 val loss 4.0853
+ 6300 val perplexity 59.4595
+ 6300 train 4.001014 (lr=4.5127e-04) (hash(x)=152081419)
+ 6310 train 3.960897 (lr=4.4907e-04) (hash(x)=146626797)
+ 6320 train 4.147958 (lr=4.4688e-04) (hash(x)=155291923)
+ 6330 train 4.022735 (lr=4.4469e-04) (hash(x)=140920951)
+ 6340 train 4.100725 (lr=4.4251e-04) (hash(x)=146642359)
+ 6350 train 4.075447 (lr=4.4033e-04) (hash(x)=160486550)
+ 6360 train 4.031275 (lr=4.3817e-04) (hash(x)=151833082)
+ 6370 train 4.011332 (lr=4.3600e-04) (hash(x)=124740362)
+ 6380 train 4.168252 (lr=4.3385e-04) (hash(x)=153952434)
+ 6390 train 4.202826 (lr=4.3170e-04) (hash(x)=145864314)
+ 6400 val loss 4.0898
+ 6400 val perplexity 59.7285
+ 6400 train 4.089182 (lr=4.2956e-04) (hash(x)=154808349)
+ 6410 train 4.061103 (lr=4.2742e-04) (hash(x)=155037739)
+ 6420 train 3.996108 (lr=4.2529e-04) (hash(x)=151155588)
+ 6430 train 3.893414 (lr=4.2317e-04) (hash(x)=146548556)
+ 6440 train 4.061685 (lr=4.2105e-04) (hash(x)=152706474)
+ 6450 train 4.047606 (lr=4.1895e-04) (hash(x)=154207794)
+ 6460 train 3.924859 (lr=4.1684e-04) (hash(x)=141590515)
+ 6470 train 3.967111 (lr=4.1475e-04) (hash(x)=157673109)
+ 6480 train 3.971589 (lr=4.1266e-04) (hash(x)=155842980)
+ 6490 train 4.053762 (lr=4.1058e-04) (hash(x)=143595736)
+ 6500 val loss 4.0874
+ 6500 val perplexity 59.5838
+ 6500 train 4.010974 (lr=4.0850e-04) (hash(x)=159437208)
+ 6510 train 4.016948 (lr=4.0643e-04) (hash(x)=142631317)
+ 6520 train 3.745801 (lr=4.0437e-04) (hash(x)=139009211)
+ 6530 train 4.101014 (lr=4.0231e-04) (hash(x)=145451137)
+ 6540 train 4.052980 (lr=4.0027e-04) (hash(x)=163532779)
+ 6550 train 3.960567 (lr=3.9823e-04) (hash(x)=158084674)
+ 6560 train 3.936872 (lr=3.9619e-04) (hash(x)=142422592)
+ 6570 train 3.854061 (lr=3.9416e-04) (hash(x)=141089289)
+ 6580 train 3.953637 (lr=3.9214e-04) (hash(x)=139712979)
+ 6590 train 3.856697 (lr=3.9013e-04) (hash(x)=145833805)
+ 6600 val loss 4.0865
+ 6600 val perplexity 59.5312
+ 6600 train 3.934482 (lr=3.8813e-04) (hash(x)=157933074)
+ 6610 train 3.861314 (lr=3.8613e-04) (hash(x)=142534769)
+ 6620 train 3.911716 (lr=3.8414e-04) (hash(x)=138456044)
+ 6630 train 4.038729 (lr=3.8215e-04) (hash(x)=145111052)
+ 6640 train 3.995534 (lr=3.8017e-04) (hash(x)=166954515)
+ 6650 train 3.961848 (lr=3.7820e-04) (hash(x)=146889543)
+ 6660 train 3.996976 (lr=3.7624e-04) (hash(x)=143253726)
+ 6670 train 3.913163 (lr=3.7429e-04) (hash(x)=144349844)
+ 6680 train 4.098059 (lr=3.7234e-04) (hash(x)=139911928)
+ 6690 train 4.283506 (lr=3.7040e-04) (hash(x)=143135862)
+ 6700 val loss 4.0738
+ 6700 val perplexity 58.7791
+ 6700 train 4.009466 (lr=3.6847e-04) (hash(x)=161560240)
+ 6710 train 4.095942 (lr=3.6654e-04) (hash(x)=150437730)
+ 6720 train 4.197622 (lr=3.6462e-04) (hash(x)=153035304)
+ 6730 train 3.991101 (lr=3.6271e-04) (hash(x)=139323796)
+ 6740 train 3.990134 (lr=3.6081e-04) (hash(x)=167437923)
+ 6750 train 3.960217 (lr=3.5891e-04) (hash(x)=143814596)
+ 6760 train 4.020265 (lr=3.5702e-04) (hash(x)=143755031)
+ 6770 train 3.975055 (lr=3.5514e-04) (hash(x)=142014038)
+ 6780 train 4.031541 (lr=3.5327e-04) (hash(x)=148465062)
+ 6790 train 4.067742 (lr=3.5140e-04) (hash(x)=154656877)
+ 6800 val loss 4.0715
+ 6800 val perplexity 58.6463
+ 6800 train 4.176820 (lr=3.4955e-04) (hash(x)=155424292)
+ 6810 train 3.807037 (lr=3.4770e-04) (hash(x)=147358081)
+ 6820 train 3.923870 (lr=3.4585e-04) (hash(x)=145387291)
+ 6830 train 3.873622 (lr=3.4402e-04) (hash(x)=146334068)
+ 6840 train 3.957843 (lr=3.4219e-04) (hash(x)=139563766)
+ 6850 train 3.977593 (lr=3.4037e-04) (hash(x)=139095691)
+ 6860 train 3.924802 (lr=3.3856e-04) (hash(x)=142093999)
+ 6870 train 4.035909 (lr=3.3676e-04) (hash(x)=148598823)
+ 6880 train 4.117771 (lr=3.3496e-04) (hash(x)=146486085)
+ 6890 train 3.902429 (lr=3.3318e-04) (hash(x)=165683212)
+ 6900 val loss 4.0728
+ 6900 val perplexity 58.7210
+ 6900 train 4.106917 (lr=3.3140e-04) (hash(x)=148561470)
+ 6910 train 3.793993 (lr=3.2962e-04) (hash(x)=136352432)
+ 6920 train 4.176646 (lr=3.2786e-04) (hash(x)=147640094)
+ 6930 train 4.061341 (lr=3.2611e-04) (hash(x)=152343827)
+ 6940 train 4.044687 (lr=3.2436e-04) (hash(x)=158787971)
+ 6950 train 4.316038 (lr=3.2262e-04) (hash(x)=178133125)
+ 6960 train 4.104434 (lr=3.2089e-04) (hash(x)=167302853)
+ 6970 train 4.047144 (lr=3.1916e-04) (hash(x)=143729435)
+ 6980 train 4.137383 (lr=3.1745e-04) (hash(x)=135857879)
+ 6990 train 4.084717 (lr=3.1574e-04) (hash(x)=155384510)
+ 7000 val loss 4.0596
+ 7000 val perplexity 57.9497
+ 7000 train 4.023391 (lr=3.1404e-04) (hash(x)=141527450)
+ 7010 train 4.069504 (lr=3.1235e-04) (hash(x)=148648498)
+ 7020 train 4.001482 (lr=3.1067e-04) (hash(x)=148073842)
+ 7030 train 4.129344 (lr=3.0899e-04) (hash(x)=160771614)
+ 7040 train 3.906679 (lr=3.0733e-04) (hash(x)=148361330)
+ 7050 train 3.883871 (lr=3.0567e-04) (hash(x)=151212224)
+ 7060 train 4.074808 (lr=3.0402e-04) (hash(x)=155925366)
+ 7070 train 3.936165 (lr=3.0238e-04) (hash(x)=149830576)
+ 7080 train 3.904384 (lr=3.0075e-04) (hash(x)=144849934)
+ 7090 train 3.993180 (lr=2.9912e-04) (hash(x)=150997807)
+ 7100 val loss 4.0543
+ 7100 val perplexity 57.6421
+ 7100 train 4.033813 (lr=2.9751e-04) (hash(x)=151066339)
+ 7110 train 3.934469 (lr=2.9590e-04) (hash(x)=147185781)
+ 7120 train 3.980731 (lr=2.9430e-04) (hash(x)=141765575)
+ 7130 train 3.993372 (lr=2.9271e-04) (hash(x)=144204513)
+ 7140 train 3.822274 (lr=2.9113e-04) (hash(x)=149326253)
+ 7150 train 3.983820 (lr=2.8956e-04) (hash(x)=150409811)
+ 7160 train 3.874244 (lr=2.8799e-04) (hash(x)=144790348)
+ 7170 train 3.999096 (lr=2.8644e-04) (hash(x)=150500939)
+ 7180 train 3.814944 (lr=2.8489e-04) (hash(x)=143552103)
+ 7190 train 3.859600 (lr=2.8335e-04) (hash(x)=157471075)
+ 7200 val loss 4.0579
+ 7200 val perplexity 57.8537
+ 7200 train 3.837742 (lr=2.8182e-04) (hash(x)=155231264)
+ 7210 train 3.927071 (lr=2.8030e-04) (hash(x)=150350401)
+ 7220 train 3.945467 (lr=2.7878e-04) (hash(x)=151952594)
+ 7230 train 4.030033 (lr=2.7728e-04) (hash(x)=145381804)
+ 7240 train 3.914976 (lr=2.7578e-04) (hash(x)=142651222)
+ 7250 train 4.022504 (lr=2.7430e-04) (hash(x)=136115279)
+ 7260 train 4.120305 (lr=2.7282e-04) (hash(x)=154536751)
+ 7270 train 4.075540 (lr=2.7135e-04) (hash(x)=148883836)
+ 7280 train 3.928825 (lr=2.6989e-04) (hash(x)=150606754)
+ 7290 train 4.042625 (lr=2.6844e-04) (hash(x)=165845807)
+ 7300 val loss 4.0463
+ 7300 val perplexity 57.1863
+ 7300 train 4.116895 (lr=2.6700e-04) (hash(x)=150281149)
+ 7310 train 4.068338 (lr=2.6556e-04) (hash(x)=159929281)
+ 7320 train 3.897439 (lr=2.6414e-04) (hash(x)=143074269)
+ 7330 train 4.057128 (lr=2.6272e-04) (hash(x)=147369269)
+ 7340 train 4.111042 (lr=2.6132e-04) (hash(x)=139286601)
+ 7350 train 4.008691 (lr=2.5992e-04) (hash(x)=145263048)
+ 7360 train 3.858882 (lr=2.5853e-04) (hash(x)=144626817)
+ 7370 train 4.062269 (lr=2.5715e-04) (hash(x)=168989813)
+ 7380 train 4.039978 (lr=2.5578e-04) (hash(x)=144790656)
+ 7390 train 3.844556 (lr=2.5442e-04) (hash(x)=141279761)
+ 7400 val loss 4.0432
+ 7400 val perplexity 57.0093
+ 7400 train 3.981868 (lr=2.5306e-04) (hash(x)=148421717)
+ 7410 train 3.977128 (lr=2.5172e-04) (hash(x)=141241548)
+ 7420 train 4.042267 (lr=2.5038e-04) (hash(x)=162434228)
+ 7430 train 3.993505 (lr=2.4906e-04) (hash(x)=156312086)
+ 7440 train 3.950574 (lr=2.4774e-04) (hash(x)=139803064)
+ 7450 train 3.649232 (lr=2.4644e-04) (hash(x)=156361394)
+ 7460 train 3.959004 (lr=2.4514e-04) (hash(x)=141142183)
+ 7470 train 3.788096 (lr=2.4385e-04) (hash(x)=156701683)
+ 7480 train 3.962706 (lr=2.4257e-04) (hash(x)=148549917)
+ 7490 train 3.843719 (lr=2.4130e-04) (hash(x)=143002119)
+ 7500 val loss 4.0448
+ 7500 val perplexity 57.0980
+ 7500 train 3.851630 (lr=2.4004e-04) (hash(x)=146921118)
+ 7510 train 3.858370 (lr=2.3879e-04) (hash(x)=148592146)
+ 7520 train 4.004126 (lr=2.3754e-04) (hash(x)=148456120)
+ 7530 train 4.158412 (lr=2.3631e-04) (hash(x)=163865945)
+ 7540 train 4.051578 (lr=2.3509e-04) (hash(x)=160753039)
+ 7550 train 4.062814 (lr=2.3387e-04) (hash(x)=150275989)
+ 7560 train 4.086408 (lr=2.3267e-04) (hash(x)=141749735)
+ 7570 train 4.054147 (lr=2.3147e-04) (hash(x)=148449999)
+ 7580 train 3.932919 (lr=2.3029e-04) (hash(x)=139153889)
+ 7590 train 4.048338 (lr=2.2911e-04) (hash(x)=147418540)
+ 7600 val loss 4.0320
+ 7600 val perplexity 56.3763
+ 7600 train 4.069643 (lr=2.2794e-04) (hash(x)=150660048)
+ 7610 train 4.052121 (lr=2.2678e-04) (hash(x)=155369080)
+ 7620 train 3.953581 (lr=2.2564e-04) (hash(x)=151926309)
+ 7630 train 4.030202 (lr=2.2450e-04) (hash(x)=149549080)
+ 7640 train 3.921366 (lr=2.2337e-04) (hash(x)=140642882)
+ 7650 train 4.196265 (lr=2.2225e-04) (hash(x)=140218502)
+ 7660 train 4.031204 (lr=2.2114e-04) (hash(x)=153198276)
+ 7670 train 3.888023 (lr=2.2004e-04) (hash(x)=150706960)
+ 7680 train 4.032162 (lr=2.1894e-04) (hash(x)=152025509)
+ 7690 train 3.819033 (lr=2.1786e-04) (hash(x)=147288976)
+ 7700 val loss 4.0332
+ 7700 val perplexity 56.4415
+ 7700 train 3.930063 (lr=2.1679e-04) (hash(x)=148059852)
+ 7710 train 3.749929 (lr=2.1573e-04) (hash(x)=158443334)
+ 7720 train 3.871289 (lr=2.1468e-04) (hash(x)=143467085)
+ 7730 train 3.815771 (lr=2.1363e-04) (hash(x)=138439550)
+ 7740 train 3.946394 (lr=2.1260e-04) (hash(x)=145878152)
+ 7750 train 3.798604 (lr=2.1158e-04) (hash(x)=136874128)
+ 7760 train 3.824095 (lr=2.1056e-04) (hash(x)=148856154)
+ 7770 train 3.850093 (lr=2.0956e-04) (hash(x)=146982119)
+ 7780 train 3.946401 (lr=2.0856e-04) (hash(x)=159846779)
+ 7790 train 3.634008 (lr=2.0758e-04) (hash(x)=141501838)
+ 7800 val loss 4.0367
+ 7800 val perplexity 56.6369
+ 7800 train 3.763717 (lr=2.0660e-04) (hash(x)=148331002)
+ 7810 train 4.068491 (lr=2.0564e-04) (hash(x)=150692304)
+ 7820 train 3.939309 (lr=2.0468e-04) (hash(x)=156841183)
+ 7830 train 3.980107 (lr=2.0373e-04) (hash(x)=139594008)
+ 7840 train 3.919498 (lr=2.0280e-04) (hash(x)=149700138)
+ 7850 train 3.941700 (lr=2.0187e-04) (hash(x)=149327044)
+ 7860 train 4.108171 (lr=2.0096e-04) (hash(x)=148315418)
+ 7870 train 4.050933 (lr=2.0005e-04) (hash(x)=153431778)
+ 7880 train 3.894650 (lr=1.9915e-04) (hash(x)=145049799)
+ 7890 train 4.020423 (lr=1.9827e-04) (hash(x)=150265096)
+ 7900 val loss 4.0240
+ 7900 val perplexity 55.9238
+ 7900 train 3.848468 (lr=1.9739e-04) (hash(x)=164923883)
+ 7910 train 3.908226 (lr=1.9652e-04) (hash(x)=139315738)
+ 7920 train 3.959571 (lr=1.9566e-04) (hash(x)=153031394)
+ 7930 train 3.749157 (lr=1.9482e-04) (hash(x)=148835859)
+ 7940 train 3.879015 (lr=1.9398e-04) (hash(x)=146875217)
+ 7950 train 3.956004 (lr=1.9315e-04) (hash(x)=151003885)
+ 7960 train 3.869174 (lr=1.9233e-04) (hash(x)=141885471)
+ 7970 train 4.071434 (lr=1.9153e-04) (hash(x)=150541583)
+ 7980 train 3.769432 (lr=1.9073e-04) (hash(x)=138626492)
+ 7990 train 3.952919 (lr=1.8994e-04) (hash(x)=149935635)
+ 8000 val loss 4.0243
+ 8000 val perplexity 55.9404
+ 8000 train 3.922093 (lr=1.8917e-04) (hash(x)=143545384)
+ 8010 train 3.781529 (lr=1.8840e-04) (hash(x)=144689366)
+ 8020 train 3.957091 (lr=1.8764e-04) (hash(x)=151299532)
+ 8030 train 3.903553 (lr=1.8689e-04) (hash(x)=131300239)
+ 8040 train 3.801501 (lr=1.8616e-04) (hash(x)=146715955)
+ 8050 train 3.727899 (lr=1.8543e-04) (hash(x)=135376656)
+ 8060 train 3.811551 (lr=1.8471e-04) (hash(x)=141991433)
+ 8070 train 3.768016 (lr=1.8400e-04) (hash(x)=140260574)
+ 8080 train 3.733690 (lr=1.8331e-04) (hash(x)=151492896)
+ 8090 train 3.940285 (lr=1.8262e-04) (hash(x)=143333635)
+ 8100 val loss 4.0283
+ 8100 val perplexity 56.1633
+ 8100 train 3.825931 (lr=1.8194e-04) (hash(x)=160686959)
+ 8110 train 3.826274 (lr=1.8128e-04) (hash(x)=141145704)
+ 8120 train 4.064416 (lr=1.8062e-04) (hash(x)=146967795)
+ 8130 train 3.885343 (lr=1.7997e-04) (hash(x)=147844486)
+ 8140 train 3.941893 (lr=1.7934e-04) (hash(x)=141860136)
+ 8150 train 4.079678 (lr=1.7871e-04) (hash(x)=147696006)
+ 8160 train 3.979321 (lr=1.7810e-04) (hash(x)=151497110)
+ 8170 train 3.983521 (lr=1.7749e-04) (hash(x)=140881859)
+ 8180 train 3.931177 (lr=1.7689e-04) (hash(x)=158865574)
+ 8190 train 3.889834 (lr=1.7631e-04) (hash(x)=161247161)
+ 8200 val loss 4.0209
+ 8200 val perplexity 55.7515
+ 8200 train 3.853966 (lr=1.7573e-04) (hash(x)=156501889)
+ 8210 train 3.973594 (lr=1.7517e-04) (hash(x)=158384162)
+ 8220 train 4.020069 (lr=1.7461e-04) (hash(x)=154815062)
+ 8230 train 3.901694 (lr=1.7407e-04) (hash(x)=145018315)
+ 8240 train 3.939783 (lr=1.7354e-04) (hash(x)=161659129)
+ 8250 train 3.906171 (lr=1.7301e-04) (hash(x)=139863367)
+ 8260 train 3.856577 (lr=1.7250e-04) (hash(x)=149569663)
+ 8270 train 3.612512 (lr=1.7199e-04) (hash(x)=162234378)
+ 8280 train 3.998510 (lr=1.7150e-04) (hash(x)=144371378)
+ 8290 train 3.900618 (lr=1.7102e-04) (hash(x)=148120258)
+ 8300 val loss 4.0212
+ 8300 val perplexity 55.7700
+ 8300 train 3.891476 (lr=1.7055e-04) (hash(x)=142716875)
+ 8310 train 3.912858 (lr=1.7008e-04) (hash(x)=166789234)
+ 8320 train 3.978795 (lr=1.6963e-04) (hash(x)=150613211)
+ 8330 train 3.858101 (lr=1.6919e-04) (hash(x)=139895391)
+ 8340 train 3.967160 (lr=1.6876e-04) (hash(x)=152011203)
+ 8350 train 3.924365 (lr=1.6834e-04) (hash(x)=156010875)
+ 8360 train 3.932877 (lr=1.6793e-04) (hash(x)=152988051)
+ 8370 train 4.098442 (lr=1.6752e-04) (hash(x)=155144327)
+ 8380 train 3.971939 (lr=1.6713e-04) (hash(x)=133642309)
+ 8390 train 4.024613 (lr=1.6675e-04) (hash(x)=149745335)
+ 8400 val loss 4.0136
+ 8400 val perplexity 55.3479
+ 8400 train 4.007542 (lr=1.6639e-04) (hash(x)=154436684)
+ 8410 train 4.046245 (lr=1.6603e-04) (hash(x)=147946630)
+ 8420 train 3.959814 (lr=1.6568e-04) (hash(x)=153947058)
+ 8430 train 4.054209 (lr=1.6534e-04) (hash(x)=159821040)
+ 8440 train 4.011341 (lr=1.6501e-04) (hash(x)=149127348)
+ 8450 train 4.020397 (lr=1.6469e-04) (hash(x)=149421871)
+ 8460 train 3.992570 (lr=1.6439e-04) (hash(x)=142621577)
+ 8470 train 3.987309 (lr=1.6409e-04) (hash(x)=148978213)
+ 8480 train 3.807852 (lr=1.6380e-04) (hash(x)=139959537)
+ 8490 train 3.900011 (lr=1.6353e-04) (hash(x)=144232498)
+ 8500 val loss 4.0116
+ 8500 val perplexity 55.2336
+ 8500 train 4.118513 (lr=1.6326e-04) (hash(x)=147965839)
+ 8510 train 3.996301 (lr=1.6300e-04) (hash(x)=147679836)
+ 8520 train 3.868639 (lr=1.6276e-04) (hash(x)=147486097)
+ 8530 train 4.017868 (lr=1.6253e-04) (hash(x)=151655265)
+ 8540 train 3.915907 (lr=1.6230e-04) (hash(x)=150866632)
+ 8550 train 4.062387 (lr=1.6209e-04) (hash(x)=144355059)
+ 8560 train 3.923198 (lr=1.6188e-04) (hash(x)=145518478)
+ 8570 train 4.062747 (lr=1.6169e-04) (hash(x)=178973323)
+ 8580 train 3.990893 (lr=1.6151e-04) (hash(x)=152773969)
+ 8590 train 3.782759 (lr=1.6134e-04) (hash(x)=141805508)
+ 8600 val loss 4.0163
+ 8600 val perplexity 55.4975
+ 8600 train 3.840646 (lr=1.6117e-04) (hash(x)=145228097)
+ 8610 train 3.857193 (lr=1.6102e-04) (hash(x)=149525694)
+ 8620 train 3.927480 (lr=1.6088e-04) (hash(x)=151651655)
+ 8630 train 4.046357 (lr=1.6075e-04) (hash(x)=144918643)
+ 8640 train 3.897918 (lr=1.6063e-04) (hash(x)=149721879)
+ 8650 train 3.964263 (lr=1.6052e-04) (hash(x)=157684127)
+ 8660 train 3.969104 (lr=1.6042e-04) (hash(x)=152426036)
+ 8670 train 3.889540 (lr=1.6033e-04) (hash(x)=141396342)
+ 8680 train 3.978575 (lr=1.6026e-04) (hash(x)=146459856)
+ 8690 train 4.064189 (lr=1.6019e-04) (hash(x)=151766124)
+ 8700 val loss 4.0070
+ 8700 val perplexity 54.9826
+ 8700 train 4.220052 (lr=1.6013e-04) (hash(x)=152910357)
+ 8710 train 4.004989 (lr=1.6008e-04) (hash(x)=149724642)
+ 8720 train 3.946559 (lr=1.6005e-04) (hash(x)=152483840)
+ 8730 train 3.924124 (lr=1.6002e-04) (hash(x)=147289415)
+ 8740 train 3.959003 (lr=1.6001e-04) (hash(x)=152254131)
+ 8749 val loss 4.0056
+ 8749 val perplexity 54.9060
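For reference, each train line logs step, loss, the current learning rate, and a hash of the input batch; each pair of val lines logs loss and perplexity, which is simply exp(loss). The lr column is consistent with linear warmup over the configured 500 steps followed by cosine decay to one tenth of max_lr at max_steps. A hedged sketch (inferred from the logged values; the actual schedule code is not part of this upload):

```python
import math

# Constants from args.json; MIN_LR = MAX_LR / 10 is inferred from the
# final logged rates (~1.60e-04), not recorded in the config.
MAX_LR, MIN_LR = 1.6e-3, 1.6e-4
WARMUP_STEPS, MAX_STEPS = 500, 8750

def get_lr(step: int) -> float:
    # Linear warmup: reproduces lr=3.2000e-06 at step 0 (0.0016 * 1/500).
    if step < WARMUP_STEPS:
        return MAX_LR * (step + 1) / WARMUP_STEPS
    # Cosine decay from MAX_LR down to MIN_LR over the remaining steps.
    ratio = (step - WARMUP_STEPS) / (MAX_STEPS - WARMUP_STEPS)
    return MIN_LR + 0.5 * (1.0 + math.cos(math.pi * ratio)) * (MAX_LR - MIN_LR)

# Spot checks against the log above:
print(f"{get_lr(0):.4e}")     # 3.2000e-06
print(f"{get_lr(4200):.4e}")  # ~9.96e-04 (log shows 9.9602e-04)
# And the val columns are related by perplexity = exp(loss):
print(math.exp(4.0056))       # ~54.90 (log shows 54.9060)
```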
lr16e-4_total_batch_size61440_baseline_seed1338/model_08749.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8c20a788466bccde2a67c3887d5c690fd33d949dea9b4812b5a326f64507b310
+ size 92843394
lr16e-4_total_batch_size61440_baseline_seed1338/optimizer_08749.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:54d0daae3dd209136d94fe1067abea720fa99355842628448471413f7325dbb8
+ size 179406214
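The relative sizes of the last two artifacts pass a quick sanity check: the optimizer state is roughly twice the model checkpoint, which is what you would expect if an Adam-family optimizer is storing two moment buffers per parameter (a hedged inference; the optimizer class is not recorded in args.json):

```python
# Hedged consistency check: Adam-style optimizers keep two moment
# tensors (exp_avg, exp_avg_sq) per parameter, so the serialized
# state should come out near 2x the parameter payload.
model_bytes = 92_843_394       # model_08749.pt
optimizer_bytes = 179_406_214  # optimizer_08749.pt
print(f"{optimizer_bytes / model_bytes:.2f}x")  # ~1.93x
```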