andrew-healey committed on
Commit 034e560 · verified · 1 Parent(s): 10f34aa

Upload folder using huggingface_hub

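The commit message says the folder was uploaded with the huggingface_hub client. A minimal sketch of such an upload is shown below; the target repo_id is a placeholder, since the destination repository is not named in this commit view.

```python
# Sketch of the kind of upload described by the commit message.
# repo_id is a placeholder, not the actual repository.
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="lr10e-4_total_batch_size61440_baseline_seed1338",
    path_in_repo="lr10e-4_total_batch_size61440_baseline_seed1338",
    repo_id="your-username/your-repo",  # placeholder
    commit_message="Upload folder using huggingface_hub",
)
```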
lr10e-4_total_batch_size61440_baseline_seed1338/args.json ADDED
@@ -0,0 +1 @@
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_11/lr10e-4_total_batch_size61440_baseline_seed1338", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_11", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1338, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.001, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "10e-4_61440", "n_embd": 256}
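A short, hypothetical snippet for inspecting this run configuration after downloading args.json; the file path matches the one committed above, and the printed values come directly from the JSON.

```python
import json

# Load the run configuration committed above and print a few key hyperparameters.
with open("lr10e-4_total_batch_size61440_baseline_seed1338/args.json") as f:
    args = json.load(f)

print(args["max_lr"], args["total_batch_size"], args["max_steps"], args["n_embd"])
# Expected from the JSON above: 0.001 61440 8750 256
```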
lr10e-4_total_batch_size61440_baseline_seed1338/dataloader_08749.pt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:953385078aa3787b69fc6857dfd48b0a2cd2f4d27c6f8892e01211aca53d07f5
3
+ size 964
lr10e-4_total_batch_size61440_baseline_seed1338/log2.txt ADDED
@@ -0,0 +1,1054 @@
1
+ max_steps: 8750
2
+ 0 val loss 11.2202
3
+ 0 val perplexity 74626.3672
4
+ 0 train 11.227304 (lr=2.0000e-06) (hash(x)=150327452)
5
+ 10 train 10.094975 (lr=2.2000e-05) (hash(x)=165603215)
6
+ 20 train 9.697444 (lr=4.2000e-05) (hash(x)=142061326)
7
+ 30 train 9.268339 (lr=6.2000e-05) (hash(x)=147690730)
8
+ 40 train 8.829544 (lr=8.2000e-05) (hash(x)=160872542)
9
+ 50 train 8.305347 (lr=1.0200e-04) (hash(x)=166441190)
10
+ 60 train 7.897141 (lr=1.2200e-04) (hash(x)=140536766)
11
+ 70 train 7.646972 (lr=1.4200e-04) (hash(x)=146052944)
12
+ 80 train 7.537001 (lr=1.6200e-04) (hash(x)=140542391)
13
+ 90 train 7.412724 (lr=1.8200e-04) (hash(x)=147098660)
14
+ 100 val loss 7.4117
15
+ 100 val perplexity 1655.1925
16
+ 100 train 7.387587 (lr=2.0200e-04) (hash(x)=166780046)
17
+ 110 train 7.151572 (lr=2.2200e-04) (hash(x)=143376584)
18
+ 120 train 7.080782 (lr=2.4200e-04) (hash(x)=145337630)
19
+ 130 train 7.046910 (lr=2.6200e-04) (hash(x)=147524744)
20
+ 140 train 6.938785 (lr=2.8200e-04) (hash(x)=155587442)
21
+ 150 train 6.823975 (lr=3.0200e-04) (hash(x)=159835303)
22
+ 160 train 6.649853 (lr=3.2200e-04) (hash(x)=142171630)
23
+ 170 train 6.542361 (lr=3.4200e-04) (hash(x)=136930463)
24
+ 180 train 6.541846 (lr=3.6200e-04) (hash(x)=153579148)
25
+ 190 train 6.419317 (lr=3.8200e-04) (hash(x)=140352771)
26
+ 200 val loss 6.6121
27
+ 200 val perplexity 744.0649
28
+ 200 train 6.323372 (lr=4.0200e-04) (hash(x)=155040610)
29
+ 210 train 6.251032 (lr=4.2200e-04) (hash(x)=141273862)
30
+ 220 train 6.483278 (lr=4.4200e-04) (hash(x)=165326618)
31
+ 230 train 6.224693 (lr=4.6200e-04) (hash(x)=150122120)
32
+ 240 train 6.012254 (lr=4.8200e-04) (hash(x)=143024679)
33
+ 250 train 6.029131 (lr=5.0200e-04) (hash(x)=130190460)
34
+ 260 train 6.116913 (lr=5.2200e-04) (hash(x)=151890219)
35
+ 270 train 5.904250 (lr=5.4200e-04) (hash(x)=143685291)
36
+ 280 train 6.220187 (lr=5.6200e-04) (hash(x)=137108194)
37
+ 290 train 6.362343 (lr=5.8200e-04) (hash(x)=157920866)
38
+ 300 val loss 6.1364
39
+ 300 val perplexity 462.3962
40
+ 300 train 6.057953 (lr=6.0200e-04) (hash(x)=155504036)
41
+ 310 train 6.175628 (lr=6.2200e-04) (hash(x)=144551186)
42
+ 320 train 6.170208 (lr=6.4200e-04) (hash(x)=161180086)
43
+ 330 train 5.921549 (lr=6.6200e-04) (hash(x)=141651471)
44
+ 340 train 6.032030 (lr=6.8200e-04) (hash(x)=157490741)
45
+ 350 train 6.011773 (lr=7.0200e-04) (hash(x)=137347213)
46
+ 360 train 6.267807 (lr=7.2200e-04) (hash(x)=171273659)
47
+ 370 train 5.904537 (lr=7.4200e-04) (hash(x)=152118871)
48
+ 380 train 5.896435 (lr=7.6200e-04) (hash(x)=161338097)
49
+ 390 train 5.960093 (lr=7.8200e-04) (hash(x)=152404537)
50
+ 400 val loss 5.8246
51
+ 400 val perplexity 338.5137
52
+ 400 train 5.661698 (lr=8.0200e-04) (hash(x)=143823248)
53
+ 410 train 5.919258 (lr=8.2200e-04) (hash(x)=161686752)
54
+ 420 train 5.644386 (lr=8.4200e-04) (hash(x)=155391312)
55
+ 430 train 5.566195 (lr=8.6200e-04) (hash(x)=142145507)
56
+ 440 train 5.709962 (lr=8.8200e-04) (hash(x)=121931091)
57
+ 450 train 5.642570 (lr=9.0200e-04) (hash(x)=156260416)
58
+ 460 train 5.790680 (lr=9.2200e-04) (hash(x)=169781623)
59
+ 470 train 5.556425 (lr=9.4200e-04) (hash(x)=150131545)
60
+ 480 train 5.580823 (lr=9.6200e-04) (hash(x)=150443184)
61
+ 490 train 5.593628 (lr=9.8200e-04) (hash(x)=149368198)
62
+ 500 val loss 5.6062
63
+ 500 val perplexity 272.1055
64
+ 500 train 5.490559 (lr=1.0000e-03) (hash(x)=143734685)
65
+ 510 train 5.520675 (lr=1.0000e-03) (hash(x)=148943891)
66
+ 520 train 5.532617 (lr=9.9999e-04) (hash(x)=149070354)
67
+ 530 train 5.510972 (lr=9.9997e-04) (hash(x)=153601219)
68
+ 540 train 5.520864 (lr=9.9995e-04) (hash(x)=174970654)
69
+ 550 train 5.455142 (lr=9.9992e-04) (hash(x)=160013925)
70
+ 560 train 5.302090 (lr=9.9988e-04) (hash(x)=152096988)
71
+ 570 train 5.417586 (lr=9.9984e-04) (hash(x)=157203924)
72
+ 580 train 5.195374 (lr=9.9979e-04) (hash(x)=138582113)
73
+ 590 train 5.205650 (lr=9.9974e-04) (hash(x)=157451775)
74
+ 600 val loss 5.4366
75
+ 600 val perplexity 229.6494
76
+ 600 train 5.137094 (lr=9.9967e-04) (hash(x)=150678249)
77
+ 610 train 5.179779 (lr=9.9961e-04) (hash(x)=150248072)
78
+ 620 train 5.081443 (lr=9.9953e-04) (hash(x)=148286909)
79
+ 630 train 5.041496 (lr=9.9945e-04) (hash(x)=141197178)
80
+ 640 train 5.121722 (lr=9.9936e-04) (hash(x)=144988391)
81
+ 650 train 5.047835 (lr=9.9927e-04) (hash(x)=149073315)
82
+ 660 train 4.990276 (lr=9.9917e-04) (hash(x)=145239943)
83
+ 670 train 5.017312 (lr=9.9906e-04) (hash(x)=146657426)
84
+ 680 train 5.263485 (lr=9.9894e-04) (hash(x)=142323429)
85
+ 690 train 5.242060 (lr=9.9882e-04) (hash(x)=142724342)
86
+ 700 val loss 5.2891
87
+ 700 val perplexity 198.1621
88
+ 700 train 5.154198 (lr=9.9870e-04) (hash(x)=175802021)
89
+ 710 train 5.205874 (lr=9.9856e-04) (hash(x)=148902690)
90
+ 720 train 5.296787 (lr=9.9842e-04) (hash(x)=155151320)
91
+ 730 train 5.141076 (lr=9.9828e-04) (hash(x)=142078914)
92
+ 740 train 5.302879 (lr=9.9812e-04) (hash(x)=155634484)
93
+ 750 train 5.281301 (lr=9.9796e-04) (hash(x)=171034639)
94
+ 760 train 5.222290 (lr=9.9780e-04) (hash(x)=142470148)
95
+ 770 train 5.173294 (lr=9.9762e-04) (hash(x)=156024448)
96
+ 780 train 5.087138 (lr=9.9744e-04) (hash(x)=151369417)
97
+ 790 train 5.028801 (lr=9.9726e-04) (hash(x)=139260056)
98
+ 800 val loss 5.1323
99
+ 800 val perplexity 169.4109
100
+ 800 train 5.244038 (lr=9.9707e-04) (hash(x)=158681215)
101
+ 810 train 5.051548 (lr=9.9687e-04) (hash(x)=144884161)
102
+ 820 train 5.089558 (lr=9.9666e-04) (hash(x)=146977514)
103
+ 830 train 4.973002 (lr=9.9645e-04) (hash(x)=148983636)
104
+ 840 train 5.009206 (lr=9.9623e-04) (hash(x)=157824542)
105
+ 850 train 4.983213 (lr=9.9601e-04) (hash(x)=152116061)
106
+ 860 train 5.081656 (lr=9.9578e-04) (hash(x)=153743703)
107
+ 870 train 5.197436 (lr=9.9554e-04) (hash(x)=149913335)
108
+ 880 train 4.982324 (lr=9.9530e-04) (hash(x)=151096434)
109
+ 890 train 4.954268 (lr=9.9505e-04) (hash(x)=145848702)
110
+ 900 val loss 5.0116
111
+ 900 val perplexity 150.1395
112
+ 900 train 4.934205 (lr=9.9479e-04) (hash(x)=146108145)
113
+ 910 train 4.901026 (lr=9.9453e-04) (hash(x)=146903806)
114
+ 920 train 4.841945 (lr=9.9426e-04) (hash(x)=149225019)
115
+ 930 train 4.780059 (lr=9.9398e-04) (hash(x)=146110422)
116
+ 940 train 4.864466 (lr=9.9370e-04) (hash(x)=161333614)
117
+ 950 train 4.775730 (lr=9.9341e-04) (hash(x)=147598108)
118
+ 960 train 4.907565 (lr=9.9311e-04) (hash(x)=158133101)
119
+ 970 train 4.729620 (lr=9.9281e-04) (hash(x)=152199591)
120
+ 980 train 4.688633 (lr=9.9250e-04) (hash(x)=152359036)
121
+ 990 train 4.736023 (lr=9.9219e-04) (hash(x)=145166335)
122
+ 1000 val loss 4.9379
123
+ 1000 val perplexity 139.4723
124
+ 1000 train 4.636187 (lr=9.9187e-04) (hash(x)=154996086)
125
+ 1010 train 4.585032 (lr=9.9154e-04) (hash(x)=149406975)
126
+ 1020 train 4.686368 (lr=9.9121e-04) (hash(x)=144618270)
127
+ 1030 train 4.542612 (lr=9.9087e-04) (hash(x)=148306536)
128
+ 1040 train 4.578196 (lr=9.9052e-04) (hash(x)=151643553)
129
+ 1050 train 4.531322 (lr=9.9017e-04) (hash(x)=153396183)
130
+ 1060 train 4.529526 (lr=9.8981e-04) (hash(x)=157501787)
131
+ 1070 train 4.730604 (lr=9.8944e-04) (hash(x)=153277507)
132
+ 1080 train 4.807221 (lr=9.8907e-04) (hash(x)=156232389)
133
+ 1090 train 4.752865 (lr=9.8869e-04) (hash(x)=150673143)
134
+ 1100 val loss 4.8379
135
+ 1100 val perplexity 126.2089
136
+ 1100 train 4.822827 (lr=9.8831e-04) (hash(x)=153885445)
137
+ 1110 train 4.836082 (lr=9.8791e-04) (hash(x)=143951073)
138
+ 1120 train 5.228443 (lr=9.8752e-04) (hash(x)=151626904)
139
+ 1130 train 4.796211 (lr=9.8711e-04) (hash(x)=144013074)
140
+ 1140 train 4.727512 (lr=9.8670e-04) (hash(x)=149948988)
141
+ 1150 train 4.801790 (lr=9.8629e-04) (hash(x)=159666385)
142
+ 1160 train 4.772598 (lr=9.8586e-04) (hash(x)=146889587)
143
+ 1170 train 4.836252 (lr=9.8543e-04) (hash(x)=151746917)
144
+ 1180 train 4.846839 (lr=9.8500e-04) (hash(x)=154396948)
145
+ 1190 train 4.803428 (lr=9.8456e-04) (hash(x)=148201222)
146
+ 1200 val loss 4.7805
147
+ 1200 val perplexity 119.1652
148
+ 1200 train 4.796102 (lr=9.8411e-04) (hash(x)=142353087)
149
+ 1210 train 4.791084 (lr=9.8365e-04) (hash(x)=146779775)
150
+ 1220 train 4.846773 (lr=9.8319e-04) (hash(x)=155864740)
151
+ 1230 train 4.904485 (lr=9.8272e-04) (hash(x)=129826485)
152
+ 1240 train 4.752458 (lr=9.8225e-04) (hash(x)=108599491)
153
+ 1250 train 4.627167 (lr=9.8177e-04) (hash(x)=146491718)
154
+ 1260 train 4.706443 (lr=9.8129e-04) (hash(x)=141897382)
155
+ 1270 train 4.690997 (lr=9.8079e-04) (hash(x)=144830380)
156
+ 1280 train 4.744698 (lr=9.8030e-04) (hash(x)=157677763)
157
+ 1290 train 4.578168 (lr=9.7979e-04) (hash(x)=143611746)
158
+ 1300 val loss 4.7299
159
+ 1300 val perplexity 113.2867
160
+ 1300 train 4.605031 (lr=9.7928e-04) (hash(x)=150750353)
161
+ 1310 train 4.838254 (lr=9.7876e-04) (hash(x)=152850907)
162
+ 1320 train 4.750986 (lr=9.7824e-04) (hash(x)=148575395)
163
+ 1330 train 4.488687 (lr=9.7771e-04) (hash(x)=157113001)
164
+ 1340 train 4.550245 (lr=9.7717e-04) (hash(x)=137400723)
165
+ 1350 train 4.605360 (lr=9.7663e-04) (hash(x)=129849193)
166
+ 1360 train 4.592472 (lr=9.7608e-04) (hash(x)=144176797)
167
+ 1370 train 4.418783 (lr=9.7553e-04) (hash(x)=144550831)
168
+ 1380 train 4.392552 (lr=9.7497e-04) (hash(x)=144171821)
169
+ 1390 train 4.329997 (lr=9.7440e-04) (hash(x)=146110449)
170
+ 1400 val loss 4.6922
171
+ 1400 val perplexity 109.0923
172
+ 1400 train 4.420912 (lr=9.7383e-04) (hash(x)=152767913)
173
+ 1410 train 4.418310 (lr=9.7325e-04) (hash(x)=148110947)
174
+ 1420 train 4.294248 (lr=9.7267e-04) (hash(x)=153477905)
175
+ 1430 train 4.194738 (lr=9.7207e-04) (hash(x)=147227598)
176
+ 1440 train 4.405521 (lr=9.7148e-04) (hash(x)=146280368)
177
+ 1450 train 4.376457 (lr=9.7087e-04) (hash(x)=146531140)
178
+ 1460 train 4.211118 (lr=9.7026e-04) (hash(x)=151573873)
179
+ 1470 train 4.706414 (lr=9.6965e-04) (hash(x)=155559275)
180
+ 1480 train 4.582390 (lr=9.6903e-04) (hash(x)=140584826)
181
+ 1490 train 4.567916 (lr=9.6840e-04) (hash(x)=165070637)
182
+ 1500 val loss 4.6653
183
+ 1500 val perplexity 106.1974
184
+ 1500 train 4.687382 (lr=9.6777e-04) (hash(x)=151562048)
185
+ 1510 train 4.452570 (lr=9.6713e-04) (hash(x)=147770607)
186
+ 1520 train 4.533971 (lr=9.6648e-04) (hash(x)=156486153)
187
+ 1530 train 4.503690 (lr=9.6583e-04) (hash(x)=140244564)
188
+ 1540 train 4.554633 (lr=9.6517e-04) (hash(x)=151627549)
189
+ 1550 train 4.606707 (lr=9.6451e-04) (hash(x)=146001424)
190
+ 1560 train 4.667432 (lr=9.6384e-04) (hash(x)=184266264)
191
+ 1570 train 4.565306 (lr=9.6316e-04) (hash(x)=145002091)
192
+ 1580 train 4.462911 (lr=9.6248e-04) (hash(x)=132216826)
193
+ 1590 train 4.633543 (lr=9.6179e-04) (hash(x)=160975703)
194
+ 1600 val loss 4.6105
195
+ 1600 val perplexity 100.5388
196
+ 1600 train 4.773427 (lr=9.6110e-04) (hash(x)=166486165)
197
+ 1610 train 4.651180 (lr=9.6040e-04) (hash(x)=149601833)
198
+ 1620 train 4.540932 (lr=9.5969e-04) (hash(x)=157815188)
199
+ 1630 train 4.564334 (lr=9.5898e-04) (hash(x)=144234679)
200
+ 1640 train 4.681538 (lr=9.5826e-04) (hash(x)=160054560)
201
+ 1650 train 4.506206 (lr=9.5754e-04) (hash(x)=141061333)
202
+ 1660 train 4.628647 (lr=9.5681e-04) (hash(x)=148064219)
203
+ 1670 train 4.466605 (lr=9.5607e-04) (hash(x)=162799669)
204
+ 1680 train 4.503276 (lr=9.5533e-04) (hash(x)=147474277)
205
+ 1690 train 4.576978 (lr=9.5458e-04) (hash(x)=142369299)
206
+ 1700 val loss 4.5922
207
+ 1700 val perplexity 98.7104
208
+ 1700 train 4.349199 (lr=9.5383e-04) (hash(x)=130835396)
209
+ 1710 train 4.688591 (lr=9.5307e-04) (hash(x)=153173892)
210
+ 1720 train 4.348163 (lr=9.5231e-04) (hash(x)=148451191)
211
+ 1730 train 4.459361 (lr=9.5153e-04) (hash(x)=145047239)
212
+ 1740 train 4.365893 (lr=9.5076e-04) (hash(x)=147187109)
213
+ 1750 train 4.470172 (lr=9.4998e-04) (hash(x)=149493155)
214
+ 1760 train 4.426159 (lr=9.4919e-04) (hash(x)=147647531)
215
+ 1770 train 4.654772 (lr=9.4839e-04) (hash(x)=153677279)
216
+ 1780 train 4.485097 (lr=9.4759e-04) (hash(x)=144314015)
217
+ 1790 train 4.503185 (lr=9.4679e-04) (hash(x)=176962883)
218
+ 1800 val loss 4.5574
219
+ 1800 val perplexity 95.3377
220
+ 1800 train 4.600549 (lr=9.4598e-04) (hash(x)=158851816)
221
+ 1810 train 4.183919 (lr=9.4516e-04) (hash(x)=175415447)
222
+ 1820 train 4.485393 (lr=9.4434e-04) (hash(x)=169829733)
223
+ 1830 train 4.466335 (lr=9.4351e-04) (hash(x)=154116106)
224
+ 1840 train 4.302244 (lr=9.4268e-04) (hash(x)=150168406)
225
+ 1850 train 4.295933 (lr=9.4184e-04) (hash(x)=145782493)
226
+ 1860 train 4.281456 (lr=9.4099e-04) (hash(x)=158111664)
227
+ 1870 train 4.225426 (lr=9.4014e-04) (hash(x)=154837061)
228
+ 1880 train 4.174526 (lr=9.3928e-04) (hash(x)=151363745)
229
+ 1890 train 4.299949 (lr=9.3842e-04) (hash(x)=161283557)
230
+ 1900 val loss 4.5433
231
+ 1900 val perplexity 93.9961
232
+ 1900 train 4.549829 (lr=9.3755e-04) (hash(x)=153313879)
233
+ 1910 train 4.367863 (lr=9.3668e-04) (hash(x)=152272272)
234
+ 1920 train 4.434670 (lr=9.3580e-04) (hash(x)=153648837)
235
+ 1930 train 4.537760 (lr=9.3491e-04) (hash(x)=150161694)
236
+ 1940 train 4.411253 (lr=9.3402e-04) (hash(x)=151235093)
237
+ 1950 train 4.470268 (lr=9.3313e-04) (hash(x)=140582058)
238
+ 1960 train 4.405994 (lr=9.3223e-04) (hash(x)=125616059)
239
+ 1970 train 4.548931 (lr=9.3132e-04) (hash(x)=154353968)
240
+ 1980 train 4.553081 (lr=9.3041e-04) (hash(x)=156063143)
241
+ 1990 train 4.571413 (lr=9.2949e-04) (hash(x)=160927289)
242
+ 2000 val loss 4.4958
243
+ 2000 val perplexity 89.6415
244
+ 2000 train 4.481911 (lr=9.2856e-04) (hash(x)=158245023)
245
+ 2010 train 4.538713 (lr=9.2763e-04) (hash(x)=157096661)
246
+ 2020 train 4.756229 (lr=9.2670e-04) (hash(x)=144994302)
247
+ 2030 train 4.439175 (lr=9.2576e-04) (hash(x)=145742929)
248
+ 2040 train 4.462164 (lr=9.2481e-04) (hash(x)=156958528)
249
+ 2050 train 4.964296 (lr=9.2386e-04) (hash(x)=135313020)
250
+ 2060 train 4.612521 (lr=9.2291e-04) (hash(x)=147309332)
251
+ 2070 train 4.396996 (lr=9.2195e-04) (hash(x)=161720962)
252
+ 2080 train 4.581806 (lr=9.2098e-04) (hash(x)=167204793)
253
+ 2090 train 4.485687 (lr=9.2001e-04) (hash(x)=141325510)
254
+ 2100 val loss 4.4749
255
+ 2100 val perplexity 87.7822
256
+ 2100 train 4.298159 (lr=9.1903e-04) (hash(x)=157204896)
257
+ 2110 train 4.260617 (lr=9.1804e-04) (hash(x)=143077993)
258
+ 2120 train 4.466634 (lr=9.1706e-04) (hash(x)=159032689)
259
+ 2130 train 4.339815 (lr=9.1606e-04) (hash(x)=149513855)
260
+ 2140 train 4.267748 (lr=9.1506e-04) (hash(x)=151678267)
261
+ 2150 train 4.359253 (lr=9.1406e-04) (hash(x)=141309246)
262
+ 2160 train 4.454968 (lr=9.1305e-04) (hash(x)=153935732)
263
+ 2170 train 4.355664 (lr=9.1203e-04) (hash(x)=150668875)
264
+ 2180 train 4.360215 (lr=9.1101e-04) (hash(x)=142033123)
265
+ 2190 train 4.398824 (lr=9.0999e-04) (hash(x)=157015026)
266
+ 2200 val loss 4.4627
267
+ 2200 val perplexity 86.7203
268
+ 2200 train 4.276230 (lr=9.0896e-04) (hash(x)=137541932)
269
+ 2210 train 4.207301 (lr=9.0792e-04) (hash(x)=144424504)
270
+ 2220 train 4.129959 (lr=9.0688e-04) (hash(x)=151015810)
271
+ 2230 train 4.205470 (lr=9.0583e-04) (hash(x)=160707806)
272
+ 2240 train 4.213854 (lr=9.0478e-04) (hash(x)=151874110)
273
+ 2250 train 4.181313 (lr=9.0372e-04) (hash(x)=157341469)
274
+ 2260 train 4.006356 (lr=9.0266e-04) (hash(x)=144563224)
275
+ 2270 train 4.163818 (lr=9.0160e-04) (hash(x)=144878459)
276
+ 2280 train 4.227529 (lr=9.0052e-04) (hash(x)=158331565)
277
+ 2290 train 4.255958 (lr=8.9945e-04) (hash(x)=146875132)
278
+ 2300 val loss 4.4523
279
+ 2300 val perplexity 85.8242
280
+ 2300 train 4.395814 (lr=8.9836e-04) (hash(x)=150149692)
281
+ 2310 train 4.405045 (lr=8.9728e-04) (hash(x)=141206538)
282
+ 2320 train 4.430086 (lr=8.9618e-04) (hash(x)=136219115)
283
+ 2330 train 4.459432 (lr=8.9509e-04) (hash(x)=148496500)
284
+ 2340 train 4.448664 (lr=8.9398e-04) (hash(x)=161388511)
285
+ 2350 train 4.300060 (lr=8.9288e-04) (hash(x)=149433185)
286
+ 2360 train 4.514430 (lr=8.9177e-04) (hash(x)=154659203)
287
+ 2370 train 4.499865 (lr=8.9065e-04) (hash(x)=145696996)
288
+ 2380 train 4.313351 (lr=8.8953e-04) (hash(x)=139915095)
289
+ 2390 train 4.440316 (lr=8.8840e-04) (hash(x)=150567584)
290
+ 2400 val loss 4.4280
291
+ 2400 val perplexity 83.7652
292
+ 2400 train 4.348131 (lr=8.8727e-04) (hash(x)=151730720)
293
+ 2410 train 4.381239 (lr=8.8613e-04) (hash(x)=153136544)
294
+ 2420 train 4.349610 (lr=8.8499e-04) (hash(x)=151004677)
295
+ 2430 train 4.366881 (lr=8.8384e-04) (hash(x)=167118859)
296
+ 2440 train 4.377445 (lr=8.8269e-04) (hash(x)=145433453)
297
+ 2450 train 4.424284 (lr=8.8153e-04) (hash(x)=150913293)
298
+ 2460 train 4.377126 (lr=8.8037e-04) (hash(x)=145674639)
299
+ 2470 train 4.486581 (lr=8.7921e-04) (hash(x)=151022749)
300
+ 2480 train 4.337985 (lr=8.7804e-04) (hash(x)=136621037)
301
+ 2490 train 4.390441 (lr=8.7686e-04) (hash(x)=158100672)
302
+ 2500 val loss 4.3973
303
+ 2500 val perplexity 81.2303
304
+ 2500 train 4.406614 (lr=8.7568e-04) (hash(x)=143406752)
305
+ 2510 train 4.328140 (lr=8.7450e-04) (hash(x)=142664084)
306
+ 2520 train 4.383507 (lr=8.7331e-04) (hash(x)=132174378)
307
+ 2530 train 4.387276 (lr=8.7211e-04) (hash(x)=140760293)
308
+ 2540 train 4.333264 (lr=8.7091e-04) (hash(x)=154863398)
309
+ 2550 train 4.278936 (lr=8.6971e-04) (hash(x)=153042049)
310
+ 2560 train 4.209887 (lr=8.6850e-04) (hash(x)=145339170)
311
+ 2570 train 4.365739 (lr=8.6729e-04) (hash(x)=157656357)
312
+ 2580 train 4.354192 (lr=8.6607e-04) (hash(x)=147899462)
313
+ 2590 train 4.328354 (lr=8.6485e-04) (hash(x)=142812892)
314
+ 2600 val loss 4.4119
315
+ 2600 val perplexity 82.4277
316
+ 2600 train 4.283755 (lr=8.6362e-04) (hash(x)=157272496)
317
+ 2610 train 4.072284 (lr=8.6239e-04) (hash(x)=148923594)
318
+ 2620 train 4.009719 (lr=8.6116e-04) (hash(x)=164654807)
319
+ 2630 train 4.192162 (lr=8.5991e-04) (hash(x)=136419327)
320
+ 2640 train 4.160370 (lr=8.5867e-04) (hash(x)=153420840)
321
+ 2650 train 3.994092 (lr=8.5742e-04) (hash(x)=153693827)
322
+ 2660 train 4.158444 (lr=8.5617e-04) (hash(x)=145574165)
323
+ 2670 train 4.193728 (lr=8.5491e-04) (hash(x)=153894843)
324
+ 2680 train 4.051310 (lr=8.5365e-04) (hash(x)=132234446)
325
+ 2690 train 4.120742 (lr=8.5238e-04) (hash(x)=153430212)
326
+ 2700 val loss 4.3978
327
+ 2700 val perplexity 81.2722
328
+ 2700 train 4.301477 (lr=8.5111e-04) (hash(x)=155342327)
329
+ 2710 train 4.311716 (lr=8.4983e-04) (hash(x)=145797023)
330
+ 2720 train 4.461933 (lr=8.4855e-04) (hash(x)=149220870)
331
+ 2730 train 4.371161 (lr=8.4727e-04) (hash(x)=134069562)
332
+ 2740 train 4.372974 (lr=8.4598e-04) (hash(x)=141560499)
333
+ 2750 train 4.386624 (lr=8.4469e-04) (hash(x)=146000524)
334
+ 2760 train 4.273922 (lr=8.4339e-04) (hash(x)=164710665)
335
+ 2770 train 4.175827 (lr=8.4209e-04) (hash(x)=151199094)
336
+ 2780 train 4.365935 (lr=8.4078e-04) (hash(x)=138625404)
337
+ 2790 train 4.626088 (lr=8.3947e-04) (hash(x)=162766112)
338
+ 2800 val loss 4.3658
339
+ 2800 val perplexity 78.7141
340
+ 2800 train 4.325482 (lr=8.3816e-04) (hash(x)=140626679)
341
+ 2810 train 4.403170 (lr=8.3684e-04) (hash(x)=144248544)
342
+ 2820 train 4.332086 (lr=8.3552e-04) (hash(x)=139378481)
343
+ 2830 train 4.263780 (lr=8.3419e-04) (hash(x)=150173403)
344
+ 2840 train 4.560523 (lr=8.3286e-04) (hash(x)=160168705)
345
+ 2850 train 4.346167 (lr=8.3153e-04) (hash(x)=149480059)
346
+ 2860 train 4.396791 (lr=8.3019e-04) (hash(x)=155559096)
347
+ 2870 train 4.325521 (lr=8.2884e-04) (hash(x)=158825683)
348
+ 2880 train 4.493082 (lr=8.2750e-04) (hash(x)=152129048)
349
+ 2890 train 4.167849 (lr=8.2615e-04) (hash(x)=157866544)
350
+ 2900 val loss 4.3451
351
+ 2900 val perplexity 77.0970
352
+ 2900 train 4.301449 (lr=8.2479e-04) (hash(x)=144953350)
353
+ 2910 train 4.506434 (lr=8.2343e-04) (hash(x)=154103497)
354
+ 2920 train 4.216445 (lr=8.2207e-04) (hash(x)=144467854)
355
+ 2930 train 4.150402 (lr=8.2070e-04) (hash(x)=153784048)
356
+ 2940 train 4.453684 (lr=8.1933e-04) (hash(x)=154774534)
357
+ 2950 train 4.301002 (lr=8.1796e-04) (hash(x)=143988507)
358
+ 2960 train 4.177539 (lr=8.1658e-04) (hash(x)=159552857)
359
+ 2970 train 4.483184 (lr=8.1520e-04) (hash(x)=175539144)
360
+ 2980 train 4.357156 (lr=8.1381e-04) (hash(x)=158172686)
361
+ 2990 train 4.231555 (lr=8.1242e-04) (hash(x)=147553039)
362
+ 3000 val loss 4.3513
363
+ 3000 val perplexity 77.5766
364
+ 3000 train 4.155370 (lr=8.1103e-04) (hash(x)=172449837)
365
+ 3010 train 4.048590 (lr=8.0963e-04) (hash(x)=153285123)
366
+ 3020 train 4.230925 (lr=8.0823e-04) (hash(x)=145307252)
367
+ 3030 train 4.282101 (lr=8.0682e-04) (hash(x)=147739766)
368
+ 3040 train 3.994451 (lr=8.0541e-04) (hash(x)=154414400)
369
+ 3050 train 4.081535 (lr=8.0400e-04) (hash(x)=150696487)
370
+ 3060 train 4.086955 (lr=8.0258e-04) (hash(x)=154543441)
371
+ 3070 train 3.862572 (lr=8.0116e-04) (hash(x)=170733242)
372
+ 3080 train 4.220778 (lr=7.9974e-04) (hash(x)=161914669)
373
+ 3090 train 4.105796 (lr=7.9831e-04) (hash(x)=156125491)
374
+ 3100 val loss 4.3696
375
+ 3100 val perplexity 79.0157
376
+ 3100 train 3.998830 (lr=7.9688e-04) (hash(x)=141710086)
377
+ 3110 train 4.307368 (lr=7.9545e-04) (hash(x)=154547635)
378
+ 3120 train 4.148840 (lr=7.9401e-04) (hash(x)=151747171)
379
+ 3130 train 4.441361 (lr=7.9257e-04) (hash(x)=160311353)
380
+ 3140 train 4.550424 (lr=7.9112e-04) (hash(x)=157233821)
381
+ 3150 train 4.362074 (lr=7.8967e-04) (hash(x)=153016810)
382
+ 3160 train 4.345395 (lr=7.8822e-04) (hash(x)=157289875)
383
+ 3170 train 4.338235 (lr=7.8677e-04) (hash(x)=157459484)
384
+ 3180 train 4.260104 (lr=7.8531e-04) (hash(x)=162037435)
385
+ 3190 train 4.351839 (lr=7.8384e-04) (hash(x)=159360169)
386
+ 3200 val loss 4.3152
387
+ 3200 val perplexity 74.8300
388
+ 3200 train 4.228230 (lr=7.8238e-04) (hash(x)=151299772)
389
+ 3210 train 4.473785 (lr=7.8091e-04) (hash(x)=183865364)
390
+ 3220 train 4.210444 (lr=7.7944e-04) (hash(x)=139622592)
391
+ 3230 train 4.164743 (lr=7.7796e-04) (hash(x)=147102655)
392
+ 3240 train 4.212816 (lr=7.7648e-04) (hash(x)=161870696)
393
+ 3250 train 4.373344 (lr=7.7500e-04) (hash(x)=152692231)
394
+ 3260 train 4.118723 (lr=7.7351e-04) (hash(x)=149990134)
395
+ 3270 train 4.091029 (lr=7.7203e-04) (hash(x)=134848432)
396
+ 3280 train 4.147256 (lr=7.7053e-04) (hash(x)=138519418)
397
+ 3290 train 4.284679 (lr=7.6904e-04) (hash(x)=139301249)
398
+ 3300 val loss 4.3191
399
+ 3300 val perplexity 75.1182
400
+ 3300 train 4.127578 (lr=7.6754e-04) (hash(x)=146473110)
401
+ 3310 train 4.197045 (lr=7.6604e-04) (hash(x)=144833685)
402
+ 3320 train 4.091340 (lr=7.6453e-04) (hash(x)=148953545)
403
+ 3330 train 4.323723 (lr=7.6303e-04) (hash(x)=161302371)
404
+ 3340 train 4.260886 (lr=7.6151e-04) (hash(x)=144215411)
405
+ 3350 train 4.429548 (lr=7.6000e-04) (hash(x)=157713664)
406
+ 3360 train 4.296024 (lr=7.5848e-04) (hash(x)=150813928)
407
+ 3370 train 4.283208 (lr=7.5696e-04) (hash(x)=150889463)
408
+ 3380 train 4.328917 (lr=7.5544e-04) (hash(x)=156631928)
409
+ 3390 train 4.362484 (lr=7.5391e-04) (hash(x)=136001579)
410
+ 3400 val loss 4.2956
411
+ 3400 val perplexity 73.3754
412
+ 3400 train 4.440581 (lr=7.5238e-04) (hash(x)=153954157)
413
+ 3410 train 4.216467 (lr=7.5085e-04) (hash(x)=150012477)
414
+ 3420 train 4.579030 (lr=7.4932e-04) (hash(x)=160878899)
415
+ 3430 train 4.616146 (lr=7.4778e-04) (hash(x)=128131106)
416
+ 3440 train 4.540115 (lr=7.4624e-04) (hash(x)=149611866)
417
+ 3450 train 4.170357 (lr=7.4470e-04) (hash(x)=166942460)
418
+ 3460 train 4.322520 (lr=7.4315e-04) (hash(x)=150498414)
419
+ 3470 train 4.473644 (lr=7.4160e-04) (hash(x)=144342279)
420
+ 3480 train 4.303067 (lr=7.4005e-04) (hash(x)=148891764)
421
+ 3490 train 4.275787 (lr=7.3849e-04) (hash(x)=157308188)
422
+ 3500 val loss 4.2811
423
+ 3500 val perplexity 72.3233
424
+ 3500 train 4.173330 (lr=7.3694e-04) (hash(x)=153717336)
425
+ 3510 train 4.203944 (lr=7.3538e-04) (hash(x)=156083924)
426
+ 3520 train 4.187388 (lr=7.3381e-04) (hash(x)=144234966)
427
+ 3530 train 4.278142 (lr=7.3225e-04) (hash(x)=150184381)
428
+ 3540 train 4.162787 (lr=7.3068e-04) (hash(x)=143011198)
429
+ 3550 train 4.306414 (lr=7.2911e-04) (hash(x)=148764613)
430
+ 3560 train 4.069956 (lr=7.2754e-04) (hash(x)=150625329)
431
+ 3570 train 4.178258 (lr=7.2596e-04) (hash(x)=143981302)
432
+ 3580 train 4.238663 (lr=7.2438e-04) (hash(x)=154989703)
433
+ 3590 train 4.361791 (lr=7.2280e-04) (hash(x)=146602526)
434
+ 3600 val loss 4.2801
435
+ 3600 val perplexity 72.2447
436
+ 3600 train 4.064011 (lr=7.2122e-04) (hash(x)=144965161)
437
+ 3610 train 4.056331 (lr=7.1963e-04) (hash(x)=159452243)
438
+ 3620 train 4.047841 (lr=7.1804e-04) (hash(x)=144904300)
439
+ 3630 train 3.959478 (lr=7.1645e-04) (hash(x)=127790212)
440
+ 3640 train 4.197893 (lr=7.1486e-04) (hash(x)=150863158)
441
+ 3650 train 4.160391 (lr=7.1326e-04) (hash(x)=145691635)
442
+ 3660 train 4.133244 (lr=7.1167e-04) (hash(x)=150631264)
443
+ 3670 train 4.110845 (lr=7.1007e-04) (hash(x)=148646259)
444
+ 3680 train 4.071361 (lr=7.0846e-04) (hash(x)=139317386)
445
+ 3690 train 4.181606 (lr=7.0686e-04) (hash(x)=142206916)
446
+ 3700 val loss 4.2912
447
+ 3700 val perplexity 73.0548
448
+ 3700 train 4.190293 (lr=7.0525e-04) (hash(x)=125969741)
449
+ 3710 train 4.039621 (lr=7.0364e-04) (hash(x)=157183915)
450
+ 3720 train 4.037220 (lr=7.0203e-04) (hash(x)=150434369)
451
+ 3730 train 4.225224 (lr=7.0041e-04) (hash(x)=160911722)
452
+ 3740 train 4.121335 (lr=6.9880e-04) (hash(x)=163306921)
453
+ 3750 train 4.232787 (lr=6.9718e-04) (hash(x)=149156876)
454
+ 3760 train 4.372747 (lr=6.9556e-04) (hash(x)=156015970)
455
+ 3770 train 4.320635 (lr=6.9394e-04) (hash(x)=148222322)
456
+ 3780 train 4.423875 (lr=6.9231e-04) (hash(x)=172040174)
457
+ 3790 train 4.330666 (lr=6.9069e-04) (hash(x)=151593070)
458
+ 3800 val loss 4.2653
459
+ 3800 val perplexity 71.1885
460
+ 3800 train 4.083667 (lr=6.8906e-04) (hash(x)=155070487)
461
+ 3810 train 4.266338 (lr=6.8743e-04) (hash(x)=137073164)
462
+ 3820 train 4.173840 (lr=6.8579e-04) (hash(x)=147761510)
463
+ 3830 train 4.101120 (lr=6.8416e-04) (hash(x)=154763021)
464
+ 3840 train 4.136382 (lr=6.8252e-04) (hash(x)=149622435)
465
+ 3850 train 4.250448 (lr=6.8088e-04) (hash(x)=150779423)
466
+ 3860 train 4.222646 (lr=6.7924e-04) (hash(x)=139445120)
467
+ 3870 train 4.209052 (lr=6.7760e-04) (hash(x)=151884947)
468
+ 3880 train 4.210846 (lr=6.7596e-04) (hash(x)=139795201)
469
+ 3890 train 4.251503 (lr=6.7431e-04) (hash(x)=155589956)
470
+ 3900 val loss 4.2509
471
+ 3900 val perplexity 70.1705
472
+ 3900 train 4.169502 (lr=6.7266e-04) (hash(x)=149444644)
473
+ 3910 train 4.162097 (lr=6.7101e-04) (hash(x)=148538323)
474
+ 3920 train 4.290719 (lr=6.6936e-04) (hash(x)=144026925)
475
+ 3930 train 4.177498 (lr=6.6771e-04) (hash(x)=155311200)
476
+ 3940 train 4.154435 (lr=6.6605e-04) (hash(x)=142410034)
477
+ 3950 train 4.131623 (lr=6.6440e-04) (hash(x)=145986075)
478
+ 3960 train 4.034272 (lr=6.6274e-04) (hash(x)=156197753)
479
+ 3970 train 4.187742 (lr=6.6108e-04) (hash(x)=146981564)
480
+ 3980 train 4.497239 (lr=6.5942e-04) (hash(x)=145361976)
481
+ 3990 train 4.229481 (lr=6.5776e-04) (hash(x)=149344475)
482
+ 4000 val loss 4.2490
483
+ 4000 val perplexity 70.0364
484
+ 4000 train 4.106431 (lr=6.5609e-04) (hash(x)=151663033)
485
+ 4010 train 3.992515 (lr=6.5443e-04) (hash(x)=140915499)
486
+ 4020 train 4.210707 (lr=6.5276e-04) (hash(x)=159979134)
487
+ 4030 train 4.132223 (lr=6.5109e-04) (hash(x)=163081433)
488
+ 4040 train 4.016047 (lr=6.4942e-04) (hash(x)=144890231)
489
+ 4050 train 4.176378 (lr=6.4775e-04) (hash(x)=138242368)
490
+ 4060 train 4.070343 (lr=6.4607e-04) (hash(x)=144438406)
491
+ 4070 train 3.958871 (lr=6.4440e-04) (hash(x)=159294906)
492
+ 4080 train 4.019866 (lr=6.4272e-04) (hash(x)=143676301)
493
+ 4090 train 4.251940 (lr=6.4104e-04) (hash(x)=147875296)
494
+ 4100 val loss 4.2648
495
+ 4100 val perplexity 71.1541
496
+ 4100 train 4.315084 (lr=6.3937e-04) (hash(x)=143688282)
497
+ 4110 train 4.082783 (lr=6.3769e-04) (hash(x)=150086777)
498
+ 4120 train 4.030358 (lr=6.3600e-04) (hash(x)=154624391)
499
+ 4130 train 4.219367 (lr=6.3432e-04) (hash(x)=141163032)
500
+ 4140 train 4.238806 (lr=6.3264e-04) (hash(x)=151038180)
501
+ 4150 train 4.321716 (lr=6.3095e-04) (hash(x)=144718421)
502
+ 4160 train 4.239073 (lr=6.2927e-04) (hash(x)=154277459)
503
+ 4170 train 4.394313 (lr=6.2758e-04) (hash(x)=143765173)
504
+ 4180 train 4.261236 (lr=6.2589e-04) (hash(x)=161013425)
505
+ 4190 train 4.217021 (lr=6.2420e-04) (hash(x)=161666854)
506
+ 4200 val loss 4.2258
507
+ 4200 val perplexity 68.4265
508
+ 4200 train 4.134756 (lr=6.2251e-04) (hash(x)=163361651)
509
+ 4210 train 4.469084 (lr=6.2082e-04) (hash(x)=153491528)
510
+ 4220 train 4.267834 (lr=6.1913e-04) (hash(x)=164410385)
511
+ 4230 train 4.208251 (lr=6.1743e-04) (hash(x)=175191816)
512
+ 4240 train 4.081667 (lr=6.1574e-04) (hash(x)=133924818)
513
+ 4250 train 4.308551 (lr=6.1404e-04) (hash(x)=152932845)
514
+ 4260 train 4.180036 (lr=6.1235e-04) (hash(x)=141987318)
515
+ 4270 train 4.279481 (lr=6.1065e-04) (hash(x)=142521279)
516
+ 4280 train 4.134878 (lr=6.0895e-04) (hash(x)=147707669)
517
+ 4290 train 4.202910 (lr=6.0725e-04) (hash(x)=152245539)
518
+ 4300 val loss 4.2119
519
+ 4300 val perplexity 67.4857
520
+ 4300 train 4.089963 (lr=6.0555e-04) (hash(x)=153619361)
521
+ 4310 train 4.301236 (lr=6.0385e-04) (hash(x)=142614473)
522
+ 4320 train 4.119951 (lr=6.0215e-04) (hash(x)=142689858)
523
+ 4330 train 4.149946 (lr=6.0044e-04) (hash(x)=140837891)
524
+ 4340 train 4.130474 (lr=5.9874e-04) (hash(x)=131790760)
525
+ 4350 train 4.292194 (lr=5.9704e-04) (hash(x)=147186292)
526
+ 4360 train 4.103318 (lr=5.9533e-04) (hash(x)=148140562)
527
+ 4370 train 4.217632 (lr=5.9363e-04) (hash(x)=156409913)
528
+ 4380 train 4.126440 (lr=5.9192e-04) (hash(x)=145126034)
529
+ 4390 train 4.076787 (lr=5.9022e-04) (hash(x)=147368423)
530
+ 4400 val loss 4.2161
531
+ 4400 val perplexity 67.7689
532
+ 4400 train 4.560677 (lr=5.8851e-04) (hash(x)=168527064)
533
+ 4410 train 4.072362 (lr=5.8680e-04) (hash(x)=150152773)
534
+ 4420 train 4.158353 (lr=5.8509e-04) (hash(x)=155472304)
535
+ 4430 train 4.079534 (lr=5.8338e-04) (hash(x)=147299265)
536
+ 4440 train 4.042902 (lr=5.8168e-04) (hash(x)=140659522)
537
+ 4450 train 4.109509 (lr=5.7997e-04) (hash(x)=147169111)
538
+ 4460 train 4.178479 (lr=5.7826e-04) (hash(x)=144098557)
539
+ 4470 train 3.938241 (lr=5.7655e-04) (hash(x)=161957507)
540
+ 4480 train 3.964268 (lr=5.7483e-04) (hash(x)=154064204)
541
+ 4490 train 4.101294 (lr=5.7312e-04) (hash(x)=153999958)
542
+ 4500 val loss 4.2295
543
+ 4500 val perplexity 68.6821
544
+ 4500 train 4.255933 (lr=5.7141e-04) (hash(x)=125588037)
545
+ 4510 train 4.057528 (lr=5.6970e-04) (hash(x)=142522186)
546
+ 4520 train 3.898660 (lr=5.6799e-04) (hash(x)=152736893)
547
+ 4530 train 4.125094 (lr=5.6628e-04) (hash(x)=148091275)
548
+ 4540 train 4.240044 (lr=5.6456e-04) (hash(x)=147663019)
549
+ 4550 train 4.324493 (lr=5.6285e-04) (hash(x)=151491857)
550
+ 4560 train 4.211528 (lr=5.6114e-04) (hash(x)=161757513)
551
+ 4570 train 4.200512 (lr=5.5942e-04) (hash(x)=165448260)
552
+ 4580 train 4.343745 (lr=5.5771e-04) (hash(x)=151620292)
553
+ 4590 train 4.426542 (lr=5.5600e-04) (hash(x)=161877852)
554
+ 4600 val loss 4.1930
555
+ 4600 val perplexity 66.2239
556
+ 4600 train 4.177270 (lr=5.5428e-04) (hash(x)=143710941)
557
+ 4610 train 4.267295 (lr=5.5257e-04) (hash(x)=152244871)
558
+ 4620 train 4.217650 (lr=5.5086e-04) (hash(x)=161268167)
559
+ 4630 train 4.204400 (lr=5.4914e-04) (hash(x)=153904677)
560
+ 4640 train 4.161971 (lr=5.4743e-04) (hash(x)=152593758)
561
+ 4650 train 4.314531 (lr=5.4572e-04) (hash(x)=159047122)
562
+ 4660 train 4.377215 (lr=5.4400e-04) (hash(x)=167370244)
563
+ 4670 train 4.158813 (lr=5.4229e-04) (hash(x)=140357684)
564
+ 4680 train 4.312389 (lr=5.4058e-04) (hash(x)=140547853)
565
+ 4690 train 4.485363 (lr=5.3886e-04) (hash(x)=143000949)
566
+ 4700 val loss 4.1859
567
+ 4700 val perplexity 65.7497
568
+ 4700 train 4.119508 (lr=5.3715e-04) (hash(x)=150952742)
569
+ 4710 train 4.166143 (lr=5.3544e-04) (hash(x)=157964406)
570
+ 4720 train 4.056052 (lr=5.3372e-04) (hash(x)=147653608)
571
+ 4730 train 4.140808 (lr=5.3201e-04) (hash(x)=165545764)
572
+ 4740 train 4.120740 (lr=5.3030e-04) (hash(x)=146744631)
573
+ 4750 train 4.206942 (lr=5.2859e-04) (hash(x)=153635164)
574
+ 4760 train 4.250543 (lr=5.2688e-04) (hash(x)=153217508)
575
+ 4770 train 4.199159 (lr=5.2517e-04) (hash(x)=169622743)
576
+ 4780 train 4.120512 (lr=5.2345e-04) (hash(x)=138018297)
577
+ 4790 train 3.996400 (lr=5.2174e-04) (hash(x)=156067303)
578
+ 4800 val loss 4.1856
579
+ 4800 val perplexity 65.7335
580
+ 4800 train 4.169362 (lr=5.2003e-04) (hash(x)=145323659)
581
+ 4810 train 4.055585 (lr=5.1832e-04) (hash(x)=152587772)
582
+ 4820 train 4.275341 (lr=5.1662e-04) (hash(x)=148481485)
583
+ 4830 train 4.038648 (lr=5.1491e-04) (hash(x)=154219664)
584
+ 4840 train 3.925911 (lr=5.1320e-04) (hash(x)=156853645)
585
+ 4850 train 4.018215 (lr=5.1149e-04) (hash(x)=152329702)
586
+ 4860 train 4.009691 (lr=5.0978e-04) (hash(x)=136684779)
587
+ 4870 train 4.077189 (lr=5.0808e-04) (hash(x)=143260367)
588
+ 4880 train 4.039571 (lr=5.0637e-04) (hash(x)=150910947)
589
+ 4890 train 4.054239 (lr=5.0467e-04) (hash(x)=179249925)
590
+ 4900 val loss 4.1913
591
+ 4900 val perplexity 66.1096
592
+ 4900 train 4.215473 (lr=5.0296e-04) (hash(x)=153151397)
593
+ 4910 train 4.167186 (lr=5.0126e-04) (hash(x)=144101891)
594
+ 4920 train 4.060726 (lr=4.9956e-04) (hash(x)=144994952)
595
+ 4930 train 4.341872 (lr=4.9785e-04) (hash(x)=155390878)
596
+ 4940 train 4.126190 (lr=4.9615e-04) (hash(x)=141359221)
597
+ 4950 train 4.028131 (lr=4.9445e-04) (hash(x)=151570011)
598
+ 4960 train 4.246081 (lr=4.9275e-04) (hash(x)=146918855)
599
+ 4970 train 4.181008 (lr=4.9105e-04) (hash(x)=148647362)
600
+ 4980 train 4.136794 (lr=4.8935e-04) (hash(x)=159753562)
601
+ 4990 train 4.085922 (lr=4.8765e-04) (hash(x)=147683447)
602
+ 5000 val loss 4.1808
603
+ 5000 val perplexity 65.4179
604
+ 5000 train 4.224794 (lr=4.8596e-04) (hash(x)=143182059)
605
+ 5010 train 4.033636 (lr=4.8426e-04) (hash(x)=138657407)
606
+ 5020 train 4.144500 (lr=4.8257e-04) (hash(x)=151830497)
607
+ 5030 train 4.141582 (lr=4.8087e-04) (hash(x)=147727694)
608
+ 5040 train 3.986638 (lr=4.7918e-04) (hash(x)=151957014)
609
+ 5050 train 4.253509 (lr=4.7749e-04) (hash(x)=147828693)
610
+ 5060 train 4.301372 (lr=4.7580e-04) (hash(x)=158589700)
611
+ 5070 train 4.184211 (lr=4.7411e-04) (hash(x)=155662163)
612
+ 5080 train 4.287389 (lr=4.7242e-04) (hash(x)=157797383)
613
+ 5090 train 4.163893 (lr=4.7073e-04) (hash(x)=153848957)
614
+ 5100 val loss 4.1653
615
+ 5100 val perplexity 64.4096
616
+ 5100 train 4.116641 (lr=4.6905e-04) (hash(x)=170083586)
617
+ 5110 train 4.038221 (lr=4.6736e-04) (hash(x)=153800138)
618
+ 5120 train 4.142804 (lr=4.6568e-04) (hash(x)=153526228)
619
+ 5130 train 4.152077 (lr=4.6400e-04) (hash(x)=158650342)
620
+ 5140 train 4.140490 (lr=4.6231e-04) (hash(x)=153163034)
621
+ 5150 train 4.142080 (lr=4.6063e-04) (hash(x)=155281294)
622
+ 5160 train 4.120215 (lr=4.5896e-04) (hash(x)=147731627)
623
+ 5170 train 4.091925 (lr=4.5728e-04) (hash(x)=146388976)
624
+ 5180 train 4.311483 (lr=4.5560e-04) (hash(x)=140323065)
625
+ 5190 train 4.215456 (lr=4.5393e-04) (hash(x)=150406523)
626
+ 5200 val loss 4.1621
627
+ 5200 val perplexity 64.2057
628
+ 5200 train 4.021189 (lr=4.5225e-04) (hash(x)=149363919)
629
+ 5210 train 4.160786 (lr=4.5058e-04) (hash(x)=164390214)
630
+ 5220 train 4.121970 (lr=4.4891e-04) (hash(x)=153652984)
631
+ 5230 train 4.087306 (lr=4.4724e-04) (hash(x)=139542770)
632
+ 5240 train 3.969379 (lr=4.4557e-04) (hash(x)=140115288)
633
+ 5250 train 4.109451 (lr=4.4391e-04) (hash(x)=142158900)
634
+ 5260 train 4.023765 (lr=4.4224e-04) (hash(x)=156019449)
635
+ 5270 train 4.134142 (lr=4.4058e-04) (hash(x)=150776850)
636
+ 5280 train 4.094083 (lr=4.3892e-04) (hash(x)=151358692)
637
+ 5290 train 4.060670 (lr=4.3726e-04) (hash(x)=137112863)
638
+ 5300 val loss 4.1580
639
+ 5300 val perplexity 63.9466
640
+ 5300 train 3.987714 (lr=4.3560e-04) (hash(x)=152033784)
641
+ 5310 train 4.026588 (lr=4.3395e-04) (hash(x)=147785712)
642
+ 5320 train 4.166820 (lr=4.3229e-04) (hash(x)=156426339)
643
+ 5330 train 4.120505 (lr=4.3064e-04) (hash(x)=161907693)
644
+ 5340 train 4.188681 (lr=4.2899e-04) (hash(x)=156414094)
645
+ 5350 train 4.208929 (lr=4.2734e-04) (hash(x)=141712428)
646
+ 5360 train 4.161851 (lr=4.2569e-04) (hash(x)=156873601)
647
+ 5370 train 4.144500 (lr=4.2404e-04) (hash(x)=149863781)
648
+ 5380 train 4.140306 (lr=4.2240e-04) (hash(x)=148183971)
649
+ 5390 train 4.103770 (lr=4.2076e-04) (hash(x)=151542334)
650
+ 5400 val loss 4.1356
651
+ 5400 val perplexity 62.5278
652
+ 5400 train 4.382998 (lr=4.1912e-04) (hash(x)=154614289)
653
+ 5410 train 4.122485 (lr=4.1748e-04) (hash(x)=169124671)
654
+ 5420 train 4.145134 (lr=4.1584e-04) (hash(x)=159963738)
655
+ 5430 train 4.106667 (lr=4.1421e-04) (hash(x)=155162691)
656
+ 5440 train 4.138029 (lr=4.1257e-04) (hash(x)=135981908)
657
+ 5450 train 4.229747 (lr=4.1094e-04) (hash(x)=145055580)
658
+ 5460 train 4.082614 (lr=4.0931e-04) (hash(x)=150367825)
659
+ 5470 train 4.171754 (lr=4.0769e-04) (hash(x)=151034173)
660
+ 5480 train 4.091937 (lr=4.0606e-04) (hash(x)=155581766)
661
+ 5490 train 4.119243 (lr=4.0444e-04) (hash(x)=136973993)
662
+ 5500 val loss 4.1301
663
+ 5500 val perplexity 62.1853
664
+ 5500 train 4.285725 (lr=4.0282e-04) (hash(x)=157745174)
665
+ 5510 train 4.237983 (lr=4.0120e-04) (hash(x)=144628894)
666
+ 5520 train 4.121191 (lr=3.9959e-04) (hash(x)=135056684)
667
+ 5530 train 4.108870 (lr=3.9797e-04) (hash(x)=143418640)
668
+ 5540 train 4.089125 (lr=3.9636e-04) (hash(x)=144464984)
669
+ 5550 train 4.297401 (lr=3.9475e-04) (hash(x)=155888745)
670
+ 5560 train 3.954111 (lr=3.9314e-04) (hash(x)=149122683)
671
+ 5570 train 4.001010 (lr=3.9154e-04) (hash(x)=154573954)
672
+ 5580 train 4.105954 (lr=3.8993e-04) (hash(x)=149339361)
673
+ 5590 train 3.992002 (lr=3.8833e-04) (hash(x)=154738453)
674
+ 5600 val loss 4.1314
675
+ 5600 val perplexity 62.2631
676
+ 5600 train 4.109376 (lr=3.8674e-04) (hash(x)=147693222)
677
+ 5610 train 3.887067 (lr=3.8514e-04) (hash(x)=142140192)
678
+ 5620 train 4.017938 (lr=3.8355e-04) (hash(x)=148677209)
679
+ 5630 train 4.084262 (lr=3.8196e-04) (hash(x)=143495554)
680
+ 5640 train 4.174602 (lr=3.8037e-04) (hash(x)=153202964)
681
+ 5650 train 4.099245 (lr=3.7878e-04) (hash(x)=168724867)
682
+ 5660 train 4.068202 (lr=3.7720e-04) (hash(x)=142816746)
683
+ 5670 train 4.190711 (lr=3.7562e-04) (hash(x)=164119551)
684
+ 5680 train 4.052152 (lr=3.7404e-04) (hash(x)=157239121)
685
+ 5690 train 4.336944 (lr=3.7246e-04) (hash(x)=144662329)
686
+ 5700 val loss 4.1187
687
+ 5700 val perplexity 61.4796
688
+ 5700 train 4.053085 (lr=3.7089e-04) (hash(x)=149784627)
689
+ 5710 train 4.021266 (lr=3.6932e-04) (hash(x)=152821162)
690
+ 5720 train 4.236451 (lr=3.6775e-04) (hash(x)=158497530)
691
+ 5730 train 4.227133 (lr=3.6619e-04) (hash(x)=154583914)
692
+ 5740 train 4.045676 (lr=3.6462e-04) (hash(x)=142027172)
693
+ 5750 train 4.287654 (lr=3.6306e-04) (hash(x)=119227402)
694
+ 5760 train 4.133159 (lr=3.6151e-04) (hash(x)=153248187)
695
+ 5770 train 4.010704 (lr=3.5995e-04) (hash(x)=147951994)
696
+ 5780 train 4.020247 (lr=3.5840e-04) (hash(x)=138935284)
697
+ 5790 train 4.103357 (lr=3.5685e-04) (hash(x)=151786681)
698
+ 5800 val loss 4.1273
699
+ 5800 val perplexity 62.0090
700
+ 5800 train 4.143414 (lr=3.5530e-04) (hash(x)=158620729)
701
+ 5810 train 4.090216 (lr=3.5376e-04) (hash(x)=178115789)
702
+ 5820 train 4.011483 (lr=3.5222e-04) (hash(x)=149092790)
703
+ 5830 train 4.190702 (lr=3.5068e-04) (hash(x)=148269833)
704
+ 5840 train 4.124129 (lr=3.4915e-04) (hash(x)=163714738)
705
+ 5850 train 4.103542 (lr=3.4762e-04) (hash(x)=144111890)
706
+ 5860 train 4.069653 (lr=3.4609e-04) (hash(x)=170702475)
707
+ 5870 train 4.011530 (lr=3.4456e-04) (hash(x)=155970835)
708
+ 5880 train 4.016212 (lr=3.4304e-04) (hash(x)=146695463)
709
+ 5890 train 4.073385 (lr=3.4152e-04) (hash(x)=151603187)
710
+ 5900 val loss 4.1133
711
+ 5900 val perplexity 61.1492
712
+ 5900 train 4.089013 (lr=3.4000e-04) (hash(x)=159763910)
713
+ 5910 train 4.133032 (lr=3.3849e-04) (hash(x)=149555230)
714
+ 5920 train 4.140339 (lr=3.3697e-04) (hash(x)=138048333)
715
+ 5930 train 4.037030 (lr=3.3547e-04) (hash(x)=148668256)
716
+ 5940 train 4.176332 (lr=3.3396e-04) (hash(x)=148352543)
717
+ 5950 train 4.187139 (lr=3.3246e-04) (hash(x)=163457488)
718
+ 5960 train 4.268230 (lr=3.3096e-04) (hash(x)=155306804)
719
+ 5970 train 4.115589 (lr=3.2947e-04) (hash(x)=146576143)
720
+ 5980 train 4.121903 (lr=3.2797e-04) (hash(x)=144668956)
721
+ 5990 train 4.089404 (lr=3.2649e-04) (hash(x)=156098617)
722
+ 6000 val loss 4.1007
723
+ 6000 val perplexity 60.3824
724
+ 6000 train 4.069320 (lr=3.2500e-04) (hash(x)=147640561)
725
+ 6010 train 4.134161 (lr=3.2352e-04) (hash(x)=161223074)
726
+ 6020 train 4.116009 (lr=3.2204e-04) (hash(x)=156308454)
727
+ 6030 train 4.098608 (lr=3.2056e-04) (hash(x)=142553975)
728
+ 6040 train 4.062779 (lr=3.1909e-04) (hash(x)=142039829)
729
+ 6050 train 4.132275 (lr=3.1762e-04) (hash(x)=156329296)
730
+ 6060 train 4.073678 (lr=3.1616e-04) (hash(x)=148505537)
731
+ 6070 train 3.962841 (lr=3.1469e-04) (hash(x)=151801272)
732
+ 6080 train 4.050081 (lr=3.1323e-04) (hash(x)=159908287)
733
+ 6090 train 4.031686 (lr=3.1178e-04) (hash(x)=148714721)
734
+ 6100 val loss 4.0938
735
+ 6100 val perplexity 59.9689
736
+ 6100 train 4.104977 (lr=3.1033e-04) (hash(x)=156613394)
737
+ 6110 train 3.957910 (lr=3.0888e-04) (hash(x)=143286705)
738
+ 6120 train 4.171110 (lr=3.0743e-04) (hash(x)=150191141)
739
+ 6130 train 4.042486 (lr=3.0599e-04) (hash(x)=149855624)
740
+ 6140 train 4.032149 (lr=3.0455e-04) (hash(x)=157312372)
741
+ 6150 train 4.158311 (lr=3.0312e-04) (hash(x)=148262730)
742
+ 6160 train 4.071771 (lr=3.0169e-04) (hash(x)=147032785)
743
+ 6170 train 3.994284 (lr=3.0026e-04) (hash(x)=161213064)
744
+ 6180 train 3.869380 (lr=2.9884e-04) (hash(x)=144538373)
745
+ 6190 train 3.922911 (lr=2.9742e-04) (hash(x)=144688493)
746
+ 6200 val loss 4.0933
747
+ 6200 val perplexity 59.9374
748
+ 6200 train 4.011249 (lr=2.9600e-04) (hash(x)=186221290)
749
+ 6210 train 4.004768 (lr=2.9459e-04) (hash(x)=139529352)
750
+ 6220 train 4.161993 (lr=2.9318e-04) (hash(x)=138081469)
751
+ 6230 train 4.040517 (lr=2.9177e-04) (hash(x)=152979409)
752
+ 6240 train 4.027634 (lr=2.9037e-04) (hash(x)=157848424)
753
+ 6250 train 3.912084 (lr=2.8897e-04) (hash(x)=150770915)
754
+ 6260 train 4.011302 (lr=2.8758e-04) (hash(x)=153769379)
755
+ 6270 train 3.945753 (lr=2.8619e-04) (hash(x)=153318361)
756
+ 6280 train 3.940867 (lr=2.8480e-04) (hash(x)=143366327)
757
+ 6290 train 4.106936 (lr=2.8342e-04) (hash(x)=160684845)
758
+ 6300 val loss 4.0772
759
+ 6300 val perplexity 58.9789
760
+ 6300 train 3.993979 (lr=2.8204e-04) (hash(x)=152081419)
761
+ 6310 train 3.946291 (lr=2.8067e-04) (hash(x)=146626797)
762
+ 6320 train 4.131120 (lr=2.7930e-04) (hash(x)=155291923)
763
+ 6330 train 4.010694 (lr=2.7793e-04) (hash(x)=140920951)
764
+ 6340 train 4.085267 (lr=2.7657e-04) (hash(x)=146642359)
765
+ 6350 train 4.058128 (lr=2.7521e-04) (hash(x)=160486550)
766
+ 6360 train 4.010235 (lr=2.7385e-04) (hash(x)=151833082)
767
+ 6370 train 4.002586 (lr=2.7250e-04) (hash(x)=124740362)
768
+ 6380 train 4.153225 (lr=2.7116e-04) (hash(x)=153952434)
769
+ 6390 train 4.192462 (lr=2.6981e-04) (hash(x)=145864314)
770
+ 6400 val loss 4.0805
771
+ 6400 val perplexity 59.1736
772
+ 6400 train 4.083189 (lr=2.6847e-04) (hash(x)=154808349)
773
+ 6410 train 4.056243 (lr=2.6714e-04) (hash(x)=155037739)
774
+ 6420 train 3.988791 (lr=2.6581e-04) (hash(x)=151155588)
775
+ 6430 train 3.882061 (lr=2.6448e-04) (hash(x)=146548556)
776
+ 6440 train 4.051258 (lr=2.6316e-04) (hash(x)=152706474)
777
+ 6450 train 4.043720 (lr=2.6184e-04) (hash(x)=154207794)
778
+ 6460 train 3.911148 (lr=2.6053e-04) (hash(x)=141590515)
779
+ 6470 train 3.953193 (lr=2.5922e-04) (hash(x)=157673109)
780
+ 6480 train 3.958526 (lr=2.5791e-04) (hash(x)=155842980)
781
+ 6490 train 4.045887 (lr=2.5661e-04) (hash(x)=143595736)
782
+ 6500 val loss 4.0784
783
+ 6500 val perplexity 59.0535
784
+ 6500 train 3.999872 (lr=2.5531e-04) (hash(x)=159437208)
785
+ 6510 train 4.011888 (lr=2.5402e-04) (hash(x)=142631317)
786
+ 6520 train 3.739606 (lr=2.5273e-04) (hash(x)=139009211)
787
+ 6530 train 4.083701 (lr=2.5145e-04) (hash(x)=145451137)
788
+ 6540 train 4.039228 (lr=2.5017e-04) (hash(x)=163532779)
789
+ 6550 train 3.948493 (lr=2.4889e-04) (hash(x)=158084674)
790
+ 6560 train 3.931911 (lr=2.4762e-04) (hash(x)=142422592)
791
+ 6570 train 3.846286 (lr=2.4635e-04) (hash(x)=141089289)
792
+ 6580 train 3.947164 (lr=2.4509e-04) (hash(x)=139712979)
793
+ 6590 train 3.851499 (lr=2.4383e-04) (hash(x)=145833805)
794
+ 6600 val loss 4.0779
795
+ 6600 val perplexity 59.0198
796
+ 6600 train 3.924870 (lr=2.4258e-04) (hash(x)=157933074)
797
+ 6610 train 3.850175 (lr=2.4133e-04) (hash(x)=142534769)
798
+ 6620 train 3.901227 (lr=2.4009e-04) (hash(x)=138456044)
799
+ 6630 train 4.027202 (lr=2.3884e-04) (hash(x)=145111052)
800
+ 6640 train 3.994487 (lr=2.3761e-04) (hash(x)=166954515)
801
+ 6650 train 3.954459 (lr=2.3638e-04) (hash(x)=146889543)
802
+ 6660 train 3.981487 (lr=2.3515e-04) (hash(x)=143253726)
803
+ 6670 train 3.902173 (lr=2.3393e-04) (hash(x)=144349844)
804
+ 6680 train 4.094649 (lr=2.3271e-04) (hash(x)=139911928)
805
+ 6690 train 4.280886 (lr=2.3150e-04) (hash(x)=143135862)
806
+ 6700 val loss 4.0665
807
+ 6700 val perplexity 58.3551
808
+ 6700 train 4.000284 (lr=2.3029e-04) (hash(x)=161560240)
809
+ 6710 train 4.086408 (lr=2.2909e-04) (hash(x)=150437730)
810
+ 6720 train 4.190146 (lr=2.2789e-04) (hash(x)=153035304)
811
+ 6730 train 3.980381 (lr=2.2669e-04) (hash(x)=139323796)
812
+ 6740 train 3.986463 (lr=2.2550e-04) (hash(x)=167437923)
813
+ 6750 train 3.950536 (lr=2.2432e-04) (hash(x)=143814596)
814
+ 6760 train 4.013819 (lr=2.2314e-04) (hash(x)=143755031)
815
+ 6770 train 3.975060 (lr=2.2196e-04) (hash(x)=142014038)
816
+ 6780 train 4.024720 (lr=2.2079e-04) (hash(x)=148465062)
817
+ 6790 train 4.059067 (lr=2.1963e-04) (hash(x)=154656877)
818
+ 6800 val loss 4.0642
819
+ 6800 val perplexity 58.2199
820
+ 6800 train 4.162954 (lr=2.1847e-04) (hash(x)=155424292)
821
+ 6810 train 3.805522 (lr=2.1731e-04) (hash(x)=147358081)
822
+ 6820 train 3.916002 (lr=2.1616e-04) (hash(x)=145387291)
823
+ 6830 train 3.872147 (lr=2.1501e-04) (hash(x)=146334068)
824
+ 6840 train 3.947189 (lr=2.1387e-04) (hash(x)=139563766)
825
+ 6850 train 3.972929 (lr=2.1273e-04) (hash(x)=139095691)
826
+ 6860 train 3.910185 (lr=2.1160e-04) (hash(x)=142093999)
827
+ 6870 train 4.032164 (lr=2.1047e-04) (hash(x)=148598823)
828
+ 6880 train 4.115886 (lr=2.0935e-04) (hash(x)=146486085)
829
+ 6890 train 3.894317 (lr=2.0823e-04) (hash(x)=165683212)
830
+ 6900 val loss 4.0669
831
+ 6900 val perplexity 58.3745
832
+ 6900 train 4.103228 (lr=2.0712e-04) (hash(x)=148561470)
833
+ 6910 train 3.790687 (lr=2.0602e-04) (hash(x)=136352432)
834
+ 6920 train 4.166777 (lr=2.0491e-04) (hash(x)=147640094)
835
+ 6930 train 4.045142 (lr=2.0382e-04) (hash(x)=152343827)
836
+ 6940 train 4.035655 (lr=2.0272e-04) (hash(x)=158787971)
837
+ 6950 train 4.313387 (lr=2.0164e-04) (hash(x)=178133125)
838
+ 6960 train 4.098657 (lr=2.0055e-04) (hash(x)=167302853)
839
+ 6970 train 4.035652 (lr=1.9948e-04) (hash(x)=143729435)
840
+ 6980 train 4.131123 (lr=1.9840e-04) (hash(x)=135857879)
841
+ 6990 train 4.075550 (lr=1.9734e-04) (hash(x)=155384510)
842
+ 7000 val loss 4.0542
843
+ 7000 val perplexity 57.6405
844
+ 7000 train 4.026678 (lr=1.9628e-04) (hash(x)=141527450)
845
+ 7010 train 4.066357 (lr=1.9522e-04) (hash(x)=148648498)
846
+ 7020 train 3.992153 (lr=1.9417e-04) (hash(x)=148073842)
847
+ 7030 train 4.125440 (lr=1.9312e-04) (hash(x)=160771614)
848
+ 7040 train 3.898851 (lr=1.9208e-04) (hash(x)=148361330)
849
+ 7050 train 3.881182 (lr=1.9104e-04) (hash(x)=151212224)
850
+ 7060 train 4.074076 (lr=1.9001e-04) (hash(x)=155925366)
851
+ 7070 train 3.924225 (lr=1.8899e-04) (hash(x)=149830576)
852
+ 7080 train 3.896394 (lr=1.8797e-04) (hash(x)=144849934)
853
+ 7090 train 3.986582 (lr=1.8695e-04) (hash(x)=150997807)
854
+ 7100 val loss 4.0497
855
+ 7100 val perplexity 57.3795
856
+ 7100 train 4.025570 (lr=1.8594e-04) (hash(x)=151066339)
857
+ 7110 train 3.931357 (lr=1.8494e-04) (hash(x)=147185781)
858
+ 7120 train 3.973876 (lr=1.8394e-04) (hash(x)=141765575)
859
+ 7130 train 3.986445 (lr=1.8294e-04) (hash(x)=144204513)
860
+ 7140 train 3.819539 (lr=1.8196e-04) (hash(x)=149326253)
861
+ 7150 train 3.976435 (lr=1.8097e-04) (hash(x)=150409811)
862
+ 7160 train 3.868832 (lr=1.7999e-04) (hash(x)=144790348)
863
+ 7170 train 3.985991 (lr=1.7902e-04) (hash(x)=150500939)
864
+ 7180 train 3.810583 (lr=1.7805e-04) (hash(x)=143552103)
865
+ 7190 train 3.857282 (lr=1.7709e-04) (hash(x)=157471075)
866
+ 7200 val loss 4.0533
867
+ 7200 val perplexity 57.5855
868
+ 7200 train 3.834300 (lr=1.7614e-04) (hash(x)=155231264)
869
+ 7210 train 3.930772 (lr=1.7519e-04) (hash(x)=150350401)
870
+ 7220 train 3.942129 (lr=1.7424e-04) (hash(x)=151952594)
871
+ 7230 train 4.025707 (lr=1.7330e-04) (hash(x)=145381804)
872
+ 7240 train 3.907091 (lr=1.7237e-04) (hash(x)=142651222)
873
+ 7250 train 4.016520 (lr=1.7144e-04) (hash(x)=136115279)
874
+ 7260 train 4.118497 (lr=1.7051e-04) (hash(x)=154536751)
875
+ 7270 train 4.071742 (lr=1.6959e-04) (hash(x)=148883836)
876
+ 7280 train 3.926810 (lr=1.6868e-04) (hash(x)=150606754)
877
+ 7290 train 4.042694 (lr=1.6777e-04) (hash(x)=165845807)
878
+ 7300 val loss 4.0427
879
+ 7300 val perplexity 56.9806
880
+ 7300 train 4.115440 (lr=1.6687e-04) (hash(x)=150281149)
881
+ 7310 train 4.068348 (lr=1.6598e-04) (hash(x)=159929281)
882
+ 7320 train 3.894274 (lr=1.6509e-04) (hash(x)=143074269)
883
+ 7330 train 4.053135 (lr=1.6420e-04) (hash(x)=147369269)
884
+ 7340 train 4.103865 (lr=1.6332e-04) (hash(x)=139286601)
885
+ 7350 train 4.004265 (lr=1.6245e-04) (hash(x)=145263048)
886
+ 7360 train 3.859831 (lr=1.6158e-04) (hash(x)=144626817)
887
+ 7370 train 4.053353 (lr=1.6072e-04) (hash(x)=168989813)
888
+ 7380 train 4.033418 (lr=1.5986e-04) (hash(x)=144790656)
889
+ 7390 train 3.841959 (lr=1.5901e-04) (hash(x)=141279761)
890
+ 7400 val loss 4.0393
891
+ 7400 val perplexity 56.7858
892
+ 7400 train 3.972180 (lr=1.5816e-04) (hash(x)=148421717)
893
+ 7410 train 3.969993 (lr=1.5732e-04) (hash(x)=141241548)
894
+ 7420 train 4.040917 (lr=1.5649e-04) (hash(x)=162434228)
895
+ 7430 train 3.989641 (lr=1.5566e-04) (hash(x)=156312086)
896
+ 7440 train 3.946321 (lr=1.5484e-04) (hash(x)=139803064)
897
+ 7450 train 3.652802 (lr=1.5402e-04) (hash(x)=156361394)
898
+ 7460 train 3.956328 (lr=1.5321e-04) (hash(x)=141142183)
899
+ 7470 train 3.783363 (lr=1.5241e-04) (hash(x)=156701683)
900
+ 7480 train 3.960618 (lr=1.5161e-04) (hash(x)=148549917)
901
+ 7490 train 3.836790 (lr=1.5081e-04) (hash(x)=143002119)
902
+ 7500 val loss 4.0402
903
+ 7500 val perplexity 56.8351
904
+ 7500 train 3.846091 (lr=1.5002e-04) (hash(x)=146921118)
905
+ 7510 train 3.853552 (lr=1.4924e-04) (hash(x)=148592146)
906
+ 7520 train 4.002445 (lr=1.4847e-04) (hash(x)=148456120)
907
+ 7530 train 4.152397 (lr=1.4769e-04) (hash(x)=163865945)
908
+ 7540 train 4.051622 (lr=1.4693e-04) (hash(x)=160753039)
909
+ 7550 train 4.053471 (lr=1.4617e-04) (hash(x)=150275989)
910
+ 7560 train 4.084820 (lr=1.4542e-04) (hash(x)=141749735)
911
+ 7570 train 4.047843 (lr=1.4467e-04) (hash(x)=148449999)
912
+ 7580 train 3.932255 (lr=1.4393e-04) (hash(x)=139153889)
913
+ 7590 train 4.040966 (lr=1.4319e-04) (hash(x)=147418540)
914
+ 7600 val loss 4.0296
915
+ 7600 val perplexity 56.2388
916
+ 7600 train 4.065164 (lr=1.4246e-04) (hash(x)=150660048)
917
+ 7610 train 4.045284 (lr=1.4174e-04) (hash(x)=155369080)
918
+ 7620 train 3.942210 (lr=1.4102e-04) (hash(x)=151926309)
919
+ 7630 train 4.025853 (lr=1.4031e-04) (hash(x)=149549080)
920
+ 7640 train 3.911004 (lr=1.3960e-04) (hash(x)=140642882)
921
+ 7650 train 4.193230 (lr=1.3890e-04) (hash(x)=140218502)
922
+ 7660 train 4.028582 (lr=1.3821e-04) (hash(x)=153198276)
923
+ 7670 train 3.886666 (lr=1.3752e-04) (hash(x)=150706960)
924
+ 7680 train 4.033081 (lr=1.3684e-04) (hash(x)=152025509)
925
+ 7690 train 3.818812 (lr=1.3616e-04) (hash(x)=147288976)
926
+ 7700 val loss 4.0299
927
+ 7700 val perplexity 56.2528
928
+ 7700 train 3.926182 (lr=1.3549e-04) (hash(x)=148059852)
929
+ 7710 train 3.744325 (lr=1.3483e-04) (hash(x)=158443334)
930
+ 7720 train 3.854547 (lr=1.3417e-04) (hash(x)=143467085)
931
+ 7730 train 3.807595 (lr=1.3352e-04) (hash(x)=138439550)
932
+ 7740 train 3.944607 (lr=1.3287e-04) (hash(x)=145878152)
933
+ 7750 train 3.791583 (lr=1.3223e-04) (hash(x)=136874128)
934
+ 7760 train 3.821380 (lr=1.3160e-04) (hash(x)=148856154)
935
+ 7770 train 3.846889 (lr=1.3097e-04) (hash(x)=146982119)
936
+ 7780 train 3.945842 (lr=1.3035e-04) (hash(x)=159846779)
937
+ 7790 train 3.628672 (lr=1.2974e-04) (hash(x)=141501838)
938
+ 7800 val loss 4.0330
939
+ 7800 val perplexity 56.4289
940
+ 7800 train 3.761494 (lr=1.2913e-04) (hash(x)=148331002)
941
+ 7810 train 4.069176 (lr=1.2852e-04) (hash(x)=150692304)
942
+ 7820 train 3.936056 (lr=1.2793e-04) (hash(x)=156841183)
943
+ 7830 train 3.982287 (lr=1.2733e-04) (hash(x)=139594008)
944
+ 7840 train 3.911881 (lr=1.2675e-04) (hash(x)=149700138)
945
+ 7850 train 3.937826 (lr=1.2617e-04) (hash(x)=149327044)
946
+ 7860 train 4.105933 (lr=1.2560e-04) (hash(x)=148315418)
947
+ 7870 train 4.047635 (lr=1.2503e-04) (hash(x)=153431778)
948
+ 7880 train 3.890493 (lr=1.2447e-04) (hash(x)=145049799)
949
+ 7890 train 4.015502 (lr=1.2392e-04) (hash(x)=150265096)
950
+ 7900 val loss 4.0229
951
+ 7900 val perplexity 55.8644
952
+ 7900 train 3.848037 (lr=1.2337e-04) (hash(x)=164923883)
953
+ 7910 train 3.901191 (lr=1.2283e-04) (hash(x)=139315738)
954
+ 7920 train 3.960188 (lr=1.2229e-04) (hash(x)=153031394)
955
+ 7930 train 3.744658 (lr=1.2176e-04) (hash(x)=148835859)
956
+ 7940 train 3.872959 (lr=1.2124e-04) (hash(x)=146875217)
957
+ 7950 train 3.954048 (lr=1.2072e-04) (hash(x)=151003885)
958
+ 7960 train 3.865334 (lr=1.2021e-04) (hash(x)=141885471)
959
+ 7970 train 4.062125 (lr=1.1970e-04) (hash(x)=150541583)
960
+ 7980 train 3.766766 (lr=1.1921e-04) (hash(x)=138626492)
961
+ 7990 train 3.954561 (lr=1.1871e-04) (hash(x)=149935635)
962
+ 8000 val loss 4.0219
963
+ 8000 val perplexity 55.8050
964
+ 8000 train 3.923151 (lr=1.1823e-04) (hash(x)=143545384)
965
+ 8010 train 3.782924 (lr=1.1775e-04) (hash(x)=144689366)
966
+ 8020 train 3.952917 (lr=1.1728e-04) (hash(x)=151299532)
967
+ 8030 train 3.899816 (lr=1.1681e-04) (hash(x)=131300239)
968
+ 8040 train 3.798137 (lr=1.1635e-04) (hash(x)=146715955)
969
+ 8050 train 3.722908 (lr=1.1589e-04) (hash(x)=135376656)
970
+ 8060 train 3.807712 (lr=1.1544e-04) (hash(x)=141991433)
971
+ 8070 train 3.757957 (lr=1.1500e-04) (hash(x)=140260574)
972
+ 8080 train 3.734957 (lr=1.1457e-04) (hash(x)=151492896)
973
+ 8090 train 3.940652 (lr=1.1414e-04) (hash(x)=143333635)
974
+ 8100 val loss 4.0244
975
+ 8100 val perplexity 55.9457
976
+ 8100 train 3.820928 (lr=1.1371e-04) (hash(x)=160686959)
977
+ 8110 train 3.818959 (lr=1.1330e-04) (hash(x)=141145704)
978
+ 8120 train 4.059864 (lr=1.1289e-04) (hash(x)=146967795)
979
+ 8130 train 3.886271 (lr=1.1248e-04) (hash(x)=147844486)
980
+ 8140 train 3.940571 (lr=1.1209e-04) (hash(x)=141860136)
981
+ 8150 train 4.073424 (lr=1.1169e-04) (hash(x)=147696006)
982
+ 8160 train 3.975924 (lr=1.1131e-04) (hash(x)=151497110)
983
+ 8170 train 3.984741 (lr=1.1093e-04) (hash(x)=140881859)
984
+ 8180 train 3.928429 (lr=1.1056e-04) (hash(x)=158865574)
985
+ 8190 train 3.884697 (lr=1.1019e-04) (hash(x)=161247161)
986
+ 8200 val loss 4.0193
987
+ 8200 val perplexity 55.6609
988
+ 8200 train 3.851676 (lr=1.0983e-04) (hash(x)=156501889)
989
+ 8210 train 3.974160 (lr=1.0948e-04) (hash(x)=158384162)
990
+ 8220 train 4.017684 (lr=1.0913e-04) (hash(x)=154815062)
991
+ 8230 train 3.895302 (lr=1.0879e-04) (hash(x)=145018315)
992
+ 8240 train 3.929684 (lr=1.0846e-04) (hash(x)=161659129)
993
+ 8250 train 3.902359 (lr=1.0813e-04) (hash(x)=139863367)
994
+ 8260 train 3.857470 (lr=1.0781e-04) (hash(x)=149569663)
995
+ 8270 train 3.612097 (lr=1.0750e-04) (hash(x)=162234378)
996
+ 8280 train 3.996893 (lr=1.0719e-04) (hash(x)=144371378)
997
+ 8290 train 3.896417 (lr=1.0689e-04) (hash(x)=148120258)
998
+ 8300 val loss 4.0199
999
+ 8300 val perplexity 55.6958
1000
+ 8300 train 3.891152 (lr=1.0659e-04) (hash(x)=142716875)
1001
+ 8310 train 3.912412 (lr=1.0630e-04) (hash(x)=166789234)
1002
+ 8320 train 3.978505 (lr=1.0602e-04) (hash(x)=150613211)
1003
+ 8330 train 3.856364 (lr=1.0574e-04) (hash(x)=139895391)
1004
+ 8340 train 3.970069 (lr=1.0547e-04) (hash(x)=152011203)
1005
+ 8350 train 3.924445 (lr=1.0521e-04) (hash(x)=156010875)
1006
+ 8360 train 3.925253 (lr=1.0495e-04) (hash(x)=152988051)
1007
+ 8370 train 4.098245 (lr=1.0470e-04) (hash(x)=155144327)
1008
+ 8380 train 3.972328 (lr=1.0446e-04) (hash(x)=133642309)
1009
+ 8390 train 4.023841 (lr=1.0422e-04) (hash(x)=149745335)
1010
+ 8400 val loss 4.0115
1011
+ 8400 val perplexity 55.2290
1012
+ 8400 train 4.001040 (lr=1.0399e-04) (hash(x)=154436684)
1013
+ 8410 train 4.052238 (lr=1.0377e-04) (hash(x)=147946630)
1014
+ 8420 train 3.958612 (lr=1.0355e-04) (hash(x)=153947058)
1015
+ 8430 train 4.046863 (lr=1.0334e-04) (hash(x)=159821040)
1016
+ 8440 train 4.014922 (lr=1.0313e-04) (hash(x)=149127348)
1017
+ 8450 train 4.021274 (lr=1.0293e-04) (hash(x)=149421871)
1018
+ 8460 train 3.986947 (lr=1.0274e-04) (hash(x)=142621577)
1019
+ 8470 train 3.981867 (lr=1.0256e-04) (hash(x)=148978213)
1020
+ 8480 train 3.803187 (lr=1.0238e-04) (hash(x)=139959537)
1021
+ 8490 train 3.899957 (lr=1.0220e-04) (hash(x)=144232498)
1022
+ 8500 val loss 4.0109
1023
+ 8500 val perplexity 55.1978
1024
+ 8500 train 4.119369 (lr=1.0204e-04) (hash(x)=147965839)
1025
+ 8510 train 3.992701 (lr=1.0188e-04) (hash(x)=147679836)
1026
+ 8520 train 3.869359 (lr=1.0172e-04) (hash(x)=147486097)
1027
+ 8530 train 4.007263 (lr=1.0158e-04) (hash(x)=151655265)
1028
+ 8540 train 3.911099 (lr=1.0144e-04) (hash(x)=150866632)
1029
+ 8550 train 4.055044 (lr=1.0130e-04) (hash(x)=144355059)
1030
+ 8560 train 3.912959 (lr=1.0118e-04) (hash(x)=145518478)
1031
+ 8570 train 4.066995 (lr=1.0106e-04) (hash(x)=178973323)
1032
+ 8580 train 3.986718 (lr=1.0094e-04) (hash(x)=152773969)
1033
+ 8590 train 3.779989 (lr=1.0083e-04) (hash(x)=141805508)
1034
+ 8600 val loss 4.0144
1035
+ 8600 val perplexity 55.3883
1036
+ 8600 train 3.838955 (lr=1.0073e-04) (hash(x)=145228097)
1037
+ 8610 train 3.854586 (lr=1.0064e-04) (hash(x)=149525694)
1038
+ 8620 train 3.921444 (lr=1.0055e-04) (hash(x)=151651655)
1039
+ 8630 train 4.039790 (lr=1.0047e-04) (hash(x)=144918643)
1040
+ 8640 train 3.892104 (lr=1.0039e-04) (hash(x)=149721879)
1041
+ 8650 train 3.965341 (lr=1.0033e-04) (hash(x)=157684127)
1042
+ 8660 train 3.963883 (lr=1.0026e-04) (hash(x)=152426036)
1043
+ 8670 train 3.883737 (lr=1.0021e-04) (hash(x)=141396342)
1044
+ 8680 train 3.976582 (lr=1.0016e-04) (hash(x)=146459856)
1045
+ 8690 train 4.060663 (lr=1.0012e-04) (hash(x)=151766124)
1046
+ 8700 val loss 4.0055
1047
+ 8700 val perplexity 54.9009
1048
+ 8700 train 4.218400 (lr=1.0008e-04) (hash(x)=152910357)
1049
+ 8710 train 4.006836 (lr=1.0005e-04) (hash(x)=149724642)
1050
+ 8720 train 3.940507 (lr=1.0003e-04) (hash(x)=152483840)
1051
+ 8730 train 3.922172 (lr=1.0001e-04) (hash(x)=147289415)
1052
+ 8740 train 3.951164 (lr=1.0000e-04) (hash(x)=152254131)
1053
+ 8749 val loss 4.0051
1054
+ 8749 val perplexity 54.8784
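The val perplexity entries in log2.txt are consistent with perplexity = exp(val loss). A quick check against the final entry above (assumed relationship, not stated in the log itself):

```python
import math

# Final validation entries from log2.txt: loss 4.0051, perplexity 54.8784.
val_loss = 4.0051
print(math.exp(val_loss))  # ~54.88, matching the logged 54.8784
```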
lr10e-4_total_batch_size61440_baseline_seed1338/model_08749.pt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a855e98537e4c4ca3ac969d3f541fe65e3b19a163c51bcecb939a520577fece2
3
+ size 92843394
lr10e-4_total_batch_size61440_baseline_seed1338/optimizer_08749.pt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:19bbbec1821b1ab7efb85522deb9cac5780d8e8de9d8d89b2b25f70ec1a41466
3
+ size 179406214
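A hedged sketch of how the step-8749 checkpoint files committed above might be reloaded with PyTorch. Only the LFS pointers are visible here, so whether each file holds a raw state_dict or a wrapped object is an assumption; key access should be adjusted to match the training code.

```python
import torch

# Load the step-8749 artifacts committed above. The internal structure of each
# saved object is assumed, not confirmed by this commit view.
prefix = "lr10e-4_total_batch_size61440_baseline_seed1338"
model_state = torch.load(f"{prefix}/model_08749.pt", map_location="cpu")
optimizer_state = torch.load(f"{prefix}/optimizer_08749.pt", map_location="cpu")
dataloader_state = torch.load(f"{prefix}/dataloader_08749.pt", map_location="cpu")

print(type(model_state), type(optimizer_state), type(dataloader_state))
```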