andrew-healey committed on
Commit f101627 · verified · 1 Parent(s): 367f793

Upload folder using huggingface_hub
n_heads12_lr10e-4_total_batch_size61440_seed1338/args.json ADDED
@@ -0,0 +1 @@
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_11/n_heads12_lr10e-4_total_batch_size61440_seed1338", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 12, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_11", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1338, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 60, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.001, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "n_heads12_10e-4_61440", "n_embd": 768}
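The config pins batch_size=60 and seq_len=256 against total_batch_size=61440. As a quick sanity check — a minimal sketch, assuming total_batch_size and batch_size * seq_len are both counted in tokens, which the numbers support since 61440 = 4 × 60 × 256 — the implied gradient-accumulation factor can be read straight out of args.json:

```python
import json

# Hedged sketch (not the training repo's own tooling): load the run config
# and derive the gradient-accumulation factor, assuming token-denominated sizes.
with open("n_heads12_lr10e-4_total_batch_size61440_seed1338/args.json") as f:
    args = json.load(f)

tokens_per_micro_batch = args["batch_size"] * args["seq_len"]  # 60 * 256 = 15360
grad_accum_steps = args["total_batch_size"] // tokens_per_micro_batch
print(grad_accum_steps)  # 61440 // 15360 = 4
```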
n_heads12_lr10e-4_total_batch_size61440_seed1338/dataloader_08749.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9bbf5b7eef643b850a13a0d3881f9b995af25e67f722e39e1853dc09a06ad1dc
+ size 964
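The three lines above are a standard Git LFS pointer: the real 964-byte blob lives in LFS storage, addressed by its sha256 oid. Pointer files are plain `key value` text, so they can be read without git, as in this small sketch (the path is this repo's file; the parser itself is generic):

```python
def parse_lfs_pointer(path: str) -> dict:
    """Parse a Git LFS pointer file into its key/value fields."""
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

ptr = parse_lfs_pointer(
    "n_heads12_lr10e-4_total_batch_size61440_seed1338/dataloader_08749.pt"
)
print(ptr["oid"])   # sha256:9bbf5b7e... -> content address of the real blob
print(ptr["size"])  # 964
```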
n_heads12_lr10e-4_total_batch_size61440_seed1338/log2.txt ADDED
@@ -0,0 +1,1054 @@
+ max_steps: 8750
+ 0 val loss 10.9899
+ 0 val perplexity 59271.3594
+ 0 train 10.996866 (lr=2.0000e-06) (hash(x)=73026074)
+ 10 train 9.878416 (lr=2.2000e-05) (hash(x)=81408174)
+ 20 train 9.437363 (lr=4.2000e-05) (hash(x)=73752420)
+ 30 train 9.008246 (lr=6.2000e-05) (hash(x)=77647299)
+ 40 train 8.589837 (lr=8.2000e-05) (hash(x)=76104329)
+ 50 train 8.106159 (lr=1.0200e-04) (hash(x)=69335494)
+ 60 train 7.770695 (lr=1.2200e-04) (hash(x)=72716449)
+ 70 train 7.610338 (lr=1.4200e-04) (hash(x)=71125105)
+ 80 train 7.492192 (lr=1.6200e-04) (hash(x)=74407559)
+ 90 train 7.331594 (lr=1.8200e-04) (hash(x)=78245164)
+ 100 val loss 7.3030
+ 100 val perplexity 1484.6930
+ 100 train 7.273099 (lr=2.0200e-04) (hash(x)=66975638)
+ 110 train 7.049629 (lr=2.2200e-04) (hash(x)=72573036)
+ 120 train 6.967019 (lr=2.4200e-04) (hash(x)=72876251)
+ 130 train 6.937787 (lr=2.6200e-04) (hash(x)=79969711)
+ 140 train 6.822509 (lr=2.8200e-04) (hash(x)=76473027)
+ 150 train 6.709403 (lr=3.0200e-04) (hash(x)=72199769)
+ 160 train 6.526720 (lr=3.2200e-04) (hash(x)=79575844)
+ 170 train 6.426419 (lr=3.4200e-04) (hash(x)=65860740)
+ 180 train 6.399967 (lr=3.6200e-04) (hash(x)=79880170)
+ 190 train 6.293023 (lr=3.8200e-04) (hash(x)=78465601)
+ 200 val loss 6.4675
+ 200 val perplexity 643.8866
+ 200 train 6.200614 (lr=4.0200e-04) (hash(x)=72711480)
+ 210 train 6.149294 (lr=4.2200e-04) (hash(x)=71194058)
+ 220 train 6.341296 (lr=4.4200e-04) (hash(x)=92783037)
+ 230 train 6.089361 (lr=4.6200e-04) (hash(x)=73611161)
+ 240 train 5.879336 (lr=4.8200e-04) (hash(x)=74427917)
+ 250 train 5.897179 (lr=5.0200e-04) (hash(x)=65305370)
+ 260 train 5.989441 (lr=5.2200e-04) (hash(x)=81378465)
+ 270 train 5.762376 (lr=5.4200e-04) (hash(x)=72738754)
+ 280 train 6.082486 (lr=5.6200e-04) (hash(x)=82172761)
+ 290 train 6.245429 (lr=5.8200e-04) (hash(x)=70024961)
+ 300 val loss 6.0048
+ 300 val perplexity 405.3865
+ 300 train 5.950438 (lr=6.0200e-04) (hash(x)=74275986)
+ 310 train 6.054238 (lr=6.2200e-04) (hash(x)=76295038)
+ 320 train 6.028188 (lr=6.4200e-04) (hash(x)=74741156)
+ 330 train 5.800296 (lr=6.6200e-04) (hash(x)=71941043)
+ 340 train 5.910799 (lr=6.8200e-04) (hash(x)=75020464)
+ 350 train 5.912827 (lr=7.0200e-04) (hash(x)=64712930)
+ 360 train 6.203627 (lr=7.2200e-04) (hash(x)=68519941)
+ 370 train 5.780438 (lr=7.4200e-04) (hash(x)=75355289)
+ 380 train 5.762534 (lr=7.6200e-04) (hash(x)=66796276)
+ 390 train 5.833106 (lr=7.8200e-04) (hash(x)=81002698)
+ 400 val loss 5.6717
+ 400 val perplexity 290.5200
+ 400 train 5.530255 (lr=8.0200e-04) (hash(x)=76354037)
+ 410 train 5.791349 (lr=8.2200e-04) (hash(x)=82242082)
+ 420 train 5.521913 (lr=8.4200e-04) (hash(x)=77642063)
+ 430 train 5.445039 (lr=8.6200e-04) (hash(x)=72281244)
+ 440 train 5.613255 (lr=8.8200e-04) (hash(x)=79666830)
+ 450 train 5.516862 (lr=9.0200e-04) (hash(x)=76014248)
+ 460 train 5.642200 (lr=9.2200e-04) (hash(x)=81129062)
+ 470 train 5.453766 (lr=9.4200e-04) (hash(x)=69374949)
+ 480 train 5.465752 (lr=9.6200e-04) (hash(x)=72137706)
+ 490 train 5.478282 (lr=9.8200e-04) (hash(x)=74403781)
+ 500 val loss 5.4677
+ 500 val perplexity 236.9264
+ 500 train 5.378243 (lr=1.0000e-03) (hash(x)=78816019)
+ 510 train 5.413568 (lr=1.0000e-03) (hash(x)=72193331)
+ 520 train 5.407718 (lr=9.9999e-04) (hash(x)=79344438)
+ 530 train 5.388953 (lr=9.9997e-04) (hash(x)=78092289)
+ 540 train 5.375805 (lr=9.9995e-04) (hash(x)=74733731)
+ 550 train 5.339091 (lr=9.9992e-04) (hash(x)=69920645)
+ 560 train 5.170500 (lr=9.9988e-04) (hash(x)=76131735)
+ 570 train 5.257198 (lr=9.9984e-04) (hash(x)=78418293)
+ 580 train 5.068708 (lr=9.9979e-04) (hash(x)=73699256)
+ 590 train 5.071388 (lr=9.9974e-04) (hash(x)=77505664)
+ 600 val loss 5.2762
+ 600 val perplexity 195.6227
+ 600 train 5.010283 (lr=9.9967e-04) (hash(x)=71823058)
+ 610 train 5.046767 (lr=9.9961e-04) (hash(x)=78450672)
+ 620 train 4.940421 (lr=9.9953e-04) (hash(x)=75456256)
+ 630 train 4.914159 (lr=9.9945e-04) (hash(x)=68515074)
+ 640 train 4.981021 (lr=9.9936e-04) (hash(x)=71968654)
+ 650 train 4.904106 (lr=9.9927e-04) (hash(x)=72586963)
+ 660 train 4.861030 (lr=9.9917e-04) (hash(x)=67398625)
+ 670 train 4.874962 (lr=9.9906e-04) (hash(x)=82737129)
+ 680 train 5.116166 (lr=9.9894e-04) (hash(x)=78920914)
+ 690 train 5.134350 (lr=9.9882e-04) (hash(x)=59859106)
+ 700 val loss 5.1209
+ 700 val perplexity 167.4840
+ 700 train 5.015658 (lr=9.9870e-04) (hash(x)=76856514)
+ 710 train 5.064469 (lr=9.9856e-04) (hash(x)=73530680)
+ 720 train 5.152414 (lr=9.9842e-04) (hash(x)=74382454)
+ 730 train 5.000039 (lr=9.9828e-04) (hash(x)=80641678)
+ 740 train 5.147391 (lr=9.9812e-04) (hash(x)=82419404)
+ 750 train 5.105894 (lr=9.9796e-04) (hash(x)=70898356)
+ 760 train 5.059180 (lr=9.9780e-04) (hash(x)=76422181)
+ 770 train 5.017817 (lr=9.9762e-04) (hash(x)=83711024)
+ 780 train 4.930438 (lr=9.9744e-04) (hash(x)=74365746)
+ 790 train 4.879364 (lr=9.9726e-04) (hash(x)=73104424)
+ 800 val loss 4.9352
+ 800 val perplexity 139.0995
+ 800 train 5.063818 (lr=9.9707e-04) (hash(x)=73589083)
+ 810 train 4.866418 (lr=9.9687e-04) (hash(x)=69132306)
+ 820 train 4.920248 (lr=9.9666e-04) (hash(x)=75056169)
+ 830 train 4.800425 (lr=9.9645e-04) (hash(x)=70032437)
+ 840 train 4.815232 (lr=9.9623e-04) (hash(x)=73178845)
+ 850 train 4.810349 (lr=9.9601e-04) (hash(x)=69463059)
+ 860 train 4.890258 (lr=9.9578e-04) (hash(x)=74635339)
+ 870 train 5.054516 (lr=9.9554e-04) (hash(x)=73964810)
+ 880 train 4.784234 (lr=9.9530e-04) (hash(x)=79912096)
+ 890 train 4.762746 (lr=9.9505e-04) (hash(x)=84832110)
+ 900 val loss 4.7948
+ 900 val perplexity 120.8858
+ 900 train 4.760536 (lr=9.9479e-04) (hash(x)=71823388)
+ 910 train 4.714203 (lr=9.9453e-04) (hash(x)=69541745)
+ 920 train 4.652880 (lr=9.9426e-04) (hash(x)=67018969)
+ 930 train 4.604096 (lr=9.9398e-04) (hash(x)=73346110)
+ 940 train 4.674997 (lr=9.9370e-04) (hash(x)=72105603)
+ 950 train 4.572038 (lr=9.9341e-04) (hash(x)=71803352)
+ 960 train 4.733698 (lr=9.9311e-04) (hash(x)=75135621)
+ 970 train 4.526983 (lr=9.9281e-04) (hash(x)=73334774)
+ 980 train 4.494944 (lr=9.9250e-04) (hash(x)=80861275)
+ 990 train 4.545967 (lr=9.9219e-04) (hash(x)=77124068)
+ 1000 val loss 4.7104
+ 1000 val perplexity 111.0961
+ 1000 train 4.433167 (lr=9.9187e-04) (hash(x)=78127630)
+ 1010 train 4.377372 (lr=9.9154e-04) (hash(x)=73670162)
+ 1020 train 4.492303 (lr=9.9121e-04) (hash(x)=77732174)
+ 1030 train 4.339534 (lr=9.9087e-04) (hash(x)=79418857)
+ 1040 train 4.380932 (lr=9.9052e-04) (hash(x)=77732710)
+ 1050 train 4.353242 (lr=9.9017e-04) (hash(x)=69751515)
+ 1060 train 4.329982 (lr=9.8981e-04) (hash(x)=71788756)
+ 1070 train 4.538909 (lr=9.8944e-04) (hash(x)=72944541)
+ 1080 train 4.613462 (lr=9.8907e-04) (hash(x)=75792144)
+ 1090 train 4.559521 (lr=9.8869e-04) (hash(x)=78746868)
+ 1100 val loss 4.6126
+ 1100 val perplexity 100.7506
+ 1100 train 4.641263 (lr=9.8831e-04) (hash(x)=70879945)
+ 1110 train 4.627989 (lr=9.8791e-04) (hash(x)=77572329)
+ 1120 train 5.074735 (lr=9.8752e-04) (hash(x)=68749754)
+ 1130 train 4.607842 (lr=9.8711e-04) (hash(x)=78632440)
+ 1140 train 4.538727 (lr=9.8670e-04) (hash(x)=73565080)
+ 1150 train 4.599656 (lr=9.8629e-04) (hash(x)=81594247)
+ 1160 train 4.592741 (lr=9.8586e-04) (hash(x)=63200457)
+ 1170 train 4.641797 (lr=9.8543e-04) (hash(x)=75895085)
+ 1180 train 4.660604 (lr=9.8500e-04) (hash(x)=71402056)
+ 1190 train 4.604215 (lr=9.8456e-04) (hash(x)=75263955)
+ 1200 val loss 4.5415
+ 1200 val perplexity 93.8294
+ 1200 train 4.598189 (lr=9.8411e-04) (hash(x)=73579627)
+ 1210 train 4.595469 (lr=9.8365e-04) (hash(x)=79106153)
+ 1220 train 4.654102 (lr=9.8319e-04) (hash(x)=78475838)
+ 1230 train 4.711361 (lr=9.8272e-04) (hash(x)=72129619)
+ 1240 train 4.623965 (lr=9.8225e-04) (hash(x)=58067507)
+ 1250 train 4.432872 (lr=9.8177e-04) (hash(x)=78666722)
+ 1260 train 4.517206 (lr=9.8129e-04) (hash(x)=74964097)
+ 1270 train 4.492600 (lr=9.8079e-04) (hash(x)=72774823)
+ 1280 train 4.535753 (lr=9.8030e-04) (hash(x)=69959055)
+ 1290 train 4.377015 (lr=9.7979e-04) (hash(x)=76629730)
+ 1300 val loss 4.4865
+ 1300 val perplexity 88.8096
+ 1300 train 4.413606 (lr=9.7928e-04) (hash(x)=72183284)
+ 1310 train 4.612698 (lr=9.7876e-04) (hash(x)=83147722)
+ 1320 train 4.556895 (lr=9.7824e-04) (hash(x)=69788032)
+ 1330 train 4.280804 (lr=9.7771e-04) (hash(x)=66629732)
+ 1340 train 4.352966 (lr=9.7717e-04) (hash(x)=72542387)
+ 1350 train 4.410437 (lr=9.7663e-04) (hash(x)=76517375)
+ 1360 train 4.400441 (lr=9.7608e-04) (hash(x)=74401519)
+ 1370 train 4.214207 (lr=9.7553e-04) (hash(x)=78409356)
+ 1380 train 4.176289 (lr=9.7497e-04) (hash(x)=77626183)
+ 1390 train 4.131511 (lr=9.7440e-04) (hash(x)=72496033)
+ 1400 val loss 4.4495
+ 1400 val perplexity 85.5817
+ 1400 train 4.209481 (lr=9.7383e-04) (hash(x)=73708759)
+ 1410 train 4.204567 (lr=9.7325e-04) (hash(x)=73706234)
+ 1420 train 4.080943 (lr=9.7267e-04) (hash(x)=75918530)
+ 1430 train 3.968040 (lr=9.7207e-04) (hash(x)=76806498)
+ 1440 train 4.197422 (lr=9.7148e-04) (hash(x)=77310523)
+ 1450 train 4.163127 (lr=9.7087e-04) (hash(x)=75964248)
+ 1460 train 4.009656 (lr=9.7026e-04) (hash(x)=88549155)
+ 1470 train 4.501064 (lr=9.6965e-04) (hash(x)=78654420)
+ 1480 train 4.328508 (lr=9.6903e-04) (hash(x)=53433500)
+ 1490 train 4.356875 (lr=9.6840e-04) (hash(x)=75862069)
+ 1500 val loss 4.4325
+ 1500 val perplexity 84.1424
+ 1500 train 4.486503 (lr=9.6777e-04) (hash(x)=76037312)
+ 1510 train 4.237566 (lr=9.6713e-04) (hash(x)=78654499)
+ 1520 train 4.323501 (lr=9.6648e-04) (hash(x)=72319550)
+ 1530 train 4.301304 (lr=9.6583e-04) (hash(x)=78901150)
+ 1540 train 4.351883 (lr=9.6517e-04) (hash(x)=74848448)
+ 1550 train 4.381869 (lr=9.6451e-04) (hash(x)=77019229)
+ 1560 train 4.462658 (lr=9.6384e-04) (hash(x)=74200996)
+ 1570 train 4.365378 (lr=9.6316e-04) (hash(x)=76717682)
+ 1580 train 4.258556 (lr=9.6248e-04) (hash(x)=74662317)
+ 1590 train 4.428346 (lr=9.6179e-04) (hash(x)=76066438)
+ 1600 val loss 4.3716
+ 1600 val perplexity 79.1720
+ 1600 train 4.570393 (lr=9.6110e-04) (hash(x)=76123488)
+ 1610 train 4.458269 (lr=9.6040e-04) (hash(x)=73211013)
+ 1620 train 4.327531 (lr=9.5969e-04) (hash(x)=80389252)
+ 1630 train 4.320492 (lr=9.5898e-04) (hash(x)=73449158)
+ 1640 train 4.350865 (lr=9.5826e-04) (hash(x)=75060644)
+ 1650 train 4.280501 (lr=9.5754e-04) (hash(x)=68966493)
+ 1660 train 4.356060 (lr=9.5681e-04) (hash(x)=73496223)
+ 1670 train 4.223134 (lr=9.5607e-04) (hash(x)=86210083)
+ 1680 train 4.313195 (lr=9.5533e-04) (hash(x)=74053619)
+ 1690 train 4.375888 (lr=9.5458e-04) (hash(x)=65910719)
+ 1700 val loss 4.3444
+ 1700 val perplexity 77.0446
+ 1700 train 4.226717 (lr=9.5383e-04) (hash(x)=66496027)
+ 1710 train 4.402362 (lr=9.5307e-04) (hash(x)=75485845)
+ 1720 train 4.217659 (lr=9.5231e-04) (hash(x)=72417899)
+ 1730 train 4.262581 (lr=9.5153e-04) (hash(x)=73593240)
+ 1740 train 4.163245 (lr=9.5076e-04) (hash(x)=74832651)
+ 1750 train 4.145145 (lr=9.4998e-04) (hash(x)=73364724)
+ 1760 train 4.311493 (lr=9.4919e-04) (hash(x)=75903856)
+ 1770 train 4.368478 (lr=9.4839e-04) (hash(x)=78183744)
+ 1780 train 4.272652 (lr=9.4759e-04) (hash(x)=69993946)
+ 1790 train 4.349708 (lr=9.4679e-04) (hash(x)=101032039)
+ 1800 val loss 4.3083
+ 1800 val perplexity 74.3140
+ 1800 train 4.363121 (lr=9.4598e-04) (hash(x)=86018370)
+ 1810 train 3.936415 (lr=9.4516e-04) (hash(x)=84205841)
+ 1820 train 4.260911 (lr=9.4434e-04) (hash(x)=81735398)
+ 1830 train 4.155681 (lr=9.4351e-04) (hash(x)=77297230)
+ 1840 train 4.104589 (lr=9.4268e-04) (hash(x)=75535995)
+ 1850 train 4.126131 (lr=9.4184e-04) (hash(x)=73930506)
+ 1860 train 4.089764 (lr=9.4099e-04) (hash(x)=79524567)
+ 1870 train 4.028361 (lr=9.4014e-04) (hash(x)=80242414)
+ 1880 train 3.939608 (lr=9.3928e-04) (hash(x)=71395200)
+ 1890 train 4.069571 (lr=9.3842e-04) (hash(x)=78269507)
+ 1900 val loss 4.2854
+ 1900 val perplexity 72.6328
+ 1900 train 4.301173 (lr=9.3755e-04) (hash(x)=78327979)
+ 1910 train 4.234810 (lr=9.3668e-04) (hash(x)=75760901)
+ 1920 train 4.283826 (lr=9.3580e-04) (hash(x)=81699948)
+ 1930 train 4.332939 (lr=9.3491e-04) (hash(x)=75326527)
+ 1940 train 4.159538 (lr=9.3402e-04) (hash(x)=73660211)
+ 1950 train 4.297121 (lr=9.3313e-04) (hash(x)=66946100)
+ 1960 train 4.112506 (lr=9.3223e-04) (hash(x)=45679133)
+ 1970 train 4.387769 (lr=9.3132e-04) (hash(x)=73647298)
+ 1980 train 4.293734 (lr=9.3041e-04) (hash(x)=72037215)
+ 1990 train 4.289642 (lr=9.2949e-04) (hash(x)=77640675)
+ 2000 val loss 4.2333
+ 2000 val perplexity 68.9460
+ 2000 train 4.270515 (lr=9.2856e-04) (hash(x)=78165118)
+ 2010 train 4.329265 (lr=9.2763e-04) (hash(x)=80013879)
+ 2020 train 4.595002 (lr=9.2670e-04) (hash(x)=73778143)
+ 2030 train 4.104586 (lr=9.2576e-04) (hash(x)=71396805)
+ 2040 train 4.312492 (lr=9.2481e-04) (hash(x)=83350333)
+ 2050 train 4.805598 (lr=9.2386e-04) (hash(x)=67399515)
+ 2060 train 4.292835 (lr=9.2291e-04) (hash(x)=66586949)
+ 2070 train 4.159421 (lr=9.2195e-04) (hash(x)=77648913)
+ 2080 train 4.301746 (lr=9.2098e-04) (hash(x)=80507387)
+ 2090 train 4.327753 (lr=9.2001e-04) (hash(x)=67897081)
+ 2100 val loss 4.2106
+ 2100 val perplexity 67.3954
+ 2100 train 4.091825 (lr=9.1903e-04) (hash(x)=78780658)
+ 2110 train 4.011407 (lr=9.1804e-04) (hash(x)=71002379)
+ 2120 train 4.254676 (lr=9.1706e-04) (hash(x)=78941785)
+ 2130 train 4.209649 (lr=9.1606e-04) (hash(x)=77840641)
+ 2140 train 4.070915 (lr=9.1506e-04) (hash(x)=79973491)
+ 2150 train 4.150324 (lr=9.1406e-04) (hash(x)=70608621)
+ 2160 train 4.166295 (lr=9.1305e-04) (hash(x)=77576393)
+ 2170 train 4.131457 (lr=9.1203e-04) (hash(x)=79697380)
+ 2180 train 4.089016 (lr=9.1101e-04) (hash(x)=68595163)
+ 2190 train 4.154143 (lr=9.0999e-04) (hash(x)=78393072)
+ 2200 val loss 4.1962
+ 2200 val perplexity 66.4345
+ 2200 train 3.913084 (lr=9.0896e-04) (hash(x)=68832590)
+ 2210 train 3.941712 (lr=9.0792e-04) (hash(x)=73069430)
+ 2220 train 3.871197 (lr=9.0688e-04) (hash(x)=78403531)
+ 2230 train 3.948771 (lr=9.0583e-04) (hash(x)=84282001)
+ 2240 train 3.987436 (lr=9.0478e-04) (hash(x)=68791263)
+ 2250 train 3.916656 (lr=9.0372e-04) (hash(x)=78324357)
+ 2260 train 3.761337 (lr=9.0266e-04) (hash(x)=70769509)
+ 2270 train 3.969893 (lr=9.0160e-04) (hash(x)=76496955)
+ 2280 train 3.995754 (lr=9.0052e-04) (hash(x)=76607207)
+ 2290 train 4.006201 (lr=8.9945e-04) (hash(x)=76204997)
+ 2300 val loss 4.1834
+ 2300 val perplexity 65.5914
+ 2300 train 4.198055 (lr=8.9836e-04) (hash(x)=78644214)
+ 2310 train 4.174824 (lr=8.9728e-04) (hash(x)=69825560)
+ 2320 train 4.221256 (lr=8.9618e-04) (hash(x)=67690675)
+ 2330 train 4.235637 (lr=8.9509e-04) (hash(x)=72400089)
+ 2340 train 4.130614 (lr=8.9398e-04) (hash(x)=87283063)
+ 2350 train 4.110744 (lr=8.9288e-04) (hash(x)=74257442)
+ 2360 train 4.209318 (lr=8.9177e-04) (hash(x)=76596934)
+ 2370 train 4.187047 (lr=8.9065e-04) (hash(x)=76345291)
+ 2380 train 4.102968 (lr=8.8953e-04) (hash(x)=65495351)
+ 2390 train 4.403996 (lr=8.8840e-04) (hash(x)=75795347)
+ 2400 val loss 4.1627
+ 2400 val perplexity 64.2450
+ 2400 train 4.142782 (lr=8.8727e-04) (hash(x)=85857583)
+ 2410 train 4.133544 (lr=8.8613e-04) (hash(x)=72305336)
+ 2420 train 4.091039 (lr=8.8499e-04) (hash(x)=76780579)
+ 2430 train 4.070125 (lr=8.8384e-04) (hash(x)=85532225)
+ 2440 train 4.194731 (lr=8.8269e-04) (hash(x)=74308641)
+ 2450 train 4.177492 (lr=8.8153e-04) (hash(x)=75205457)
+ 2460 train 4.192517 (lr=8.8037e-04) (hash(x)=65232578)
+ 2470 train 4.219167 (lr=8.7921e-04) (hash(x)=77893584)
+ 2480 train 4.063409 (lr=8.7804e-04) (hash(x)=63720791)
+ 2490 train 4.115788 (lr=8.7686e-04) (hash(x)=74714745)
+ 2500 val loss 4.1203
+ 2500 val perplexity 61.5781
+ 2500 train 4.230078 (lr=8.7568e-04) (hash(x)=72628906)
+ 2510 train 3.996775 (lr=8.7450e-04) (hash(x)=69138578)
+ 2520 train 4.160388 (lr=8.7331e-04) (hash(x)=65122039)
+ 2530 train 4.152202 (lr=8.7211e-04) (hash(x)=74953183)
+ 2540 train 4.064047 (lr=8.7091e-04) (hash(x)=75410150)
+ 2550 train 3.964011 (lr=8.6971e-04) (hash(x)=80818592)
+ 2560 train 3.985015 (lr=8.6850e-04) (hash(x)=73726201)
+ 2570 train 4.164223 (lr=8.6729e-04) (hash(x)=78518314)
+ 2580 train 4.135106 (lr=8.6607e-04) (hash(x)=70758402)
+ 2590 train 4.096724 (lr=8.6485e-04) (hash(x)=70495389)
+ 2600 val loss 4.1246
+ 2600 val perplexity 61.8457
+ 2600 train 4.010422 (lr=8.6362e-04) (hash(x)=82589673)
+ 2610 train 3.881681 (lr=8.6239e-04) (hash(x)=70936243)
+ 2620 train 3.823193 (lr=8.6116e-04) (hash(x)=83907871)
+ 2630 train 3.905704 (lr=8.5991e-04) (hash(x)=70432517)
+ 2640 train 3.895645 (lr=8.5867e-04) (hash(x)=73683605)
+ 2650 train 3.757514 (lr=8.5742e-04) (hash(x)=81772854)
+ 2660 train 3.899540 (lr=8.5617e-04) (hash(x)=68745438)
+ 2670 train 3.953605 (lr=8.5491e-04) (hash(x)=77762278)
+ 2680 train 3.827003 (lr=8.5365e-04) (hash(x)=70738783)
+ 2690 train 3.829181 (lr=8.5238e-04) (hash(x)=74534099)
+ 2700 val loss 4.1121
+ 2700 val perplexity 61.0766
+ 2700 train 4.027041 (lr=8.5111e-04) (hash(x)=81441457)
+ 2710 train 4.074991 (lr=8.4983e-04) (hash(x)=76167134)
+ 2720 train 4.137204 (lr=8.4855e-04) (hash(x)=79036746)
+ 2730 train 4.112680 (lr=8.4727e-04) (hash(x)=76789597)
+ 2740 train 4.071312 (lr=8.4598e-04) (hash(x)=67515905)
+ 2750 train 4.182815 (lr=8.4469e-04) (hash(x)=81464179)
+ 2760 train 4.066255 (lr=8.4339e-04) (hash(x)=83004450)
+ 2770 train 4.057415 (lr=8.4209e-04) (hash(x)=74647329)
+ 2780 train 4.233309 (lr=8.4078e-04) (hash(x)=65160033)
+ 2790 train 4.274494 (lr=8.3947e-04) (hash(x)=77815496)
+ 2800 val loss 4.0825
+ 2800 val perplexity 59.2950
+ 2800 train 4.089107 (lr=8.3816e-04) (hash(x)=68408565)
+ 2810 train 4.116800 (lr=8.3684e-04) (hash(x)=71127979)
+ 2820 train 4.139161 (lr=8.3552e-04) (hash(x)=74423275)
+ 2830 train 3.975283 (lr=8.3419e-04) (hash(x)=80921960)
+ 2840 train 4.271122 (lr=8.3286e-04) (hash(x)=79254948)
+ 2850 train 3.994164 (lr=8.3153e-04) (hash(x)=73700080)
+ 2860 train 4.181685 (lr=8.3019e-04) (hash(x)=74328412)
+ 2870 train 4.021886 (lr=8.2884e-04) (hash(x)=74359259)
+ 2880 train 4.224534 (lr=8.2750e-04) (hash(x)=74479648)
+ 2890 train 3.880072 (lr=8.2615e-04) (hash(x)=80993243)
+ 2900 val loss 4.0558
+ 2900 val perplexity 57.7324
+ 2900 train 4.103284 (lr=8.2479e-04) (hash(x)=72286433)
+ 2910 train 4.238840 (lr=8.2343e-04) (hash(x)=81081634)
+ 2920 train 4.000973 (lr=8.2207e-04) (hash(x)=71809089)
+ 2930 train 4.024914 (lr=8.2070e-04) (hash(x)=72668797)
+ 2940 train 4.210224 (lr=8.1933e-04) (hash(x)=80472855)
+ 2950 train 4.069575 (lr=8.1796e-04) (hash(x)=67764485)
+ 2960 train 3.910945 (lr=8.1658e-04) (hash(x)=78590385)
+ 2970 train 4.170641 (lr=8.1520e-04) (hash(x)=86434483)
+ 2980 train 4.078607 (lr=8.1381e-04) (hash(x)=83872337)
+ 2990 train 4.006131 (lr=8.1242e-04) (hash(x)=76866271)
+ 3000 val loss 4.0597
+ 3000 val perplexity 57.9593
+ 3000 train 3.944339 (lr=8.1103e-04) (hash(x)=98889128)
+ 3010 train 3.832625 (lr=8.0963e-04) (hash(x)=77128749)
+ 3020 train 3.935330 (lr=8.0823e-04) (hash(x)=74427692)
+ 3030 train 4.167792 (lr=8.0682e-04) (hash(x)=75155791)
+ 3040 train 3.775682 (lr=8.0541e-04) (hash(x)=80384590)
+ 3050 train 3.838757 (lr=8.0400e-04) (hash(x)=69078039)
+ 3060 train 3.852766 (lr=8.0258e-04) (hash(x)=75289844)
+ 3070 train 3.658214 (lr=8.0116e-04) (hash(x)=73776546)
+ 3080 train 3.964492 (lr=7.9974e-04) (hash(x)=83632057)
+ 3090 train 4.115197 (lr=7.9831e-04) (hash(x)=83219352)
+ 3100 val loss 4.0759
+ 3100 val perplexity 58.9055
+ 3100 train 3.719601 (lr=7.9688e-04) (hash(x)=72986705)
+ 3110 train 4.151382 (lr=7.9545e-04) (hash(x)=79090150)
+ 3120 train 3.909985 (lr=7.9401e-04) (hash(x)=82919368)
+ 3130 train 4.170765 (lr=7.9257e-04) (hash(x)=82752003)
+ 3140 train 4.190866 (lr=7.9112e-04) (hash(x)=83049547)
+ 3150 train 4.122423 (lr=7.8967e-04) (hash(x)=76384003)
+ 3160 train 4.094978 (lr=7.8822e-04) (hash(x)=83069546)
+ 3170 train 4.135162 (lr=7.8677e-04) (hash(x)=77245329)
+ 3180 train 3.917817 (lr=7.8531e-04) (hash(x)=80555988)
+ 3190 train 4.017499 (lr=7.8384e-04) (hash(x)=79506138)
+ 3200 val loss 4.0148
+ 3200 val perplexity 55.4118
+ 3200 train 3.972687 (lr=7.8238e-04) (hash(x)=74238910)
+ 3210 train 4.109450 (lr=7.8091e-04) (hash(x)=87246704)
+ 3220 train 3.943842 (lr=7.7944e-04) (hash(x)=76286650)
+ 3230 train 3.915924 (lr=7.7796e-04) (hash(x)=74128366)
+ 3240 train 3.933496 (lr=7.7648e-04) (hash(x)=83686518)
+ 3250 train 4.060529 (lr=7.7500e-04) (hash(x)=79334239)
+ 3260 train 3.840187 (lr=7.7351e-04) (hash(x)=76991073)
+ 3270 train 3.861104 (lr=7.7203e-04) (hash(x)=79645991)
+ 3280 train 3.894144 (lr=7.7053e-04) (hash(x)=79642315)
+ 3290 train 4.040541 (lr=7.6904e-04) (hash(x)=71394376)
+ 3300 val loss 4.0149
+ 3300 val perplexity 55.4187
+ 3300 train 3.768021 (lr=7.6754e-04) (hash(x)=74335345)
+ 3310 train 3.848773 (lr=7.6604e-04) (hash(x)=77872427)
+ 3320 train 3.770685 (lr=7.6453e-04) (hash(x)=65707820)
+ 3330 train 4.078043 (lr=7.6303e-04) (hash(x)=69949153)
+ 3340 train 3.953735 (lr=7.6151e-04) (hash(x)=74387552)
+ 3350 train 4.086580 (lr=7.6000e-04) (hash(x)=80320961)
+ 3360 train 4.120649 (lr=7.5848e-04) (hash(x)=75749684)
+ 3370 train 3.964887 (lr=7.5696e-04) (hash(x)=78610377)
+ 3380 train 4.018353 (lr=7.5544e-04) (hash(x)=73378836)
+ 3390 train 4.063714 (lr=7.5391e-04) (hash(x)=73242237)
+ 3400 val loss 3.9934
+ 3400 val perplexity 54.2374
+ 3400 train 4.040217 (lr=7.5238e-04) (hash(x)=68240434)
+ 3410 train 3.855806 (lr=7.5085e-04) (hash(x)=80087540)
+ 3420 train 4.308402 (lr=7.4932e-04) (hash(x)=73529815)
+ 3430 train 4.190962 (lr=7.4778e-04) (hash(x)=73306132)
+ 3440 train 4.333542 (lr=7.4624e-04) (hash(x)=70500493)
+ 3450 train 4.080011 (lr=7.4470e-04) (hash(x)=70007926)
+ 3460 train 4.030647 (lr=7.4315e-04) (hash(x)=70024043)
+ 3470 train 4.116632 (lr=7.4160e-04) (hash(x)=81605862)
+ 3480 train 3.982551 (lr=7.4005e-04) (hash(x)=67926367)
+ 3490 train 4.030613 (lr=7.3849e-04) (hash(x)=75476865)
+ 3500 val loss 3.9769
+ 3500 val perplexity 53.3505
+ 3500 train 3.875070 (lr=7.3694e-04) (hash(x)=76813079)
+ 3510 train 4.042651 (lr=7.3538e-04) (hash(x)=83284900)
+ 3520 train 4.031151 (lr=7.3381e-04) (hash(x)=72533794)
+ 3530 train 3.948003 (lr=7.3225e-04) (hash(x)=71496579)
+ 3540 train 3.915365 (lr=7.3068e-04) (hash(x)=70592072)
+ 3550 train 4.046032 (lr=7.2911e-04) (hash(x)=76928693)
+ 3560 train 3.906385 (lr=7.2754e-04) (hash(x)=74503456)
+ 3570 train 3.997969 (lr=7.2596e-04) (hash(x)=70401709)
+ 3580 train 4.073640 (lr=7.2438e-04) (hash(x)=71225931)
+ 3590 train 4.064763 (lr=7.2280e-04) (hash(x)=79059680)
+ 3600 val loss 3.9706
+ 3600 val perplexity 53.0154
+ 3600 train 3.792188 (lr=7.2122e-04) (hash(x)=71650260)
+ 3610 train 3.757215 (lr=7.1963e-04) (hash(x)=72844252)
+ 3620 train 3.790796 (lr=7.1804e-04) (hash(x)=69989480)
+ 3630 train 3.760044 (lr=7.1645e-04) (hash(x)=74336566)
+ 3640 train 3.895407 (lr=7.1486e-04) (hash(x)=60353606)
+ 3650 train 3.834394 (lr=7.1326e-04) (hash(x)=79424308)
+ 3660 train 3.973822 (lr=7.1167e-04) (hash(x)=79976421)
+ 3670 train 3.734791 (lr=7.1007e-04) (hash(x)=76742350)
+ 3680 train 3.870183 (lr=7.0846e-04) (hash(x)=73225113)
+ 3690 train 3.849728 (lr=7.0686e-04) (hash(x)=68913999)
+ 3700 val loss 3.9799
+ 3700 val perplexity 53.5128
+ 3700 train 3.800503 (lr=7.0525e-04) (hash(x)=73295022)
+ 3710 train 3.684872 (lr=7.0364e-04) (hash(x)=71341621)
+ 3720 train 3.859540 (lr=7.0203e-04) (hash(x)=76711631)
+ 3730 train 3.879754 (lr=7.0041e-04) (hash(x)=76294927)
+ 3740 train 4.059006 (lr=6.9880e-04) (hash(x)=81950742)
+ 3750 train 3.859149 (lr=6.9718e-04) (hash(x)=74343197)
+ 3760 train 4.047125 (lr=6.9556e-04) (hash(x)=71093207)
+ 3770 train 4.007198 (lr=6.9394e-04) (hash(x)=80706801)
+ 3780 train 4.013706 (lr=6.9231e-04) (hash(x)=75011477)
+ 3790 train 4.024292 (lr=6.9069e-04) (hash(x)=72248633)
+ 3800 val loss 3.9522
+ 3800 val perplexity 52.0493
+ 3800 train 3.947133 (lr=6.8906e-04) (hash(x)=69136971)
+ 3810 train 4.006185 (lr=6.8743e-04) (hash(x)=77522202)
+ 3820 train 3.796079 (lr=6.8579e-04) (hash(x)=67695101)
+ 3830 train 3.979883 (lr=6.8416e-04) (hash(x)=80051551)
+ 3840 train 3.894925 (lr=6.8252e-04) (hash(x)=74191647)
+ 3850 train 3.842561 (lr=6.8088e-04) (hash(x)=68047171)
+ 3860 train 3.939575 (lr=6.7924e-04) (hash(x)=70045527)
+ 3870 train 3.970980 (lr=6.7760e-04) (hash(x)=73697380)
+ 3880 train 3.936097 (lr=6.7596e-04) (hash(x)=72925733)
+ 3890 train 3.989959 (lr=6.7431e-04) (hash(x)=67921912)
+ 3900 val loss 3.9443
+ 3900 val perplexity 51.6376
+ 3900 train 4.042893 (lr=6.7266e-04) (hash(x)=78738821)
+ 3910 train 3.953716 (lr=6.7101e-04) (hash(x)=74879268)
+ 3920 train 4.032205 (lr=6.6936e-04) (hash(x)=67259305)
+ 3930 train 3.929624 (lr=6.6771e-04) (hash(x)=75660066)
+ 3940 train 4.000964 (lr=6.6605e-04) (hash(x)=74572945)
+ 3950 train 3.896636 (lr=6.6440e-04) (hash(x)=82744417)
+ 3960 train 3.777549 (lr=6.6274e-04) (hash(x)=76023482)
+ 3970 train 3.821615 (lr=6.6108e-04) (hash(x)=74825305)
+ 3980 train 4.284239 (lr=6.5942e-04) (hash(x)=75541926)
+ 3990 train 3.950650 (lr=6.5776e-04) (hash(x)=85705002)
+ 4000 val loss 3.9330
+ 4000 val perplexity 51.0578
+ 4000 train 3.919246 (lr=6.5609e-04) (hash(x)=75062236)
+ 4010 train 3.747275 (lr=6.5443e-04) (hash(x)=70532791)
+ 4020 train 3.999386 (lr=6.5276e-04) (hash(x)=70014939)
+ 4030 train 3.789179 (lr=6.5109e-04) (hash(x)=75315953)
+ 4040 train 3.883654 (lr=6.4942e-04) (hash(x)=74206277)
+ 4050 train 4.032455 (lr=6.4775e-04) (hash(x)=71678934)
+ 4060 train 3.981370 (lr=6.4607e-04) (hash(x)=68169236)
+ 4070 train 3.881472 (lr=6.4440e-04) (hash(x)=73440206)
+ 4080 train 3.821061 (lr=6.4272e-04) (hash(x)=77333014)
+ 4090 train 3.768232 (lr=6.4104e-04) (hash(x)=74699627)
+ 4100 val loss 3.9492
+ 4100 val perplexity 51.8932
+ 4100 train 3.907684 (lr=6.3937e-04) (hash(x)=72537935)
+ 4110 train 3.764351 (lr=6.3769e-04) (hash(x)=59960704)
+ 4120 train 3.805621 (lr=6.3600e-04) (hash(x)=79504237)
+ 4130 train 3.934200 (lr=6.3432e-04) (hash(x)=76513677)
+ 4140 train 4.047119 (lr=6.3264e-04) (hash(x)=81601150)
+ 4150 train 3.885881 (lr=6.3095e-04) (hash(x)=75447369)
+ 4160 train 3.919132 (lr=6.2927e-04) (hash(x)=84280875)
+ 4170 train 4.074868 (lr=6.2758e-04) (hash(x)=88239587)
+ 4180 train 4.120585 (lr=6.2589e-04) (hash(x)=69175879)
+ 4190 train 4.011086 (lr=6.2420e-04) (hash(x)=71751070)
+ 4200 val loss 3.9037
+ 4200 val perplexity 49.5859
+ 4200 train 3.894259 (lr=6.2251e-04) (hash(x)=63098921)
+ 4210 train 4.599662 (lr=6.2082e-04) (hash(x)=99229132)
+ 4220 train 3.885340 (lr=6.1913e-04) (hash(x)=74149842)
+ 4230 train 3.855309 (lr=6.1743e-04) (hash(x)=76000344)
+ 4240 train 3.828799 (lr=6.1574e-04) (hash(x)=73033995)
+ 4250 train 3.835330 (lr=6.1404e-04) (hash(x)=79351091)
+ 4260 train 3.858554 (lr=6.1235e-04) (hash(x)=67587193)
+ 4270 train 3.892792 (lr=6.1065e-04) (hash(x)=80457848)
+ 4280 train 3.753623 (lr=6.0895e-04) (hash(x)=79619630)
+ 4290 train 3.937970 (lr=6.0725e-04) (hash(x)=76884730)
+ 4300 val loss 3.8907
+ 4300 val perplexity 48.9431
+ 4300 train 3.796824 (lr=6.0555e-04) (hash(x)=79561461)
+ 4310 train 3.871387 (lr=6.0385e-04) (hash(x)=80388562)
+ 4320 train 3.909767 (lr=6.0215e-04) (hash(x)=78267207)
+ 4330 train 3.806445 (lr=6.0044e-04) (hash(x)=71301301)
+ 4340 train 3.984378 (lr=5.9874e-04) (hash(x)=79138432)
+ 4350 train 3.899526 (lr=5.9704e-04) (hash(x)=70573215)
+ 4360 train 3.862393 (lr=5.9533e-04) (hash(x)=71492280)
+ 4370 train 3.847002 (lr=5.9363e-04) (hash(x)=74751091)
+ 4380 train 3.865252 (lr=5.9192e-04) (hash(x)=72310274)
+ 4390 train 3.826214 (lr=5.9022e-04) (hash(x)=100848104)
+ 4400 val loss 3.8941
+ 4400 val perplexity 49.1098
+ 4400 train 4.095712 (lr=5.8851e-04) (hash(x)=71196998)
+ 4410 train 3.843240 (lr=5.8680e-04) (hash(x)=74205624)
+ 4420 train 3.801262 (lr=5.8509e-04) (hash(x)=68781238)
+ 4430 train 3.821807 (lr=5.8338e-04) (hash(x)=76961213)
+ 4440 train 3.804907 (lr=5.8168e-04) (hash(x)=80006728)
+ 4450 train 3.761891 (lr=5.7997e-04) (hash(x)=71337984)
+ 4460 train 3.900461 (lr=5.7826e-04) (hash(x)=70514009)
+ 4470 train 3.746757 (lr=5.7655e-04) (hash(x)=76238847)
+ 4480 train 3.792816 (lr=5.7483e-04) (hash(x)=67646620)
+ 4490 train 3.825971 (lr=5.7312e-04) (hash(x)=76200722)
+ 4500 val loss 3.9071
+ 4500 val perplexity 49.7544
+ 4500 train 4.010912 (lr=5.7141e-04) (hash(x)=80808376)
+ 4510 train 3.851734 (lr=5.6970e-04) (hash(x)=79670252)
+ 4520 train 3.671171 (lr=5.6799e-04) (hash(x)=71430874)
+ 4530 train 3.850690 (lr=5.6628e-04) (hash(x)=74443861)
+ 4540 train 3.942988 (lr=5.6456e-04) (hash(x)=84712247)
+ 4550 train 3.925770 (lr=5.6285e-04) (hash(x)=76296651)
+ 4560 train 3.972735 (lr=5.6114e-04) (hash(x)=76991150)
+ 4570 train 3.908177 (lr=5.5942e-04) (hash(x)=68369483)
+ 4580 train 3.874026 (lr=5.5771e-04) (hash(x)=68777608)
+ 4590 train 4.022985 (lr=5.5600e-04) (hash(x)=73172132)
+ 4600 val loss 3.8655
+ 4600 val perplexity 47.7254
+ 4600 train 3.964214 (lr=5.5428e-04) (hash(x)=78012961)
+ 4610 train 3.984230 (lr=5.5257e-04) (hash(x)=76270600)
+ 4620 train 3.786776 (lr=5.5086e-04) (hash(x)=82249567)
+ 4630 train 3.853583 (lr=5.4914e-04) (hash(x)=67713698)
+ 4640 train 4.017416 (lr=5.4743e-04) (hash(x)=54462324)
+ 4650 train 3.951839 (lr=5.4572e-04) (hash(x)=70825784)
+ 4660 train 4.053914 (lr=5.4400e-04) (hash(x)=69303774)
+ 4670 train 3.946279 (lr=5.4229e-04) (hash(x)=84155716)
+ 4680 train 4.065107 (lr=5.4058e-04) (hash(x)=76409129)
+ 4690 train 4.036716 (lr=5.3886e-04) (hash(x)=73455805)
+ 4700 val loss 3.8598
+ 4700 val perplexity 47.4577
+ 4700 train 3.867461 (lr=5.3715e-04) (hash(x)=82597167)
+ 4710 train 3.819057 (lr=5.3544e-04) (hash(x)=72293751)
+ 4720 train 3.828080 (lr=5.3372e-04) (hash(x)=77164608)
+ 4730 train 3.846265 (lr=5.3201e-04) (hash(x)=73781035)
+ 4740 train 3.820917 (lr=5.3030e-04) (hash(x)=69426231)
+ 4750 train 3.831368 (lr=5.2859e-04) (hash(x)=77279505)
+ 4760 train 3.882812 (lr=5.2688e-04) (hash(x)=83295292)
+ 4770 train 3.757774 (lr=5.2517e-04) (hash(x)=76817389)
+ 4780 train 3.807137 (lr=5.2345e-04) (hash(x)=76512693)
+ 4790 train 3.734309 (lr=5.2174e-04) (hash(x)=76469593)
+ 4800 val loss 3.8544
+ 4800 val perplexity 47.2018
+ 4800 train 3.876808 (lr=5.2003e-04) (hash(x)=75145475)
+ 4810 train 3.806314 (lr=5.1832e-04) (hash(x)=72173130)
+ 4820 train 3.986234 (lr=5.1662e-04) (hash(x)=74239752)
+ 4830 train 3.865416 (lr=5.1491e-04) (hash(x)=71592976)
+ 4840 train 3.802169 (lr=5.1320e-04) (hash(x)=70809896)
+ 4850 train 3.773521 (lr=5.1149e-04) (hash(x)=72044692)
+ 4860 train 3.668056 (lr=5.0978e-04) (hash(x)=71791939)
+ 4870 train 3.682731 (lr=5.0808e-04) (hash(x)=72591349)
+ 4880 train 3.838553 (lr=5.0637e-04) (hash(x)=83767466)
+ 4890 train 3.836951 (lr=5.0467e-04) (hash(x)=76798861)
+ 4900 val loss 3.8541
+ 4900 val perplexity 47.1845
+ 4900 train 3.870109 (lr=5.0296e-04) (hash(x)=74555348)
+ 4910 train 3.843024 (lr=5.0126e-04) (hash(x)=75894534)
+ 4920 train 3.912025 (lr=4.9956e-04) (hash(x)=84093086)
+ 4930 train 3.880683 (lr=4.9785e-04) (hash(x)=77209960)
+ 4940 train 3.876299 (lr=4.9615e-04) (hash(x)=78448905)
+ 4950 train 3.744340 (lr=4.9445e-04) (hash(x)=74336001)
+ 4960 train 3.902538 (lr=4.9275e-04) (hash(x)=80895303)
+ 4970 train 3.851454 (lr=4.9105e-04) (hash(x)=71883820)
+ 4980 train 3.774611 (lr=4.8935e-04) (hash(x)=69893767)
+ 4990 train 3.733173 (lr=4.8765e-04) (hash(x)=74660816)
+ 5000 val loss 3.8430
+ 5000 val perplexity 46.6632
+ 5000 train 4.046066 (lr=4.8596e-04) (hash(x)=78057851)
+ 5010 train 3.846924 (lr=4.8426e-04) (hash(x)=64235135)
+ 5020 train 3.811690 (lr=4.8257e-04) (hash(x)=70397272)
+ 5030 train 3.832674 (lr=4.8087e-04) (hash(x)=67171768)
+ 5040 train 3.750823 (lr=4.7918e-04) (hash(x)=70332490)
+ 5050 train 3.911602 (lr=4.7749e-04) (hash(x)=83006433)
+ 5060 train 3.858884 (lr=4.7580e-04) (hash(x)=76575571)
+ 5070 train 3.861979 (lr=4.7411e-04) (hash(x)=77691770)
+ 5080 train 4.246527 (lr=4.7242e-04) (hash(x)=57255988)
+ 5090 train 3.869546 (lr=4.7073e-04) (hash(x)=80108944)
+ 5100 val loss 3.8350
+ 5100 val perplexity 46.2952
+ 5100 train 3.795482 (lr=4.6905e-04) (hash(x)=74319538)
+ 5110 train 3.778503 (lr=4.6736e-04) (hash(x)=72033986)
+ 5120 train 3.887837 (lr=4.6568e-04) (hash(x)=75108610)
+ 5130 train 3.939145 (lr=4.6400e-04) (hash(x)=77275710)
+ 5140 train 3.820401 (lr=4.6231e-04) (hash(x)=69731327)
+ 5150 train 3.809475 (lr=4.6063e-04) (hash(x)=74809572)
+ 5160 train 3.903027 (lr=4.5896e-04) (hash(x)=80883994)
+ 5170 train 3.824394 (lr=4.5728e-04) (hash(x)=70390166)
+ 5180 train 3.989122 (lr=4.5560e-04) (hash(x)=75614276)
+ 5190 train 3.937702 (lr=4.5393e-04) (hash(x)=75431389)
+ 5200 val loss 3.8235
+ 5200 val perplexity 45.7637
+ 5200 train 3.832769 (lr=4.5225e-04) (hash(x)=76239548)
+ 5210 train 3.934418 (lr=4.5058e-04) (hash(x)=71988616)
+ 5220 train 3.874312 (lr=4.4891e-04) (hash(x)=70379284)
+ 5230 train 3.785838 (lr=4.4724e-04) (hash(x)=65937825)
+ 5240 train 3.739779 (lr=4.4557e-04) (hash(x)=75529148)
+ 5250 train 3.630558 (lr=4.4391e-04) (hash(x)=73186404)
+ 5260 train 3.811908 (lr=4.4224e-04) (hash(x)=64888735)
+ 5270 train 3.739376 (lr=4.4058e-04) (hash(x)=61047756)
+ 5280 train 3.747163 (lr=4.3892e-04) (hash(x)=72880277)
+ 5290 train 3.876018 (lr=4.3726e-04) (hash(x)=76312981)
+ 5300 val loss 3.8136
+ 5300 val perplexity 45.3148
+ 5300 train 3.784813 (lr=4.3560e-04) (hash(x)=79007351)
+ 5310 train 3.652471 (lr=4.3395e-04) (hash(x)=69120142)
+ 5320 train 3.773806 (lr=4.3229e-04) (hash(x)=69703140)
+ 5330 train 3.898447 (lr=4.3064e-04) (hash(x)=67515165)
+ 5340 train 3.808012 (lr=4.2899e-04) (hash(x)=57789017)
+ 5350 train 3.818954 (lr=4.2734e-04) (hash(x)=79434224)
+ 5360 train 3.929090 (lr=4.2569e-04) (hash(x)=81242201)
+ 5370 train 3.804117 (lr=4.2404e-04) (hash(x)=69646244)
+ 5380 train 3.773784 (lr=4.2240e-04) (hash(x)=81911267)
+ 5390 train 3.944068 (lr=4.2076e-04) (hash(x)=79086963)
+ 5400 val loss 3.7944
+ 5400 val perplexity 44.4533
+ 5400 train 3.937070 (lr=4.1912e-04) (hash(x)=73958857)
+ 5410 train 3.823615 (lr=4.1748e-04) (hash(x)=76487961)
+ 5420 train 3.781365 (lr=4.1584e-04) (hash(x)=75600730)
+ 5430 train 3.867919 (lr=4.1421e-04) (hash(x)=74683606)
+ 5440 train 3.801675 (lr=4.1257e-04) (hash(x)=70879811)
+ 5450 train 3.903506 (lr=4.1094e-04) (hash(x)=71654210)
+ 5460 train 3.920702 (lr=4.0931e-04) (hash(x)=75386299)
+ 5470 train 3.860429 (lr=4.0769e-04) (hash(x)=74366370)
+ 5480 train 3.767556 (lr=4.0606e-04) (hash(x)=74267515)
+ 5490 train 3.740665 (lr=4.0444e-04) (hash(x)=74435634)
+ 5500 val loss 3.7846
+ 5500 val perplexity 44.0182
+ 5500 train 3.896198 (lr=4.0282e-04) (hash(x)=72000576)
+ 5510 train 3.840467 (lr=4.0120e-04) (hash(x)=63862668)
+ 5520 train 3.809402 (lr=3.9959e-04) (hash(x)=70635849)
+ 5530 train 3.842788 (lr=3.9797e-04) (hash(x)=71094650)
+ 5540 train 3.694288 (lr=3.9636e-04) (hash(x)=75063498)
+ 5550 train 4.222892 (lr=3.9475e-04) (hash(x)=71807986)
+ 5560 train 3.668620 (lr=3.9314e-04) (hash(x)=66657503)
+ 5570 train 3.800491 (lr=3.9154e-04) (hash(x)=66345202)
+ 5580 train 3.862552 (lr=3.8993e-04) (hash(x)=71849709)
+ 5590 train 3.692991 (lr=3.8833e-04) (hash(x)=84702367)
+ 5600 val loss 3.7873
+ 5600 val perplexity 44.1373
+ 5600 train 3.827471 (lr=3.8674e-04) (hash(x)=79109269)
+ 5610 train 3.621153 (lr=3.8514e-04) (hash(x)=79051062)
+ 5620 train 3.778828 (lr=3.8355e-04) (hash(x)=69861636)
+ 5630 train 3.818734 (lr=3.8196e-04) (hash(x)=71549879)
+ 5640 train 3.782319 (lr=3.8037e-04) (hash(x)=71055279)
+ 5650 train 3.833887 (lr=3.7878e-04) (hash(x)=76078002)
+ 5660 train 3.749403 (lr=3.7720e-04) (hash(x)=74335344)
+ 5670 train 3.928575 (lr=3.7562e-04) (hash(x)=71454672)
+ 5680 train 3.703824 (lr=3.7404e-04) (hash(x)=68592905)
+ 5690 train 4.021572 (lr=3.7246e-04) (hash(x)=72942879)
+ 5700 val loss 3.7753
+ 5700 val perplexity 43.6123
+ 5700 train 3.722890 (lr=3.7089e-04) (hash(x)=65107511)
+ 5710 train 3.779541 (lr=3.6932e-04) (hash(x)=82256907)
+ 5720 train 3.858287 (lr=3.6775e-04) (hash(x)=83454690)
+ 5730 train 3.933522 (lr=3.6619e-04) (hash(x)=74898617)
+ 5740 train 3.745434 (lr=3.6462e-04) (hash(x)=74025050)
+ 5750 train 3.871578 (lr=3.6306e-04) (hash(x)=75125832)
+ 5760 train 3.891793 (lr=3.6151e-04) (hash(x)=82640580)
+ 5770 train 3.703700 (lr=3.5995e-04) (hash(x)=72169296)
+ 5780 train 3.707704 (lr=3.5840e-04) (hash(x)=76860672)
+ 5790 train 3.796941 (lr=3.5685e-04) (hash(x)=75858612)
+ 5800 val loss 3.7796
+ 5800 val perplexity 43.8007
+ 5800 train 3.877649 (lr=3.5530e-04) (hash(x)=68326653)
+ 5810 train 3.839266 (lr=3.5376e-04) (hash(x)=72416288)
+ 5820 train 3.726267 (lr=3.5222e-04) (hash(x)=75390575)
+ 5830 train 3.779076 (lr=3.5068e-04) (hash(x)=74274453)
+ 5840 train 3.732033 (lr=3.4915e-04) (hash(x)=80802684)
+ 5850 train 3.768624 (lr=3.4762e-04) (hash(x)=71662543)
+ 5860 train 3.717766 (lr=3.4609e-04) (hash(x)=78625653)
+ 5870 train 3.630612 (lr=3.4456e-04) (hash(x)=68749216)
+ 5880 train 3.733176 (lr=3.4304e-04) (hash(x)=71914758)
+ 5890 train 3.552140 (lr=3.4152e-04) (hash(x)=73589958)
+ 5900 val loss 3.7644
+ 5900 val perplexity 43.1383
+ 5900 train 3.702776 (lr=3.4000e-04) (hash(x)=72500673)
+ 5910 train 3.755464 (lr=3.3849e-04) (hash(x)=80963753)
+ 5920 train 3.892773 (lr=3.3697e-04) (hash(x)=89659262)
+ 5930 train 3.648090 (lr=3.3547e-04) (hash(x)=81546346)
+ 5940 train 3.794944 (lr=3.3396e-04) (hash(x)=80298540)
+ 5950 train 3.798755 (lr=3.3246e-04) (hash(x)=89054333)
+ 5960 train 3.986171 (lr=3.3096e-04) (hash(x)=76292207)
+ 5970 train 3.731558 (lr=3.2947e-04) (hash(x)=68170337)
+ 5980 train 3.912131 (lr=3.2797e-04) (hash(x)=71621953)
+ 5990 train 3.845360 (lr=3.2649e-04) (hash(x)=76140989)
+ 6000 val loss 3.7512
+ 6000 val perplexity 42.5728
+ 6000 train 3.814646 (lr=3.2500e-04) (hash(x)=68289397)
+ 6010 train 3.913048 (lr=3.2352e-04) (hash(x)=84960372)
+ 6020 train 3.757274 (lr=3.2204e-04) (hash(x)=66386985)
+ 6030 train 3.833538 (lr=3.2056e-04) (hash(x)=76775258)
+ 6040 train 3.647609 (lr=3.1909e-04) (hash(x)=75546290)
+ 6050 train 3.622678 (lr=3.1762e-04) (hash(x)=78124215)
+ 6060 train 3.751968 (lr=3.1616e-04) (hash(x)=70915016)
+ 6070 train 3.562368 (lr=3.1469e-04) (hash(x)=72509951)
+ 6080 train 3.737669 (lr=3.1323e-04) (hash(x)=74035398)
+ 6090 train 3.708034 (lr=3.1178e-04) (hash(x)=69655876)
+ 6100 val loss 3.7436
+ 6100 val perplexity 42.2505
+ 6100 train 3.739582 (lr=3.1033e-04) (hash(x)=77457733)
+ 6110 train 3.725136 (lr=3.0888e-04) (hash(x)=77781793)
+ 6120 train 3.792169 (lr=3.0743e-04) (hash(x)=72732355)
+ 6130 train 3.633420 (lr=3.0599e-04) (hash(x)=74032186)
+ 6140 train 3.822440 (lr=3.0455e-04) (hash(x)=72856829)
+ 6150 train 3.903080 (lr=3.0312e-04) (hash(x)=78098180)
+ 6160 train 3.839728 (lr=3.0169e-04) (hash(x)=73990956)
+ 6170 train 3.628723 (lr=3.0026e-04) (hash(x)=72632038)
+ 6180 train 3.670879 (lr=2.9884e-04) (hash(x)=74962782)
+ 6190 train 3.498938 (lr=2.9742e-04) (hash(x)=81020086)
+ 6200 val loss 3.7424
+ 6200 val perplexity 42.1974
+ 6200 train 3.609439 (lr=2.9600e-04) (hash(x)=87386066)
+ 6210 train 3.774763 (lr=2.9459e-04) (hash(x)=72382254)
+ 6220 train 3.938158 (lr=2.9318e-04) (hash(x)=72565426)
+ 6230 train 3.632704 (lr=2.9177e-04) (hash(x)=71306401)
+ 6240 train 3.708160 (lr=2.9037e-04) (hash(x)=78821984)
+ 6250 train 3.678956 (lr=2.8897e-04) (hash(x)=72065375)
+ 6260 train 3.722539 (lr=2.8758e-04) (hash(x)=76100774)
+ 6270 train 3.652312 (lr=2.8619e-04) (hash(x)=81896940)
+ 6280 train 3.548109 (lr=2.8480e-04) (hash(x)=84686730)
+ 6290 train 3.929032 (lr=2.8342e-04) (hash(x)=78981822)
+ 6300 val loss 3.7263
+ 6300 val perplexity 41.5268
+ 6300 train 3.746358 (lr=2.8204e-04) (hash(x)=72121153)
+ 6310 train 3.725568 (lr=2.8067e-04) (hash(x)=77322622)
+ 6320 train 3.758033 (lr=2.7930e-04) (hash(x)=70252312)
+ 6330 train 3.666236 (lr=2.7793e-04) (hash(x)=71167900)
+ 6340 train 3.668889 (lr=2.7657e-04) (hash(x)=67968536)
+ 6350 train 3.731276 (lr=2.7521e-04) (hash(x)=61548959)
+ 6360 train 3.750206 (lr=2.7385e-04) (hash(x)=76835899)
+ 6370 train 3.697115 (lr=2.7250e-04) (hash(x)=64612286)
+ 6380 train 3.755551 (lr=2.7116e-04) (hash(x)=68631278)
+ 6390 train 3.791091 (lr=2.6981e-04) (hash(x)=76777733)
+ 6400 val loss 3.7256
+ 6400 val perplexity 41.4942
+ 6400 train 3.748106 (lr=2.6847e-04) (hash(x)=74010176)
+ 6410 train 3.723989 (lr=2.6714e-04) (hash(x)=69368960)
+ 6420 train 3.751087 (lr=2.6581e-04) (hash(x)=79065455)
+ 6430 train 3.657869 (lr=2.6448e-04) (hash(x)=77858574)
+ 6440 train 3.733454 (lr=2.6316e-04) (hash(x)=80136197)
+ 6450 train 3.699249 (lr=2.6184e-04) (hash(x)=69612887)
+ 6460 train 3.714925 (lr=2.6053e-04) (hash(x)=75004840)
+ 6470 train 3.710804 (lr=2.5922e-04) (hash(x)=76506546)
+ 6480 train 3.692865 (lr=2.5791e-04) (hash(x)=65864207)
+ 6490 train 3.669059 (lr=2.5661e-04) (hash(x)=89740434)
+ 6500 val loss 3.7220
+ 6500 val perplexity 41.3463
+ 6500 train 3.771426 (lr=2.5531e-04) (hash(x)=74487846)
+ 6510 train 3.866957 (lr=2.5402e-04) (hash(x)=77407358)
+ 6520 train 3.513989 (lr=2.5273e-04) (hash(x)=71194824)
+ 6530 train 3.627395 (lr=2.5145e-04) (hash(x)=67104645)
+ 6540 train 3.705828 (lr=2.5017e-04) (hash(x)=76148759)
+ 6550 train 3.681012 (lr=2.4889e-04) (hash(x)=75122573)
+ 6560 train 3.607277 (lr=2.4762e-04) (hash(x)=78906454)
+ 6570 train 3.617641 (lr=2.4635e-04) (hash(x)=74690995)
+ 6580 train 3.640659 (lr=2.4509e-04) (hash(x)=76927297)
+ 6590 train 3.529907 (lr=2.4383e-04) (hash(x)=68404949)
+ 6600 val loss 3.7210
+ 6600 val perplexity 41.3074
+ 6600 train 3.563925 (lr=2.4258e-04) (hash(x)=70670064)
+ 6610 train 3.680010 (lr=2.4133e-04) (hash(x)=78186701)
+ 6620 train 3.629197 (lr=2.4009e-04) (hash(x)=83774586)
+ 6630 train 3.690136 (lr=2.3884e-04) (hash(x)=75862939)
+ 6640 train 3.654970 (lr=2.3761e-04) (hash(x)=69660197)
+ 6650 train 3.670448 (lr=2.3638e-04) (hash(x)=75014695)
+ 6660 train 3.615069 (lr=2.3515e-04) (hash(x)=72303759)
+ 6670 train 3.718569 (lr=2.3393e-04) (hash(x)=70865288)
+ 6680 train 3.758483 (lr=2.3271e-04) (hash(x)=83055332)
+ 6690 train 3.993328 (lr=2.3150e-04) (hash(x)=74803557)
+ 6700 val loss 3.7102
+ 6700 val perplexity 40.8617
+ 6700 train 3.784678 (lr=2.3029e-04) (hash(x)=72939636)
+ 6710 train 3.677957 (lr=2.2909e-04) (hash(x)=71614446)
+ 6720 train 3.808560 (lr=2.2789e-04) (hash(x)=67555962)
+ 6730 train 3.689415 (lr=2.2669e-04) (hash(x)=65536201)
+ 6740 train 3.774216 (lr=2.2550e-04) (hash(x)=72108773)
+ 6750 train 3.552342 (lr=2.2432e-04) (hash(x)=76780817)
+ 6760 train 3.864172 (lr=2.2314e-04) (hash(x)=78273645)
+ 6770 train 3.662185 (lr=2.2196e-04) (hash(x)=79605660)
+ 6780 train 3.716326 (lr=2.2079e-04) (hash(x)=80009237)
+ 6790 train 3.696447 (lr=2.1963e-04) (hash(x)=74533044)
+ 6800 val loss 3.7053
+ 6800 val perplexity 40.6634
+ 6800 train 3.909237 (lr=2.1847e-04) (hash(x)=80026711)
+ 6810 train 3.658340 (lr=2.1731e-04) (hash(x)=70338829)
+ 6820 train 3.619749 (lr=2.1616e-04) (hash(x)=76285045)
+ 6830 train 3.589653 (lr=2.1501e-04) (hash(x)=74154856)
+ 6840 train 3.439939 (lr=2.1387e-04) (hash(x)=77355534)
+ 6850 train 3.623714 (lr=2.1273e-04) (hash(x)=70624171)
+ 6860 train 3.600848 (lr=2.1160e-04) (hash(x)=67874910)
+ 6870 train 3.563037 (lr=2.1047e-04) (hash(x)=67935593)
+ 6880 train 3.671965 (lr=2.0935e-04) (hash(x)=73054733)
+ 6890 train 3.575140 (lr=2.0823e-04) (hash(x)=71490380)
+ 6900 val loss 3.7051
+ 6900 val perplexity 40.6540
+ 6900 train 3.737555 (lr=2.0712e-04) (hash(x)=77350548)
+ 6910 train 3.498832 (lr=2.0602e-04) (hash(x)=69026579)
+ 6920 train 3.804343 (lr=2.0491e-04) (hash(x)=73897252)
+ 6930 train 3.872568 (lr=2.0382e-04) (hash(x)=79854836)
+ 6940 train 3.714164 (lr=2.0272e-04) (hash(x)=72056411)
+ 6950 train 3.666084 (lr=2.0164e-04) (hash(x)=80112011)
+ 6960 train 3.727348 (lr=2.0055e-04) (hash(x)=57470392)
+ 6970 train 3.647768 (lr=1.9948e-04) (hash(x)=70347716)
+ 6980 train 3.832709 (lr=1.9840e-04) (hash(x)=82281687)
+ 6990 train 3.730881 (lr=1.9734e-04) (hash(x)=74694435)
+ 7000 val loss 3.6920
+ 7000 val perplexity 40.1236
+ 7000 train 3.707842 (lr=1.9628e-04) (hash(x)=81879854)
+ 7010 train 3.738984 (lr=1.9522e-04) (hash(x)=69165491)
+ 7020 train 3.639477 (lr=1.9417e-04) (hash(x)=75320851)
+ 7030 train 3.695074 (lr=1.9312e-04) (hash(x)=77375045)
+ 7040 train 3.701880 (lr=1.9208e-04) (hash(x)=80127540)
+ 7050 train 3.523616 (lr=1.9104e-04) (hash(x)=74056421)
+ 7060 train 3.650572 (lr=1.9001e-04) (hash(x)=76784854)
+ 7070 train 3.583572 (lr=1.8899e-04) (hash(x)=78122882)
+ 7080 train 3.558365 (lr=1.8797e-04) (hash(x)=75152878)
+ 7090 train 3.613444 (lr=1.8695e-04) (hash(x)=79990627)
+ 7100 val loss 3.6875
+ 7100 val perplexity 39.9429
+ 7100 train 3.680961 (lr=1.8594e-04) (hash(x)=70855798)
+ 7110 train 3.641630 (lr=1.8494e-04) (hash(x)=73080655)
+ 7120 train 3.653192 (lr=1.8394e-04) (hash(x)=69035245)
+ 7130 train 3.419105 (lr=1.8294e-04) (hash(x)=64555239)
+ 7140 train 3.565031 (lr=1.8196e-04) (hash(x)=71734297)
+ 7150 train 3.605397 (lr=1.8097e-04) (hash(x)=82270339)
+ 7160 train 3.483007 (lr=1.7999e-04) (hash(x)=76649629)
+ 7170 train 3.568456 (lr=1.7902e-04) (hash(x)=73236815)
+ 7180 train 3.475019 (lr=1.7805e-04) (hash(x)=73739555)
+ 7190 train 3.571881 (lr=1.7709e-04) (hash(x)=65956849)
+ 7200 val loss 3.6881
+ 7200 val perplexity 39.9672
+ 7200 train 3.490003 (lr=1.7614e-04) (hash(x)=76352714)
+ 7210 train 3.612017 (lr=1.7519e-04) (hash(x)=77523342)
+ 7220 train 3.680798 (lr=1.7424e-04) (hash(x)=75193820)
+ 7230 train 3.608378 (lr=1.7330e-04) (hash(x)=72975023)
+ 7240 train 3.599787 (lr=1.7237e-04) (hash(x)=70152397)
+ 7250 train 3.595937 (lr=1.7144e-04) (hash(x)=78430692)
+ 7260 train 3.722397 (lr=1.7051e-04) (hash(x)=73137006)
+ 7270 train 3.642856 (lr=1.6959e-04) (hash(x)=76187623)
+ 7280 train 3.667818 (lr=1.6868e-04) (hash(x)=76534981)
+ 7290 train 3.850186 (lr=1.6777e-04) (hash(x)=77886369)
+ 7300 val loss 3.6800
+ 7300 val perplexity 39.6466
+ 7300 train 3.701992 (lr=1.6687e-04) (hash(x)=73689615)
+ 7310 train 3.707147 (lr=1.6598e-04) (hash(x)=73658711)
+ 7320 train 3.566888 (lr=1.6509e-04) (hash(x)=81887275)
+ 7330 train 3.583124 (lr=1.6420e-04) (hash(x)=69952710)
+ 7340 train 3.728839 (lr=1.6332e-04) (hash(x)=74902753)
+ 7350 train 3.621027 (lr=1.6245e-04) (hash(x)=76782773)
+ 7360 train 3.545683 (lr=1.6158e-04) (hash(x)=73139063)
+ 7370 train 3.713619 (lr=1.6072e-04) (hash(x)=83222375)
+ 7380 train 3.646162 (lr=1.5986e-04) (hash(x)=74319008)
+ 7390 train 3.495682 (lr=1.5901e-04) (hash(x)=79871842)
+ 7400 val loss 3.6763
+ 7400 val perplexity 39.4997
+ 7400 train 3.673163 (lr=1.5816e-04) (hash(x)=74853788)
+ 7410 train 3.626156 (lr=1.5732e-04) (hash(x)=79119178)
+ 7420 train 3.576902 (lr=1.5649e-04) (hash(x)=71667016)
+ 7430 train 3.557258 (lr=1.5566e-04) (hash(x)=70294981)
+ 7440 train 3.618197 (lr=1.5484e-04) (hash(x)=76749861)
+ 7450 train 3.441044 (lr=1.5402e-04) (hash(x)=67932362)
+ 7460 train 3.588001 (lr=1.5321e-04) (hash(x)=71646803)
+ 7470 train 3.512496 (lr=1.5241e-04) (hash(x)=76832078)
+ 7480 train 3.549227 (lr=1.5161e-04) (hash(x)=78030848)
+ 7490 train 3.532587 (lr=1.5081e-04) (hash(x)=79789250)
+ 7500 val loss 3.6748
+ 7500 val perplexity 39.4388
+ 7500 train 3.584122 (lr=1.5002e-04) (hash(x)=74510406)
+ 7510 train 3.614876 (lr=1.4924e-04) (hash(x)=75354875)
+ 7520 train 3.647244 (lr=1.4847e-04) (hash(x)=75617004)
+ 7530 train 3.654216 (lr=1.4769e-04) (hash(x)=70778712)
+ 7540 train 3.853573 (lr=1.4693e-04) (hash(x)=65376419)
+ 7550 train 3.667049 (lr=1.4617e-04) (hash(x)=72248752)
+ 7560 train 3.673964 (lr=1.4542e-04) (hash(x)=73692695)
+ 7570 train 3.667290 (lr=1.4467e-04) (hash(x)=74787412)
+ 7580 train 3.748160 (lr=1.4393e-04) (hash(x)=78483480)
+ 7590 train 3.630544 (lr=1.4319e-04) (hash(x)=73847861)
+ 7600 val loss 3.6631
+ 7600 val perplexity 38.9807
+ 7600 train 3.615480 (lr=1.4246e-04) (hash(x)=71708692)
+ 7610 train 3.681148 (lr=1.4174e-04) (hash(x)=79089102)
+ 7620 train 3.566707 (lr=1.4102e-04) (hash(x)=71032201)
+ 7630 train 3.727380 (lr=1.4031e-04) (hash(x)=71581412)
+ 7640 train 3.652773 (lr=1.3960e-04) (hash(x)=79778462)
+ 7650 train 3.850894 (lr=1.3890e-04) (hash(x)=71585919)
+ 7660 train 3.748426 (lr=1.3821e-04) (hash(x)=72139974)
+ 7670 train 3.512209 (lr=1.3752e-04) (hash(x)=75620437)
+ 7680 train 3.754200 (lr=1.3684e-04) (hash(x)=71555417)
+ 7690 train 3.520679 (lr=1.3616e-04) (hash(x)=63719984)
+ 7700 val loss 3.6634
+ 7700 val perplexity 38.9919
+ 7700 train 3.576077 (lr=1.3549e-04) (hash(x)=74494467)
+ 7710 train 3.500696 (lr=1.3483e-04) (hash(x)=76821872)
+ 7720 train 3.523712 (lr=1.3417e-04) (hash(x)=76993734)
+ 7730 train 3.411629 (lr=1.3352e-04) (hash(x)=74942500)
+ 7740 train 3.652175 (lr=1.3287e-04) (hash(x)=72583299)
+ 7750 train 3.415241 (lr=1.3223e-04) (hash(x)=68209490)
+ 7760 train 3.475169 (lr=1.3160e-04) (hash(x)=69093853)
+ 7770 train 3.661530 (lr=1.3097e-04) (hash(x)=74460215)
+ 7780 train 3.589716 (lr=1.3035e-04) (hash(x)=68407147)
+ 7790 train 3.401358 (lr=1.2974e-04) (hash(x)=76153902)
+ 7800 val loss 3.6641
+ 7800 val perplexity 39.0217
+ 7800 train 3.446107 (lr=1.2913e-04) (hash(x)=84446164)
+ 7810 train 3.659085 (lr=1.2852e-04) (hash(x)=73681777)
+ 7820 train 3.678928 (lr=1.2793e-04) (hash(x)=79064713)
+ 7830 train 3.647282 (lr=1.2733e-04) (hash(x)=72713629)
+ 7840 train 3.862705 (lr=1.2675e-04) (hash(x)=83622263)
+ 7850 train 3.620575 (lr=1.2617e-04) (hash(x)=73574852)
+ 7860 train 3.744040 (lr=1.2560e-04) (hash(x)=67285714)
+ 7870 train 3.727985 (lr=1.2503e-04) (hash(x)=77222135)
+ 7880 train 3.757639 (lr=1.2447e-04) (hash(x)=73302118)
+ 7890 train 3.617595 (lr=1.2392e-04) (hash(x)=61831553)
+ 7900 val loss 3.6559
+ 7900 val perplexity 38.7007
+ 7900 train 3.576494 (lr=1.2337e-04) (hash(x)=70412356)
+ 7910 train 3.749923 (lr=1.2283e-04) (hash(x)=69987458)
+ 7920 train 3.720824 (lr=1.2229e-04) (hash(x)=72573609)
+ 7930 train 3.469617 (lr=1.2176e-04) (hash(x)=67089230)
+ 7940 train 3.561009 (lr=1.2124e-04) (hash(x)=66572639)
+ 7950 train 3.671310 (lr=1.2072e-04) (hash(x)=66156765)
+ 7960 train 3.632620 (lr=1.2021e-04) (hash(x)=75055477)
+ 7970 train 3.742238 (lr=1.1970e-04) (hash(x)=65210584)
+ 7980 train 3.549185 (lr=1.1921e-04) (hash(x)=63802308)
+ 7990 train 3.641349 (lr=1.1871e-04) (hash(x)=76379611)
+ 8000 val loss 3.6532
+ 8000 val perplexity 38.5991
+ 8000 train 3.620697 (lr=1.1823e-04) (hash(x)=68482769)
+ 8010 train 3.457824 (lr=1.1775e-04) (hash(x)=77934567)
+ 8020 train 3.546319 (lr=1.1728e-04) (hash(x)=78687646)
+ 8030 train 3.450068 (lr=1.1681e-04) (hash(x)=73044643)
+ 8040 train 3.506208 (lr=1.1635e-04) (hash(x)=68884035)
+ 8050 train 3.331522 (lr=1.1589e-04) (hash(x)=74889605)
+ 8060 train 3.522590 (lr=1.1544e-04) (hash(x)=76659012)
+ 8070 train 3.617751 (lr=1.1500e-04) (hash(x)=70403652)
+ 8080 train 3.471875 (lr=1.1457e-04) (hash(x)=78290747)
+ 8090 train 3.539882 (lr=1.1414e-04) (hash(x)=81394906)
+ 8100 val loss 3.6557
+ 8100 val perplexity 38.6956
+ 8100 train 3.519256 (lr=1.1371e-04) (hash(x)=67764612)
+ 8110 train 3.531004 (lr=1.1330e-04) (hash(x)=76334301)
+ 8120 train 3.595799 (lr=1.1289e-04) (hash(x)=75412335)
+ 8130 train 3.525723 (lr=1.1248e-04) (hash(x)=74846582)
+ 8140 train 3.580329 (lr=1.1209e-04) (hash(x)=80776867)
+ 8150 train 3.605508 (lr=1.1169e-04) (hash(x)=74759651)
+ 8160 train 3.614704 (lr=1.1131e-04) (hash(x)=75135943)
+ 8170 train 3.608184 (lr=1.1093e-04) (hash(x)=88547984)
+ 8180 train 3.686463 (lr=1.1056e-04) (hash(x)=67588135)
+ 8190 train 3.530904 (lr=1.1019e-04) (hash(x)=75190422)
+ 8200 val loss 3.6496
+ 8200 val perplexity 38.4602
+ 8200 train 3.528319 (lr=1.0983e-04) (hash(x)=79609415)
+ 8210 train 3.654772 (lr=1.0948e-04) (hash(x)=73291424)
+ 8220 train 3.646541 (lr=1.0913e-04) (hash(x)=76172342)
+ 8230 train 3.579254 (lr=1.0879e-04) (hash(x)=72229274)
+ 8240 train 3.529650 (lr=1.0846e-04) (hash(x)=70393283)
+ 8250 train 3.603933 (lr=1.0813e-04) (hash(x)=76659165)
+ 8260 train 3.586252 (lr=1.0781e-04) (hash(x)=75284539)
+ 8270 train 3.363368 (lr=1.0750e-04) (hash(x)=71495901)
+ 8280 train 3.597528 (lr=1.0719e-04) (hash(x)=74742339)
+ 8290 train 3.697159 (lr=1.0689e-04) (hash(x)=73748628)
+ 8300 val loss 3.6489
+ 8300 val perplexity 38.4340
+ 8300 train 3.675335 (lr=1.0659e-04) (hash(x)=75127072)
+ 8310 train 3.515121 (lr=1.0630e-04) (hash(x)=69260375)
+ 8320 train 3.535777 (lr=1.0602e-04) (hash(x)=77687746)
+ 8330 train 3.466916 (lr=1.0574e-04) (hash(x)=69399950)
+ 8340 train 3.686811 (lr=1.0547e-04) (hash(x)=79102450)
+ 8350 train 3.624428 (lr=1.0521e-04) (hash(x)=74555497)
+ 8360 train 3.891762 (lr=1.0495e-04) (hash(x)=65367249)
+ 8370 train 3.801868 (lr=1.0470e-04) (hash(x)=78862619)
+ 8380 train 3.592611 (lr=1.0446e-04) (hash(x)=70989373)
+ 8390 train 3.709392 (lr=1.0422e-04) (hash(x)=80650634)
+ 8400 val loss 3.6415
+ 8400 val perplexity 38.1496
+ 8400 train 3.582992 (lr=1.0399e-04) (hash(x)=74168359)
+ 8410 train 3.681455 (lr=1.0377e-04) (hash(x)=80595745)
+ 8420 train 3.633307 (lr=1.0355e-04) (hash(x)=74239969)
+ 8430 train 3.648883 (lr=1.0334e-04) (hash(x)=72089061)
+ 8440 train 3.654515 (lr=1.0313e-04) (hash(x)=79471873)
+ 8450 train 3.650064 (lr=1.0293e-04) (hash(x)=70814855)
+ 8460 train 3.604288 (lr=1.0274e-04) (hash(x)=72900750)
+ 8470 train 3.747749 (lr=1.0256e-04) (hash(x)=73723555)
+ 8480 train 3.577946 (lr=1.0238e-04) (hash(x)=77750599)
+ 8490 train 3.581637 (lr=1.0220e-04) (hash(x)=82913770)
+ 8500 val loss 3.6393
+ 8500 val perplexity 38.0661
+ 8500 train 3.756681 (lr=1.0204e-04) (hash(x)=58014966)
+ 8510 train 3.610657 (lr=1.0188e-04) (hash(x)=78964850)
+ 8520 train 3.834585 (lr=1.0172e-04) (hash(x)=79106421)
+ 8530 train 3.616487 (lr=1.0158e-04) (hash(x)=75662227)
+ 8540 train 3.494726 (lr=1.0144e-04) (hash(x)=73928041)
+ 8550 train 3.649508 (lr=1.0130e-04) (hash(x)=73949623)
+ 8560 train 3.492687 (lr=1.0118e-04) (hash(x)=66671864)
+ 8570 train 3.556316 (lr=1.0106e-04) (hash(x)=73819413)
+ 8580 train 3.510235 (lr=1.0094e-04) (hash(x)=71116170)
+ 8590 train 3.515911 (lr=1.0083e-04) (hash(x)=75792152)
+ 8600 val loss 3.6428
+ 8600 val perplexity 38.1969
+ 8600 train 3.480192 (lr=1.0073e-04) (hash(x)=76199926)
+ 8610 train 3.456395 (lr=1.0064e-04) (hash(x)=72341134)
+ 8620 train 3.504135 (lr=1.0055e-04) (hash(x)=73669000)
+ 8630 train 3.587533 (lr=1.0047e-04) (hash(x)=76197518)
+ 8640 train 3.586119 (lr=1.0039e-04) (hash(x)=77186492)
+ 8650 train 3.582174 (lr=1.0033e-04) (hash(x)=72965152)
+ 8660 train 3.615223 (lr=1.0026e-04) (hash(x)=66084492)
+ 8670 train 3.700512 (lr=1.0021e-04) (hash(x)=77644722)
+ 8680 train 3.528085 (lr=1.0016e-04) (hash(x)=70362453)
+ 8690 train 3.636358 (lr=1.0012e-04) (hash(x)=77832585)
+ 8700 val loss 3.6341
+ 8700 val perplexity 37.8663
+ 8700 train 3.727388 (lr=1.0008e-04) (hash(x)=93249763)
+ 8710 train 3.673150 (lr=1.0005e-04) (hash(x)=88835109)
+ 8720 train 3.790535 (lr=1.0003e-04) (hash(x)=76773769)
+ 8730 train 3.594644 (lr=1.0001e-04) (hash(x)=78751013)
+ 8740 train 3.531550 (lr=1.0000e-04) (hash(x)=72850278)
+ 8749 val loss 3.6338
+ 8749 val perplexity 37.8553
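Two regularities in the log are worth noting: the reported val perplexity is exactly exp(val loss) (e.g. exp(3.6338) ≈ 37.855, matching the final line), and the lr column traces a 500-step linear warmup to max_lr=1e-3 followed by a cosine decay down to 1e-4. A minimal sketch (assuming only the line format shown above) that parses the log and checks the loss/perplexity relation:

```python
import math
import re

# Collect "<step> val loss <v>" and "<step> val perplexity <v>" pairs.
val_loss, val_ppl = {}, {}
with open("n_heads12_lr10e-4_total_batch_size61440_seed1338/log2.txt") as f:
    for line in f:
        m = re.match(r"(\d+) val (loss|perplexity) ([\d.]+)", line)
        if m:
            step, kind, value = int(m.group(1)), m.group(2), float(m.group(3))
            (val_loss if kind == "loss" else val_ppl)[step] = value

for step, loss in sorted(val_loss.items()):
    # perplexity = exp(loss); allow slack for the 4-decimal rounding in the log
    assert abs(val_ppl[step] - math.exp(loss)) < 1e-3 * val_ppl[step]
```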
n_heads12_lr10e-4_total_batch_size61440_seed1338/model_08749.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:183aa52ffebb0e9ab4fb70fc95d04d38858a385e34e7028d3e6c59d89765c190
+ size 498607490
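model_08749.pt is another LFS pointer; the ~499 MB blob it references only appears locally after `git lfs pull` (or a hub download). A hedged sketch of inspecting it follows. The checkpoint's internal layout (a bare state_dict vs. a wrapper dict with extra metadata) is not visible from this commit, so the code only probes it rather than assuming a structure:

```python
import torch

# Assumes `git lfs pull` has already replaced the pointer with the real file.
ckpt = torch.load(
    "n_heads12_lr10e-4_total_batch_size61440_seed1338/model_08749.pt",
    map_location="cpu",
)
print(type(ckpt))
if isinstance(ckpt, dict):
    print(list(ckpt.keys())[:10])  # peek at the layout before assuming one
```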
n_heads12_lr10e-4_total_batch_size61440_seed1338/optimizer_08749.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9715ac7ee691eea63cfda50c5bfa9079aa910e7ebec5c1b69acb5b7bb68e42cf
+ size 990934406