andrew-healey committed (verified)
Commit 0321886 · 1 parent: 607c5d9

Upload folder using huggingface_hub

att_conv_playground/i_small_heads_wd_2_latent_masks_seed_1339/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "att_conv_playground/i_small_heads_wd_2_latent_masks_seed_1339", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 24, "n_embd": 264, "head_dim": 11, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 4375, "warmup_steps": 250, "group": "att_conv_playground", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1339, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "n_latent_masks", "selection_head_linear_combo_scale": 1.0, "disable_selection_head_linear_combo_bias": false, "assert_latent_matches_no_head": false, "n_latent_masks": 2, "init_latent_masks_to_identity": true, "init_latent_masks_to_inverse": false, "latent_mask_scale": null, "latent_mask_runtime_multiplier": null, "latent_mask_sigmoid": false, "S_layernorm": false, "one_head_per_latent_mask": true, "att_conv": true, "att_conv_init": "eye", "att_conv_scale": 1.0, "att_conv_weight_decay": true, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 32, "total_batch_size": 131072, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.003, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "i_small_heads_wd_2_latent_masks", "latent_mask_precision": "float32", "att_conv_precision": "bfloat16"}
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "att_conv_playground/i_small_heads_wd_2_latent_masks_seed_1339", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 22, "n_embd": 264, "head_dim": 11, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 4375, "warmup_steps": 250, "group": "att_conv_playground", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1339, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "n_latent_masks", "selection_head_linear_combo_scale": 1.0, "disable_selection_head_linear_combo_bias": false, "assert_latent_matches_no_head": false, "n_latent_masks": 4, "init_latent_masks_to_identity": true, "init_latent_masks_to_inverse": false, "latent_mask_scale": null, "latent_mask_runtime_multiplier": null, "latent_mask_sigmoid": false, "S_layernorm": false, "one_head_per_latent_mask": true, "att_conv": true, "att_conv_init": "eye", "att_conv_scale": 1.0, "att_conv_weight_decay": true, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 32, "total_batch_size": 131072, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.003, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "i_small_heads_wd_2_latent_masks", "latent_mask_precision": "float32", "att_conv_precision": "bfloat16"}
att_conv_playground/i_small_heads_wd_2_latent_masks_seed_1339/log2.txt CHANGED
@@ -1,529 +1,529 @@
1
  max_steps: 4375
2
- 0 val loss 11.3717
3
- 0 val perplexity 86825.3984
4
- 0 train 11.363577 (lr=4.1958e-06) (hash(x)=44992657)
5
- 10 train 9.922216 (lr=4.6154e-05) (hash(x)=33468021)
6
- 20 train 9.374759 (lr=8.8112e-05) (hash(x)=40941803)
7
- 30 train 8.678726 (lr=1.3007e-04) (hash(x)=36715902)
8
- 40 train 7.961945 (lr=1.7203e-04) (hash(x)=32710993)
9
- 50 train 7.699759 (lr=2.1399e-04) (hash(x)=43839896)
10
- 60 train 7.751177 (lr=2.5594e-04) (hash(x)=40675468)
11
- 70 train 7.417886 (lr=2.9790e-04) (hash(x)=34592631)
12
- 80 train 7.395207 (lr=3.3986e-04) (hash(x)=44444845)
13
- 90 train 7.187118 (lr=3.8182e-04) (hash(x)=41965258)
14
- 100 val loss 7.0550
15
- 100 val perplexity 1158.6124
16
- 100 train 6.913579 (lr=4.2378e-04) (hash(x)=41284750)
17
- 110 train 6.851485 (lr=4.6573e-04) (hash(x)=41118734)
18
- 120 train 6.700687 (lr=5.0769e-04) (hash(x)=37537547)
19
- 130 train 6.547397 (lr=5.4965e-04) (hash(x)=43625179)
20
- 140 train 6.525340 (lr=5.9161e-04) (hash(x)=41940760)
21
- 150 train 6.595750 (lr=6.3357e-04) (hash(x)=39210431)
22
- 160 train 6.500293 (lr=6.7552e-04) (hash(x)=41128294)
23
- 170 train 6.408043 (lr=7.1748e-04) (hash(x)=41590227)
24
- 180 train 6.342257 (lr=7.5944e-04) (hash(x)=38084871)
25
- 190 train 6.252754 (lr=8.0140e-04) (hash(x)=34534333)
26
- 200 val loss 6.1526
27
- 200 val perplexity 469.9528
28
- 200 train 6.191759 (lr=8.4336e-04) (hash(x)=41614299)
29
- 210 train 6.071459 (lr=8.8531e-04) (hash(x)=35551597)
30
- 220 train 5.996685 (lr=9.2727e-04) (hash(x)=44615432)
31
- 230 train 6.131633 (lr=9.6923e-04) (hash(x)=37078768)
32
- 240 train 5.840449 (lr=1.0112e-03) (hash(x)=40644078)
33
- 250 train 5.889553 (lr=1.0531e-03) (hash(x)=44636066)
34
- 260 train 5.777018 (lr=1.0951e-03) (hash(x)=43247952)
35
- 270 train 5.736223 (lr=1.1371e-03) (hash(x)=37989370)
36
- 280 train 5.658681 (lr=1.1790e-03) (hash(x)=36229730)
37
- 290 train 5.593200 (lr=1.2210e-03) (hash(x)=35032996)
38
- 300 val loss 5.7312
39
- 300 val perplexity 308.3363
40
- 300 train 5.473669 (lr=1.2629e-03) (hash(x)=33868636)
41
- 310 train 5.389345 (lr=1.3049e-03) (hash(x)=39782705)
42
- 320 train 5.486483 (lr=1.3469e-03) (hash(x)=43525765)
43
- 330 train 5.566754 (lr=1.3888e-03) (hash(x)=38074926)
44
- 340 train 5.623736 (lr=1.4308e-03) (hash(x)=50125702)
45
- 350 train 5.597556 (lr=1.4727e-03) (hash(x)=35062222)
46
- 360 train 5.524805 (lr=1.5147e-03) (hash(x)=39671001)
47
- 370 train 5.572453 (lr=1.5566e-03) (hash(x)=35748364)
48
- 380 train 5.367682 (lr=1.5986e-03) (hash(x)=39962666)
49
- 390 train 5.329647 (lr=1.6406e-03) (hash(x)=39213360)
50
- 400 val loss 5.3464
51
- 400 val perplexity 209.8587
52
- 400 train 5.324333 (lr=1.6825e-03) (hash(x)=37919584)
53
- 410 train 5.278996 (lr=1.7245e-03) (hash(x)=42001527)
54
- 420 train 5.268399 (lr=1.7664e-03) (hash(x)=33940005)
55
- 430 train 5.307408 (lr=1.8084e-03) (hash(x)=41749217)
56
- 440 train 5.188647 (lr=1.8503e-03) (hash(x)=45219739)
57
- 450 train 5.177632 (lr=1.8923e-03) (hash(x)=38515090)
58
- 460 train 5.087607 (lr=1.9343e-03) (hash(x)=37518607)
59
- 470 train 4.881213 (lr=1.9762e-03) (hash(x)=46830716)
60
- 480 train 5.026694 (lr=2.0182e-03) (hash(x)=49262598)
61
- 490 train 4.819833 (lr=2.0601e-03) (hash(x)=41370559)
62
- 500 val loss 5.1326
63
- 500 val perplexity 169.4548
64
- 500 train 4.905719 (lr=2.1021e-03) (hash(x)=35237459)
65
- 510 train 5.010959 (lr=2.1441e-03) (hash(x)=35070579)
66
- 520 train 5.061775 (lr=2.1860e-03) (hash(x)=37355110)
67
- 530 train 5.144450 (lr=2.2280e-03) (hash(x)=39398044)
68
- 540 train 5.065890 (lr=2.2699e-03) (hash(x)=39156676)
69
- 550 train 5.013740 (lr=2.3119e-03) (hash(x)=38553909)
70
- 560 train 5.080887 (lr=2.3538e-03) (hash(x)=36555371)
71
- 570 train 4.975443 (lr=2.3958e-03) (hash(x)=42048991)
72
- 580 train 4.926690 (lr=2.4378e-03) (hash(x)=37359399)
73
- 590 train 4.973100 (lr=2.4797e-03) (hash(x)=39254663)
74
- 600 val loss 4.9274
75
- 600 val perplexity 138.0236
76
- 600 train 4.933433 (lr=2.5217e-03) (hash(x)=38982539)
77
- 610 train 4.852407 (lr=2.5636e-03) (hash(x)=35859145)
78
- 620 train 4.837459 (lr=2.6056e-03) (hash(x)=35593542)
79
- 630 train 4.754174 (lr=2.6476e-03) (hash(x)=39124418)
80
- 640 train 4.744407 (lr=2.6895e-03) (hash(x)=39107374)
81
- 650 train 4.673471 (lr=2.7315e-03) (hash(x)=34529151)
82
- 660 train 4.607458 (lr=2.7734e-03) (hash(x)=39483931)
83
- 670 train 4.589476 (lr=2.8154e-03) (hash(x)=43008010)
84
- 680 train 4.502759 (lr=2.8573e-03) (hash(x)=33469328)
85
- 690 train 4.547626 (lr=2.8993e-03) (hash(x)=35025253)
86
- 700 val loss 4.7850
87
- 700 val perplexity 119.6962
88
- 700 train 4.515065 (lr=2.9413e-03) (hash(x)=43280188)
89
- 710 train 4.817590 (lr=2.9832e-03) (hash(x)=40999263)
90
- 720 train 4.786658 (lr=3.0000e-03) (hash(x)=38304742)
91
- 730 train 4.792525 (lr=2.9999e-03) (hash(x)=34705188)
92
- 740 train 4.745008 (lr=2.9997e-03) (hash(x)=40028964)
93
- 750 train 4.689537 (lr=2.9994e-03) (hash(x)=39051591)
94
- 760 train 4.738353 (lr=2.9990e-03) (hash(x)=41286198)
95
- 770 train 4.677357 (lr=2.9985e-03) (hash(x)=44512967)
96
- 780 train 4.705936 (lr=2.9979e-03) (hash(x)=45171627)
97
- 790 train 4.664905 (lr=2.9972e-03) (hash(x)=46345673)
98
- 800 val loss 4.6640
99
- 800 val perplexity 106.0645
100
- 800 train 4.625056 (lr=2.9964e-03) (hash(x)=35529545)
101
- 810 train 4.543311 (lr=2.9955e-03) (hash(x)=42321793)
102
- 820 train 4.543081 (lr=2.9945e-03) (hash(x)=49327569)
103
- 830 train 4.616230 (lr=2.9934e-03) (hash(x)=41325057)
104
- 840 train 4.561257 (lr=2.9922e-03) (hash(x)=37305614)
105
- 850 train 4.519702 (lr=2.9909e-03) (hash(x)=38000800)
106
- 860 train 4.635437 (lr=2.9896e-03) (hash(x)=44047697)
107
- 870 train 4.637448 (lr=2.9881e-03) (hash(x)=41576206)
108
- 880 train 4.608762 (lr=2.9865e-03) (hash(x)=44685458)
109
- 890 train 4.589599 (lr=2.9848e-03) (hash(x)=40398597)
110
- 900 val loss 4.5638
111
- 900 val perplexity 95.9509
112
- 900 train 4.526895 (lr=2.9830e-03) (hash(x)=39312673)
113
- 910 train 4.544037 (lr=2.9811e-03) (hash(x)=38881875)
114
- 920 train 4.566689 (lr=2.9792e-03) (hash(x)=39140166)
115
- 930 train 4.465323 (lr=2.9771e-03) (hash(x)=39075781)
116
- 940 train 4.481410 (lr=2.9749e-03) (hash(x)=46887071)
117
- 950 train 4.476902 (lr=2.9726e-03) (hash(x)=39740603)
118
- 960 train 4.521322 (lr=2.9703e-03) (hash(x)=39590268)
119
- 970 train 4.476286 (lr=2.9678e-03) (hash(x)=43580052)
120
- 980 train 4.420864 (lr=2.9652e-03) (hash(x)=40226735)
121
- 990 train 4.520546 (lr=2.9626e-03) (hash(x)=40723896)
122
- 1000 val loss 4.4791
123
- 1000 val perplexity 88.1552
124
- 1000 train 4.480749 (lr=2.9598e-03) (hash(x)=39408050)
125
- 1010 train 4.672426 (lr=2.9570e-03) (hash(x)=32247950)
126
- 1020 train 4.555251 (lr=2.9540e-03) (hash(x)=39049938)
127
- 1030 train 4.519478 (lr=2.9510e-03) (hash(x)=37453736)
128
- 1040 train 4.429589 (lr=2.9478e-03) (hash(x)=36649666)
129
- 1050 train 4.493746 (lr=2.9446e-03) (hash(x)=41404097)
130
- 1060 train 4.484105 (lr=2.9412e-03) (hash(x)=40701599)
131
- 1070 train 4.501267 (lr=2.9378e-03) (hash(x)=41787393)
132
- 1080 train 4.388685 (lr=2.9343e-03) (hash(x)=42325465)
133
- 1090 train 4.426929 (lr=2.9307e-03) (hash(x)=45018925)
134
- 1100 val loss 4.4183
135
- 1100 val perplexity 82.9554
136
- 1100 train 4.446635 (lr=2.9270e-03) (hash(x)=53751982)
137
- 1110 train 4.370152 (lr=2.9231e-03) (hash(x)=43118376)
138
- 1120 train 4.402332 (lr=2.9192e-03) (hash(x)=41489942)
139
- 1130 train 4.420882 (lr=2.9152e-03) (hash(x)=48020487)
140
- 1140 train 4.280365 (lr=2.9112e-03) (hash(x)=43266072)
141
- 1150 train 4.487528 (lr=2.9070e-03) (hash(x)=41790132)
142
- 1160 train 4.445304 (lr=2.9027e-03) (hash(x)=33739090)
143
- 1170 train 4.420757 (lr=2.8983e-03) (hash(x)=43885030)
144
- 1180 train 4.368628 (lr=2.8939e-03) (hash(x)=39046714)
145
- 1190 train 4.399640 (lr=2.8893e-03) (hash(x)=44387023)
146
- 1200 val loss 4.3480
147
- 1200 val perplexity 77.3216
148
- 1200 train 4.260426 (lr=2.8847e-03) (hash(x)=36667141)
149
- 1210 train 4.338835 (lr=2.8800e-03) (hash(x)=45487179)
150
- 1220 train 4.315816 (lr=2.8751e-03) (hash(x)=43456413)
151
- 1230 train 4.248469 (lr=2.8702e-03) (hash(x)=42473632)
152
- 1240 train 4.327480 (lr=2.8652e-03) (hash(x)=42966315)
153
- 1250 train 4.178415 (lr=2.8601e-03) (hash(x)=42930524)
154
- 1260 train 4.320940 (lr=2.8550e-03) (hash(x)=43467676)
155
- 1270 train 4.246199 (lr=2.8497e-03) (hash(x)=40872580)
156
- 1280 train 4.317531 (lr=2.8443e-03) (hash(x)=40619157)
157
- 1290 train 4.327679 (lr=2.8389e-03) (hash(x)=44406165)
158
- 1300 val loss 4.3118
159
- 1300 val perplexity 74.5715
160
- 1300 train 4.291611 (lr=2.8333e-03) (hash(x)=40808029)
161
- 1310 train 4.469441 (lr=2.8277e-03) (hash(x)=46125736)
162
- 1320 train 4.235446 (lr=2.8220e-03) (hash(x)=41698487)
163
- 1330 train 4.268370 (lr=2.8162e-03) (hash(x)=39302878)
164
- 1340 train 4.383691 (lr=2.8103e-03) (hash(x)=41826369)
165
- 1350 train 4.202989 (lr=2.8044e-03) (hash(x)=37061413)
166
- 1360 train 4.325859 (lr=2.7983e-03) (hash(x)=40770974)
167
- 1370 train 4.238734 (lr=2.7922e-03) (hash(x)=38738718)
168
- 1380 train 4.229860 (lr=2.7860e-03) (hash(x)=34961558)
169
- 1390 train 4.285625 (lr=2.7797e-03) (hash(x)=38559342)
170
- 1400 val loss 4.2974
171
- 1400 val perplexity 73.5120
172
- 1400 train 4.240549 (lr=2.7733e-03) (hash(x)=38840020)
173
- 1410 train 4.395767 (lr=2.7668e-03) (hash(x)=44448100)
174
- 1420 train 4.446609 (lr=2.7603e-03) (hash(x)=39199838)
175
- 1430 train 4.242622 (lr=2.7536e-03) (hash(x)=41669873)
176
- 1440 train 4.373518 (lr=2.7469e-03) (hash(x)=53253836)
177
- 1450 train 4.293159 (lr=2.7401e-03) (hash(x)=41320631)
178
- 1460 train 4.212480 (lr=2.7333e-03) (hash(x)=48462803)
179
- 1470 train 4.253891 (lr=2.7263e-03) (hash(x)=42356864)
180
- 1480 train 4.305854 (lr=2.7193e-03) (hash(x)=41118068)
181
- 1490 train 4.219227 (lr=2.7121e-03) (hash(x)=37242854)
182
- 1500 val loss 4.2488
183
- 1500 val perplexity 70.0222
184
- 1500 train 4.264964 (lr=2.7050e-03) (hash(x)=40571031)
185
- 1510 train 4.219374 (lr=2.6977e-03) (hash(x)=35384608)
186
- 1520 train 4.243370 (lr=2.6903e-03) (hash(x)=35341056)
187
- 1530 train 4.140860 (lr=2.6829e-03) (hash(x)=36471779)
188
- 1540 train 4.172465 (lr=2.6754e-03) (hash(x)=40356153)
189
- 1550 train 4.217282 (lr=2.6678e-03) (hash(x)=40133377)
190
- 1560 train 4.143442 (lr=2.6602e-03) (hash(x)=33722160)
191
- 1570 train 4.294497 (lr=2.6525e-03) (hash(x)=39555295)
192
- 1580 train 4.189536 (lr=2.6447e-03) (hash(x)=35950666)
193
- 1590 train 4.345695 (lr=2.6368e-03) (hash(x)=36528535)
194
- 1600 val loss 4.2286
195
- 1600 val perplexity 68.6207
196
- 1600 train 4.230598 (lr=2.6289e-03) (hash(x)=42613017)
197
- 1610 train 4.376665 (lr=2.6208e-03) (hash(x)=42845965)
198
- 1620 train 4.284088 (lr=2.6128e-03) (hash(x)=36137074)
199
- 1630 train 4.227170 (lr=2.6046e-03) (hash(x)=37560569)
200
- 1640 train 4.325464 (lr=2.5964e-03) (hash(x)=43680614)
201
- 1650 train 4.180369 (lr=2.5881e-03) (hash(x)=37281218)
202
- 1660 train 4.206783 (lr=2.5797e-03) (hash(x)=36165222)
203
- 1670 train 4.159068 (lr=2.5713e-03) (hash(x)=50222513)
204
- 1680 train 4.044919 (lr=2.5628e-03) (hash(x)=35239173)
205
- 1690 train 4.103105 (lr=2.5542e-03) (hash(x)=40287349)
206
- 1700 val loss 4.2301
207
- 1700 val perplexity 68.7223
208
- 1700 train 4.290835 (lr=2.5455e-03) (hash(x)=37444780)
209
- 1710 train 4.271445 (lr=2.5368e-03) (hash(x)=38962723)
210
- 1720 train 4.277743 (lr=2.5281e-03) (hash(x)=33882305)
211
- 1730 train 4.255969 (lr=2.5192e-03) (hash(x)=36501540)
212
- 1740 train 4.253308 (lr=2.5103e-03) (hash(x)=38723266)
213
- 1750 train 4.188325 (lr=2.5014e-03) (hash(x)=38772789)
214
- 1760 train 4.264967 (lr=2.4924e-03) (hash(x)=37707898)
215
- 1770 train 4.122534 (lr=2.4833e-03) (hash(x)=30529327)
216
- 1780 train 4.212236 (lr=2.4741e-03) (hash(x)=37885464)
217
- 1790 train 4.224483 (lr=2.4649e-03) (hash(x)=39013967)
218
- 1800 val loss 4.1900
219
- 1800 val perplexity 66.0248
220
- 1800 train 4.262849 (lr=2.4556e-03) (hash(x)=34906955)
221
- 1810 train 4.135907 (lr=2.4463e-03) (hash(x)=40790682)
222
- 1820 train 4.010628 (lr=2.4369e-03) (hash(x)=39912223)
223
- 1830 train 4.132839 (lr=2.4275e-03) (hash(x)=39304486)
224
- 1840 train 4.100446 (lr=2.4180e-03) (hash(x)=37247975)
225
- 1850 train 4.091725 (lr=2.4084e-03) (hash(x)=41340614)
226
- 1860 train 4.249487 (lr=2.3988e-03) (hash(x)=39924627)
227
- 1870 train 4.231539 (lr=2.3891e-03) (hash(x)=40235993)
228
- 1880 train 4.119097 (lr=2.3794e-03) (hash(x)=37693476)
229
- 1890 train 4.238847 (lr=2.3696e-03) (hash(x)=40449864)
230
- 1900 val loss 4.1672
231
- 1900 val perplexity 64.5368
232
- 1900 train 4.147777 (lr=2.3598e-03) (hash(x)=37907749)
233
- 1910 train 4.121119 (lr=2.3499e-03) (hash(x)=34547901)
234
- 1920 train 4.150450 (lr=2.3400e-03) (hash(x)=37771788)
235
- 1930 train 4.025002 (lr=2.3300e-03) (hash(x)=36049454)
236
- 1940 train 4.118981 (lr=2.3200e-03) (hash(x)=29564336)
237
- 1950 train 4.109178 (lr=2.3099e-03) (hash(x)=41689281)
238
- 1960 train 4.021831 (lr=2.2998e-03) (hash(x)=41976699)
239
- 1970 train 4.217256 (lr=2.2896e-03) (hash(x)=41538621)
240
- 1980 train 4.101467 (lr=2.2793e-03) (hash(x)=36490902)
241
- 1990 train 4.133924 (lr=2.2691e-03) (hash(x)=42739315)
242
- 2000 val loss 4.1470
243
- 2000 val perplexity 63.2408
244
- 2000 train 4.148038 (lr=2.2588e-03) (hash(x)=38600074)
245
- 2010 train 4.161505 (lr=2.2484e-03) (hash(x)=34555488)
246
- 2020 train 4.178379 (lr=2.2380e-03) (hash(x)=37227345)
247
- 2030 train 4.130076 (lr=2.2275e-03) (hash(x)=38172386)
248
- 2040 train 4.244411 (lr=2.2170e-03) (hash(x)=39340786)
249
- 2050 train 4.088753 (lr=2.2065e-03) (hash(x)=37237298)
250
- 2060 train 4.324434 (lr=2.1959e-03) (hash(x)=35894782)
251
- 2070 train 4.149876 (lr=2.1853e-03) (hash(x)=44161861)
252
- 2080 train 4.162563 (lr=2.1746e-03) (hash(x)=38062558)
253
- 2090 train 4.012234 (lr=2.1639e-03) (hash(x)=38608301)
254
- 2100 val loss 4.1251
255
- 2100 val perplexity 61.8755
256
- 2100 train 4.045103 (lr=2.1532e-03) (hash(x)=47611537)
257
- 2110 train 4.043654 (lr=2.1424e-03) (hash(x)=38076651)
258
- 2120 train 4.052896 (lr=2.1316e-03) (hash(x)=39870893)
259
- 2130 train 4.200476 (lr=2.1208e-03) (hash(x)=48123321)
260
- 2140 train 4.227482 (lr=2.1099e-03) (hash(x)=44124977)
261
- 2150 train 4.132596 (lr=2.0990e-03) (hash(x)=42181374)
262
- 2160 train 4.162151 (lr=2.0881e-03) (hash(x)=41837878)
263
- 2170 train 4.176892 (lr=2.0771e-03) (hash(x)=42161375)
264
- 2180 train 4.143106 (lr=2.0661e-03) (hash(x)=41792986)
265
- 2190 train 4.112856 (lr=2.0550e-03) (hash(x)=41872416)
266
- 2200 val loss 4.1050
267
- 2200 val perplexity 60.6451
268
- 2200 train 4.088569 (lr=2.0440e-03) (hash(x)=37395985)
269
- 2210 train 4.141064 (lr=2.0329e-03) (hash(x)=40616096)
270
- 2220 train 4.037234 (lr=2.0217e-03) (hash(x)=40118423)
271
- 2230 train 3.955050 (lr=2.0106e-03) (hash(x)=39641586)
272
- 2240 train 4.042272 (lr=1.9994e-03) (hash(x)=34720116)
273
- 2250 train 3.986983 (lr=1.9882e-03) (hash(x)=43641508)
274
- 2260 train 4.015029 (lr=1.9770e-03) (hash(x)=31179786)
275
- 2270 train 3.975202 (lr=1.9657e-03) (hash(x)=42721932)
276
- 2280 train 4.307312 (lr=1.9544e-03) (hash(x)=38474505)
277
- 2290 train 4.129827 (lr=1.9431e-03) (hash(x)=41398545)
278
- 2300 val loss 4.0890
279
- 2300 val perplexity 59.6830
280
- 2300 train 4.058106 (lr=1.9318e-03) (hash(x)=44131094)
281
- 2310 train 4.136691 (lr=1.9205e-03) (hash(x)=41667948)
282
- 2320 train 4.216187 (lr=1.9091e-03) (hash(x)=36339232)
283
- 2330 train 4.097335 (lr=1.8977e-03) (hash(x)=41630644)
284
- 2340 train 4.184237 (lr=1.8863e-03) (hash(x)=41278955)
285
- 2350 train 4.061925 (lr=1.8749e-03) (hash(x)=39491976)
286
- 2360 train 4.120791 (lr=1.8635e-03) (hash(x)=41771238)
287
- 2370 train 4.212980 (lr=1.8520e-03) (hash(x)=45374570)
288
- 2380 train 4.096973 (lr=1.8406e-03) (hash(x)=41419267)
289
- 2390 train 4.127297 (lr=1.8291e-03) (hash(x)=38869169)
290
- 2400 val loss 4.0747
291
- 2400 val perplexity 58.8337
292
- 2400 train 4.028738 (lr=1.8176e-03) (hash(x)=38619293)
293
- 2410 train 3.848051 (lr=1.8061e-03) (hash(x)=41600240)
294
- 2420 train 3.982481 (lr=1.7946e-03) (hash(x)=40891045)
295
- 2430 train 3.973238 (lr=1.7830e-03) (hash(x)=41023249)
296
- 2440 train 3.942607 (lr=1.7715e-03) (hash(x)=33813452)
297
- 2450 train 3.894412 (lr=1.7600e-03) (hash(x)=38464119)
298
- 2460 train 3.793348 (lr=1.7484e-03) (hash(x)=40699982)
299
- 2470 train 3.798558 (lr=1.7368e-03) (hash(x)=38254854)
300
- 2480 train 4.089022 (lr=1.7253e-03) (hash(x)=42736069)
301
- 2490 train 4.096173 (lr=1.7137e-03) (hash(x)=41928525)
302
- 2500 val loss 4.0552
303
- 2500 val perplexity 57.6992
304
- 2500 train 4.051444 (lr=1.7021e-03) (hash(x)=45864011)
305
- 2510 train 4.064780 (lr=1.6906e-03) (hash(x)=41380960)
306
- 2520 train 4.060643 (lr=1.6790e-03) (hash(x)=38366545)
307
- 2530 train 4.070795 (lr=1.6674e-03) (hash(x)=41045176)
308
- 2540 train 4.094515 (lr=1.6558e-03) (hash(x)=43435705)
309
- 2550 train 4.050467 (lr=1.6442e-03) (hash(x)=40693090)
310
- 2560 train 3.964399 (lr=1.6326e-03) (hash(x)=40502478)
311
- 2570 train 4.002869 (lr=1.6210e-03) (hash(x)=38610920)
312
- 2580 train 3.924541 (lr=1.6094e-03) (hash(x)=38333499)
313
- 2590 train 4.000230 (lr=1.5979e-03) (hash(x)=41982736)
314
- 2600 val loss 4.0374
315
- 2600 val perplexity 56.6800
316
- 2600 train 3.979413 (lr=1.5863e-03) (hash(x)=37724702)
317
- 2610 train 3.916890 (lr=1.5747e-03) (hash(x)=39564630)
318
- 2620 train 3.860455 (lr=1.5632e-03) (hash(x)=38465803)
319
- 2630 train 3.785778 (lr=1.5516e-03) (hash(x)=36955007)
320
- 2640 train 3.820798 (lr=1.5400e-03) (hash(x)=32994568)
321
- 2650 train 4.001617 (lr=1.5285e-03) (hash(x)=41165765)
322
- 2660 train 4.017964 (lr=1.5170e-03) (hash(x)=42934000)
323
- 2670 train 4.212893 (lr=1.5054e-03) (hash(x)=42726316)
324
- 2680 train 4.099064 (lr=1.4939e-03) (hash(x)=35880318)
325
- 2690 train 3.982738 (lr=1.4824e-03) (hash(x)=41747329)
326
- 2700 val loss 4.0287
327
- 2700 val perplexity 56.1880
328
- 2700 train 4.103619 (lr=1.4709e-03) (hash(x)=40259630)
329
- 2710 train 4.233391 (lr=1.4594e-03) (hash(x)=39895798)
330
- 2720 train 3.959761 (lr=1.4480e-03) (hash(x)=36146683)
331
- 2730 train 3.940617 (lr=1.4365e-03) (hash(x)=36181984)
332
- 2740 train 3.921445 (lr=1.4251e-03) (hash(x)=43700349)
333
- 2750 train 3.987399 (lr=1.4137e-03) (hash(x)=41610597)
334
- 2760 train 3.975067 (lr=1.4023e-03) (hash(x)=31183639)
335
- 2770 train 3.970547 (lr=1.3909e-03) (hash(x)=37722489)
336
- 2780 train 3.913685 (lr=1.3795e-03) (hash(x)=47290688)
337
- 2790 train 3.952569 (lr=1.3682e-03) (hash(x)=41205574)
338
- 2800 val loss 4.0120
339
- 2800 val perplexity 55.2587
340
- 2800 train 3.805958 (lr=1.3569e-03) (hash(x)=40257962)
341
- 2810 train 3.780186 (lr=1.3456e-03) (hash(x)=39529014)
342
- 2820 train 3.799081 (lr=1.3343e-03) (hash(x)=42244749)
343
- 2830 train 3.854383 (lr=1.3230e-03) (hash(x)=32820090)
344
- 2840 train 3.813854 (lr=1.3118e-03) (hash(x)=40315769)
345
- 2850 train 4.049391 (lr=1.3006e-03) (hash(x)=41524462)
346
- 2860 train 4.043042 (lr=1.2894e-03) (hash(x)=38365734)
347
- 2870 train 4.177433 (lr=1.2783e-03) (hash(x)=37682602)
348
- 2880 train 4.027946 (lr=1.2671e-03) (hash(x)=39162991)
349
- 2890 train 3.886543 (lr=1.2560e-03) (hash(x)=33316384)
350
- 2900 val loss 3.9878
351
- 2900 val perplexity 53.9357
352
- 2900 train 3.934094 (lr=1.2450e-03) (hash(x)=37271132)
353
- 2910 train 4.123441 (lr=1.2339e-03) (hash(x)=35586242)
354
- 2920 train 4.012568 (lr=1.2229e-03) (hash(x)=33320586)
355
- 2930 train 4.025620 (lr=1.2119e-03) (hash(x)=43531361)
356
- 2940 train 3.913611 (lr=1.2010e-03) (hash(x)=37368286)
357
- 2950 train 3.950181 (lr=1.1901e-03) (hash(x)=40363394)
358
- 2960 train 3.918155 (lr=1.1792e-03) (hash(x)=42253792)
359
- 2970 train 3.860552 (lr=1.1684e-03) (hash(x)=38072598)
360
- 2980 train 3.932182 (lr=1.1576e-03) (hash(x)=41470557)
361
- 2990 train 3.858858 (lr=1.1468e-03) (hash(x)=42600033)
362
- 3000 val loss 3.9912
363
- 3000 val perplexity 54.1222
364
- 3000 train 3.814529 (lr=1.1361e-03) (hash(x)=46890983)
365
- 3010 train 3.680441 (lr=1.1254e-03) (hash(x)=37986759)
366
- 3020 train 3.801029 (lr=1.1147e-03) (hash(x)=36270703)
367
- 3030 train 4.068460 (lr=1.1041e-03) (hash(x)=38228599)
368
- 3040 train 3.918266 (lr=1.0935e-03) (hash(x)=44344296)
369
- 3050 train 4.007051 (lr=1.0830e-03) (hash(x)=39962297)
370
- 3060 train 3.984616 (lr=1.0725e-03) (hash(x)=39817394)
371
- 3070 train 3.982054 (lr=1.0620e-03) (hash(x)=41763868)
372
- 3080 train 4.066197 (lr=1.0516e-03) (hash(x)=42343051)
373
- 3090 train 4.064052 (lr=1.0412e-03) (hash(x)=39204893)
374
- 3100 val loss 3.9691
375
- 3100 val perplexity 52.9344
376
- 3100 train 3.964641 (lr=1.0309e-03) (hash(x)=39353599)
377
- 3110 train 4.035765 (lr=1.0207e-03) (hash(x)=36118073)
378
- 3120 train 3.741359 (lr=1.0104e-03) (hash(x)=44070271)
379
- 3130 train 3.763264 (lr=1.0002e-03) (hash(x)=43368151)
380
- 3140 train 3.840649 (lr=9.9011e-04) (hash(x)=40612203)
381
- 3150 train 3.977300 (lr=9.8002e-04) (hash(x)=39598447)
382
- 3160 train 4.026806 (lr=9.6999e-04) (hash(x)=43897682)
383
- 3170 train 4.014703 (lr=9.6000e-04) (hash(x)=39029243)
384
- 3180 train 4.017778 (lr=9.5007e-04) (hash(x)=38586684)
385
- 3190 train 3.969115 (lr=9.4019e-04) (hash(x)=39299115)
386
- 3200 val loss 3.9483
387
- 3200 val perplexity 51.8474
388
- 3200 train 3.997803 (lr=9.3036e-04) (hash(x)=41064949)
389
- 3210 train 3.983777 (lr=9.2058e-04) (hash(x)=41752822)
390
- 3220 train 3.991479 (lr=9.1085e-04) (hash(x)=37484805)
391
- 3230 train 3.953899 (lr=9.0118e-04) (hash(x)=40514919)
392
- 3240 train 4.007973 (lr=8.9157e-04) (hash(x)=42440343)
393
- 3250 train 3.910555 (lr=8.8201e-04) (hash(x)=32464136)
394
- 3260 train 3.926669 (lr=8.7251e-04) (hash(x)=47270558)
395
- 3270 train 3.866263 (lr=8.6307e-04) (hash(x)=39307546)
396
- 3280 train 3.903190 (lr=8.5368e-04) (hash(x)=40410299)
397
- 3290 train 3.904997 (lr=8.4435e-04) (hash(x)=40541497)
398
- 3300 val loss 3.9433
399
- 3300 val perplexity 51.5865
400
- 3300 train 3.800336 (lr=8.3508e-04) (hash(x)=38089139)
401
- 3310 train 3.837263 (lr=8.2588e-04) (hash(x)=42006576)
402
- 3320 train 3.929136 (lr=8.1673e-04) (hash(x)=40422556)
403
- 3330 train 3.721211 (lr=8.0764e-04) (hash(x)=39021643)
404
- 3340 train 3.982839 (lr=7.9862e-04) (hash(x)=40434305)
405
- 3350 train 4.005759 (lr=7.8966e-04) (hash(x)=40833559)
406
- 3360 train 4.008427 (lr=7.8076e-04) (hash(x)=40045231)
407
- 3370 train 3.968306 (lr=7.7192e-04) (hash(x)=34668317)
408
- 3380 train 3.967199 (lr=7.6315e-04) (hash(x)=42738568)
409
- 3390 train 4.063785 (lr=7.5445e-04) (hash(x)=38980166)
410
- 3400 val loss 3.9229
411
- 3400 val perplexity 50.5446
412
- 3400 train 3.973525 (lr=7.4581e-04) (hash(x)=43878176)
413
- 3410 train 4.014478 (lr=7.3724e-04) (hash(x)=38201991)
414
- 3420 train 3.991537 (lr=7.2874e-04) (hash(x)=34905889)
415
- 3430 train 3.991725 (lr=7.2030e-04) (hash(x)=41680405)
416
- 3440 train 3.953187 (lr=7.1193e-04) (hash(x)=39948184)
417
- 3450 train 4.002751 (lr=7.0363e-04) (hash(x)=33623728)
418
- 3460 train 3.877424 (lr=6.9541e-04) (hash(x)=41027647)
419
- 3470 train 3.910935 (lr=6.8725e-04) (hash(x)=42779482)
420
- 3480 train 3.923709 (lr=6.7916e-04) (hash(x)=41579288)
421
- 3490 train 3.778706 (lr=6.7114e-04) (hash(x)=37174846)
422
- 3500 val loss 3.9204
423
- 3500 val perplexity 50.4205
424
- 3500 train 3.658134 (lr=6.6320e-04) (hash(x)=37082090)
425
- 3510 train 3.853891 (lr=6.5533e-04) (hash(x)=43131340)
426
- 3520 train 3.772217 (lr=6.4753e-04) (hash(x)=42457250)
427
- 3530 train 3.998982 (lr=6.3981e-04) (hash(x)=39504492)
428
- 3540 train 3.943151 (lr=6.3216e-04) (hash(x)=36225997)
429
- 3550 train 4.074574 (lr=6.2458e-04) (hash(x)=40912087)
430
- 3560 train 3.912014 (lr=6.1708e-04) (hash(x)=41545925)
431
- 3570 train 4.000611 (lr=6.0966e-04) (hash(x)=32553193)
432
- 3580 train 3.847172 (lr=6.0231e-04) (hash(x)=38799856)
433
- 3590 train 3.893481 (lr=5.9504e-04) (hash(x)=40772075)
434
- 3600 val loss 3.9020
435
- 3600 val perplexity 49.5003
436
- 3600 train 3.936508 (lr=5.8785e-04) (hash(x)=39299903)
437
- 3610 train 3.983123 (lr=5.8074e-04) (hash(x)=39503247)
438
- 3620 train 3.876462 (lr=5.7370e-04) (hash(x)=38730668)
439
- 3630 train 3.897536 (lr=5.6675e-04) (hash(x)=40503276)
440
- 3640 train 3.900669 (lr=5.5987e-04) (hash(x)=36504573)
441
- 3650 train 3.922709 (lr=5.5308e-04) (hash(x)=44903075)
442
- 3660 train 3.828653 (lr=5.4636e-04) (hash(x)=33800148)
443
- 3670 train 3.741939 (lr=5.3973e-04) (hash(x)=39404541)
444
- 3680 train 3.687747 (lr=5.3318e-04) (hash(x)=44777432)
445
- 3690 train 3.784186 (lr=5.2671e-04) (hash(x)=45649913)
446
- 3700 val loss 3.9036
447
- 3700 val perplexity 49.5806
448
- 3700 train 3.911320 (lr=5.2033e-04) (hash(x)=33812537)
449
- 3710 train 4.057878 (lr=5.1402e-04) (hash(x)=36161222)
450
- 3720 train 3.933919 (lr=5.0780e-04) (hash(x)=43690616)
451
- 3730 train 3.941305 (lr=5.0167e-04) (hash(x)=35462313)
452
- 3740 train 4.237491 (lr=4.9562e-04) (hash(x)=44674254)
453
- 3750 train 3.919392 (lr=4.8965e-04) (hash(x)=44276297)
454
- 3760 train 3.964192 (lr=4.8377e-04) (hash(x)=38492178)
455
- 3770 train 3.966371 (lr=4.7798e-04) (hash(x)=44347531)
456
- 3780 train 3.881049 (lr=4.7227e-04) (hash(x)=33792059)
457
- 3790 train 3.978416 (lr=4.6665e-04) (hash(x)=38549318)
458
- 3800 val loss 3.8848
459
- 3800 val perplexity 48.6579
460
- 3800 train 3.856662 (lr=4.6112e-04) (hash(x)=41437448)
461
- 3810 train 3.760668 (lr=4.5567e-04) (hash(x)=36656768)
462
- 3820 train 3.922438 (lr=4.5031e-04) (hash(x)=46387893)
463
- 3830 train 3.918259 (lr=4.4504e-04) (hash(x)=44312129)
464
- 3840 train 3.843921 (lr=4.3986e-04) (hash(x)=40341896)
465
- 3850 train 3.894210 (lr=4.3477e-04) (hash(x)=43589160)
466
- 3860 train 3.875674 (lr=4.2977e-04) (hash(x)=40388945)
467
- 3870 train 3.770464 (lr=4.2486e-04) (hash(x)=33336679)
468
- 3880 train 3.864926 (lr=4.2004e-04) (hash(x)=37643311)
469
- 3890 train 3.888263 (lr=4.1530e-04) (hash(x)=38272927)
470
- 3900 val loss 3.8792
471
- 3900 val perplexity 48.3860
472
- 3900 train 3.833359 (lr=4.1066e-04) (hash(x)=41925748)
473
- 3910 train 3.923103 (lr=4.0611e-04) (hash(x)=38377253)
474
- 3920 train 3.932636 (lr=4.0166e-04) (hash(x)=41616611)
475
- 3930 train 3.854330 (lr=3.9729e-04) (hash(x)=32301827)
476
- 3940 train 3.906909 (lr=3.9302e-04) (hash(x)=47697363)
477
- 3950 train 3.858984 (lr=3.8884e-04) (hash(x)=37867767)
478
- 3960 train 3.860146 (lr=3.8475e-04) (hash(x)=37187295)
479
- 3970 train 3.915128 (lr=3.8076e-04) (hash(x)=41952752)
480
- 3980 train 3.885388 (lr=3.7685e-04) (hash(x)=38358660)
481
- 3990 train 3.756501 (lr=3.7305e-04) (hash(x)=40207878)
482
- 4000 val loss 3.8731
483
- 4000 val perplexity 48.0901
484
- 4000 train 3.842866 (lr=3.6933e-04) (hash(x)=39134015)
485
- 4010 train 3.912690 (lr=3.6572e-04) (hash(x)=38313135)
486
- 4020 train 3.843382 (lr=3.6219e-04) (hash(x)=40710513)
487
- 4030 train 3.969596 (lr=3.5876e-04) (hash(x)=35530165)
488
- 4040 train 3.895833 (lr=3.5543e-04) (hash(x)=39494066)
489
- 4050 train 3.878303 (lr=3.5219e-04) (hash(x)=40861777)
490
- 4060 train 3.866653 (lr=3.4905e-04) (hash(x)=36151939)
491
- 4070 train 3.827314 (lr=3.4600e-04) (hash(x)=39008241)
492
- 4080 train 3.916543 (lr=3.4305e-04) (hash(x)=40395746)
493
- 4090 train 3.953969 (lr=3.4019e-04) (hash(x)=38855480)
494
- 4100 val loss 3.8613
495
- 4100 val perplexity 47.5253
496
- 4100 train 3.893692 (lr=3.3744e-04) (hash(x)=45013254)
497
- 4110 train 3.895224 (lr=3.3477e-04) (hash(x)=39934818)
498
- 4120 train 3.898216 (lr=3.3221e-04) (hash(x)=33417705)
499
- 4130 train 3.835839 (lr=3.2974e-04) (hash(x)=41267978)
500
- 4140 train 3.829896 (lr=3.2737e-04) (hash(x)=38961609)
501
- 4150 train 3.831160 (lr=3.2510e-04) (hash(x)=42173878)
502
- 4160 train 3.794766 (lr=3.2292e-04) (hash(x)=36867405)
503
- 4170 train 3.773464 (lr=3.2085e-04) (hash(x)=38991954)
504
- 4180 train 3.835734 (lr=3.1887e-04) (hash(x)=39032592)
505
- 4190 train 3.812086 (lr=3.1699e-04) (hash(x)=61329402)
506
- 4200 val loss 3.8609
507
- 4200 val perplexity 47.5074
508
- 4200 train 3.824891 (lr=3.1520e-04) (hash(x)=40455200)
509
- 4210 train 3.854889 (lr=3.1352e-04) (hash(x)=40913417)
510
- 4220 train 3.887470 (lr=3.1193e-04) (hash(x)=38809704)
511
- 4230 train 3.867836 (lr=3.1044e-04) (hash(x)=39560204)
512
- 4240 train 3.873663 (lr=3.0905e-04) (hash(x)=41496549)
513
- 4250 train 3.912349 (lr=3.0776e-04) (hash(x)=33994768)
514
- 4260 train 3.898098 (lr=3.0657e-04) (hash(x)=39658687)
515
- 4270 train 3.774222 (lr=3.0548e-04) (hash(x)=42583064)
516
- 4280 train 3.840446 (lr=3.0449e-04) (hash(x)=40635811)
517
- 4290 train 3.738791 (lr=3.0359e-04) (hash(x)=35313841)
518
- 4300 val loss 3.8585
519
- 4300 val perplexity 47.3941
520
- 4300 train 3.921301 (lr=3.0280e-04) (hash(x)=39332245)
521
- 4310 train 3.818811 (lr=3.0210e-04) (hash(x)=43732999)
522
- 4320 train 3.821961 (lr=3.0150e-04) (hash(x)=40831835)
523
- 4330 train 3.961745 (lr=3.0101e-04) (hash(x)=41813368)
524
- 4340 train 3.863962 (lr=3.0061e-04) (hash(x)=36952586)
525
- 4350 train 3.885816 (lr=3.0031e-04) (hash(x)=42470745)
526
- 4360 train 3.813696 (lr=3.0011e-04) (hash(x)=39581495)
527
- 4370 train 4.003681 (lr=3.0001e-04) (hash(x)=35914538)
528
- 4374 val loss 3.8514
529
- 4374 val perplexity 47.0584
 
1
  max_steps: 4375
2
+ 0 val loss 11.2835
3
+ 0 val perplexity 79495.7422
4
+ 0 train 11.290979 (lr=4.1958e-06) (hash(x)=44992657)
5
+ 10 train 9.967012 (lr=4.6154e-05) (hash(x)=33468021)
6
+ 20 train 9.386133 (lr=8.8112e-05) (hash(x)=40941803)
7
+ 30 train 8.686039 (lr=1.3007e-04) (hash(x)=36715902)
8
+ 40 train 7.944033 (lr=1.7203e-04) (hash(x)=32710993)
9
+ 50 train 7.708426 (lr=2.1399e-04) (hash(x)=43839896)
10
+ 60 train 7.768104 (lr=2.5594e-04) (hash(x)=40675468)
11
+ 70 train 7.454903 (lr=2.9790e-04) (hash(x)=34592631)
12
+ 80 train 7.425917 (lr=3.3986e-04) (hash(x)=44444845)
13
+ 90 train 7.210289 (lr=3.8182e-04) (hash(x)=41965258)
14
+ 100 val loss 7.0900
15
+ 100 val perplexity 1199.8834
16
+ 100 train 6.943618 (lr=4.2378e-04) (hash(x)=41284750)
17
+ 110 train 6.879869 (lr=4.6573e-04) (hash(x)=41118734)
18
+ 120 train 6.713333 (lr=5.0769e-04) (hash(x)=37537547)
19
+ 130 train 6.566779 (lr=5.4965e-04) (hash(x)=43625179)
20
+ 140 train 6.535774 (lr=5.9161e-04) (hash(x)=41940760)
21
+ 150 train 6.596593 (lr=6.3357e-04) (hash(x)=39210431)
22
+ 160 train 6.543444 (lr=6.7552e-04) (hash(x)=41128294)
23
+ 170 train 6.445559 (lr=7.1748e-04) (hash(x)=41590227)
24
+ 180 train 6.361904 (lr=7.5944e-04) (hash(x)=38084871)
25
+ 190 train 6.270483 (lr=8.0140e-04) (hash(x)=34534333)
26
+ 200 val loss 6.1927
27
+ 200 val perplexity 489.1622
28
+ 200 train 6.234326 (lr=8.4336e-04) (hash(x)=41614299)
29
+ 210 train 6.105705 (lr=8.8531e-04) (hash(x)=35551597)
30
+ 220 train 6.018891 (lr=9.2727e-04) (hash(x)=44615432)
31
+ 230 train 6.168056 (lr=9.6923e-04) (hash(x)=37078768)
32
+ 240 train 5.860798 (lr=1.0112e-03) (hash(x)=40644078)
33
+ 250 train 5.914165 (lr=1.0531e-03) (hash(x)=44636066)
34
+ 260 train 5.808303 (lr=1.0951e-03) (hash(x)=43247952)
35
+ 270 train 5.739007 (lr=1.1371e-03) (hash(x)=37989370)
36
+ 280 train 5.688848 (lr=1.1790e-03) (hash(x)=36229730)
37
+ 290 train 5.625371 (lr=1.2210e-03) (hash(x)=35032996)
38
+ 300 val loss 5.7157
39
+ 300 val perplexity 303.5841
40
+ 300 train 5.452739 (lr=1.2629e-03) (hash(x)=33868636)
41
+ 310 train 5.389076 (lr=1.3049e-03) (hash(x)=39782705)
42
+ 320 train 5.493956 (lr=1.3469e-03) (hash(x)=43525765)
43
+ 330 train 5.583786 (lr=1.3888e-03) (hash(x)=38074926)
44
+ 340 train 5.623821 (lr=1.4308e-03) (hash(x)=50125702)
45
+ 350 train 5.601887 (lr=1.4727e-03) (hash(x)=35062222)
46
+ 360 train 5.536216 (lr=1.5147e-03) (hash(x)=39671001)
47
+ 370 train 5.570259 (lr=1.5566e-03) (hash(x)=35748364)
48
+ 380 train 5.366518 (lr=1.5986e-03) (hash(x)=39962666)
49
+ 390 train 5.338472 (lr=1.6406e-03) (hash(x)=39213360)
50
+ 400 val loss 5.3521
51
+ 400 val perplexity 211.0562
52
+ 400 train 5.336286 (lr=1.6825e-03) (hash(x)=37919584)
53
+ 410 train 5.288481 (lr=1.7245e-03) (hash(x)=42001527)
54
+ 420 train 5.276488 (lr=1.7664e-03) (hash(x)=33940005)
55
+ 430 train 5.319012 (lr=1.8084e-03) (hash(x)=41749217)
56
+ 440 train 5.209821 (lr=1.8503e-03) (hash(x)=45219739)
57
+ 450 train 5.202966 (lr=1.8923e-03) (hash(x)=38515090)
58
+ 460 train 5.107697 (lr=1.9343e-03) (hash(x)=37518607)
59
+ 470 train 4.909622 (lr=1.9762e-03) (hash(x)=46830716)
60
+ 480 train 5.021257 (lr=2.0182e-03) (hash(x)=49262598)
61
+ 490 train 4.841714 (lr=2.0601e-03) (hash(x)=41370559)
62
+ 500 val loss 5.1417
63
+ 500 val perplexity 171.0144
64
+ 500 train 4.923532 (lr=2.1021e-03) (hash(x)=35237459)
65
+ 510 train 5.049324 (lr=2.1441e-03) (hash(x)=35070579)
66
+ 520 train 5.089362 (lr=2.1860e-03) (hash(x)=37355110)
67
+ 530 train 5.168279 (lr=2.2280e-03) (hash(x)=39398044)
68
+ 540 train 5.100167 (lr=2.2699e-03) (hash(x)=39156676)
69
+ 550 train 5.044903 (lr=2.3119e-03) (hash(x)=38553909)
70
+ 560 train 5.109780 (lr=2.3538e-03) (hash(x)=36555371)
71
+ 570 train 5.016411 (lr=2.3958e-03) (hash(x)=42048991)
72
+ 580 train 4.954191 (lr=2.4378e-03) (hash(x)=37359399)
73
+ 590 train 4.982177 (lr=2.4797e-03) (hash(x)=39254663)
74
+ 600 val loss 4.9401
75
+ 600 val perplexity 139.7894
76
+ 600 train 4.951568 (lr=2.5217e-03) (hash(x)=38982539)
77
+ 610 train 4.876301 (lr=2.5636e-03) (hash(x)=35859145)
78
+ 620 train 4.862639 (lr=2.6056e-03) (hash(x)=35593542)
79
+ 630 train 4.757588 (lr=2.6476e-03) (hash(x)=39124418)
80
+ 640 train 4.755217 (lr=2.6895e-03) (hash(x)=39107374)
81
+ 650 train 4.696900 (lr=2.7315e-03) (hash(x)=34529151)
82
+ 660 train 4.609609 (lr=2.7734e-03) (hash(x)=39483931)
83
+ 670 train 4.610886 (lr=2.8154e-03) (hash(x)=43008010)
84
+ 680 train 4.519474 (lr=2.8573e-03) (hash(x)=33469328)
85
+ 690 train 4.562234 (lr=2.8993e-03) (hash(x)=35025253)
86
+ 700 val loss 4.8037
87
+ 700 val perplexity 121.9615
88
+ 700 train 4.525724 (lr=2.9413e-03) (hash(x)=43280188)
89
+ 710 train 4.817435 (lr=2.9832e-03) (hash(x)=40999263)
90
+ 720 train 4.784417 (lr=3.0000e-03) (hash(x)=38304742)
91
+ 730 train 4.795308 (lr=2.9999e-03) (hash(x)=34705188)
92
+ 740 train 4.757912 (lr=2.9997e-03) (hash(x)=40028964)
93
+ 750 train 4.705203 (lr=2.9994e-03) (hash(x)=39051591)
94
+ 760 train 4.748432 (lr=2.9990e-03) (hash(x)=41286198)
95
+ 770 train 4.693416 (lr=2.9985e-03) (hash(x)=44512967)
96
+ 780 train 4.718927 (lr=2.9979e-03) (hash(x)=45171627)
97
+ 790 train 4.684438 (lr=2.9972e-03) (hash(x)=46345673)
98
+ 800 val loss 4.6812
99
+ 800 val perplexity 107.8989
100
+ 800 train 4.642310 (lr=2.9964e-03) (hash(x)=35529545)
101
+ 810 train 4.542624 (lr=2.9955e-03) (hash(x)=42321793)
102
+ 820 train 4.568599 (lr=2.9945e-03) (hash(x)=49327569)
103
+ 830 train 4.631002 (lr=2.9934e-03) (hash(x)=41325057)
104
+ 840 train 4.571959 (lr=2.9922e-03) (hash(x)=37305614)
105
+ 850 train 4.526261 (lr=2.9909e-03) (hash(x)=38000800)
106
+ 860 train 4.653804 (lr=2.9896e-03) (hash(x)=44047697)
107
+ 870 train 4.645876 (lr=2.9881e-03) (hash(x)=41576206)
108
+ 880 train 4.619092 (lr=2.9865e-03) (hash(x)=44685458)
109
+ 890 train 4.603628 (lr=2.9848e-03) (hash(x)=40398597)
110
+ 900 val loss 4.5726
111
+ 900 val perplexity 96.7920
112
+ 900 train 4.538272 (lr=2.9830e-03) (hash(x)=39312673)
113
+ 910 train 4.555772 (lr=2.9811e-03) (hash(x)=38881875)
114
+ 920 train 4.588659 (lr=2.9792e-03) (hash(x)=39140166)
115
+ 930 train 4.481109 (lr=2.9771e-03) (hash(x)=39075781)
116
+ 940 train 4.498732 (lr=2.9749e-03) (hash(x)=46887071)
117
+ 950 train 4.491267 (lr=2.9726e-03) (hash(x)=39740603)
118
+ 960 train 4.538983 (lr=2.9703e-03) (hash(x)=39590268)
119
+ 970 train 4.495248 (lr=2.9678e-03) (hash(x)=43580052)
120
+ 980 train 4.424994 (lr=2.9652e-03) (hash(x)=40226735)
121
+ 990 train 4.537102 (lr=2.9626e-03) (hash(x)=40723896)
122
+ 1000 val loss 4.4893
123
+ 1000 val perplexity 89.0589
124
+ 1000 train 4.491249 (lr=2.9598e-03) (hash(x)=39408050)
125
+ 1010 train 4.680374 (lr=2.9570e-03) (hash(x)=32247950)
126
+ 1020 train 4.570291 (lr=2.9540e-03) (hash(x)=39049938)
127
+ 1030 train 4.537564 (lr=2.9510e-03) (hash(x)=37453736)
128
+ 1040 train 4.439554 (lr=2.9478e-03) (hash(x)=36649666)
129
+ 1050 train 4.504604 (lr=2.9446e-03) (hash(x)=41404097)
130
+ 1060 train 4.494673 (lr=2.9412e-03) (hash(x)=40701599)
131
+ 1070 train 4.509439 (lr=2.9378e-03) (hash(x)=41787393)
132
+ 1080 train 4.399241 (lr=2.9343e-03) (hash(x)=42325465)
133
+ 1090 train 4.430777 (lr=2.9307e-03) (hash(x)=45018925)
134
+ 1100 val loss 4.4258
135
+ 1100 val perplexity 83.5794
136
+ 1100 train 4.454267 (lr=2.9270e-03) (hash(x)=53751982)
137
+ 1110 train 4.375455 (lr=2.9231e-03) (hash(x)=43118376)
138
+ 1120 train 4.406147 (lr=2.9192e-03) (hash(x)=41489942)
139
+ 1130 train 4.437531 (lr=2.9152e-03) (hash(x)=48020487)
140
+ 1140 train 4.292471 (lr=2.9112e-03) (hash(x)=43266072)
141
+ 1150 train 4.486030 (lr=2.9070e-03) (hash(x)=41790132)
142
+ 1160 train 4.459498 (lr=2.9027e-03) (hash(x)=33739090)
143
+ 1170 train 4.435995 (lr=2.8983e-03) (hash(x)=43885030)
144
+ 1180 train 4.382488 (lr=2.8939e-03) (hash(x)=39046714)
145
+ 1190 train 4.415123 (lr=2.8893e-03) (hash(x)=44387023)
146
+ 1200 val loss 4.3627
147
+ 1200 val perplexity 78.4702
148
+ 1200 train 4.278837 (lr=2.8847e-03) (hash(x)=36667141)
149
+ 1210 train 4.354375 (lr=2.8800e-03) (hash(x)=45487179)
150
+ 1220 train 4.327986 (lr=2.8751e-03) (hash(x)=43456413)
151
+ 1230 train 4.258804 (lr=2.8702e-03) (hash(x)=42473632)
152
+ 1240 train 4.341355 (lr=2.8652e-03) (hash(x)=42966315)
153
+ 1250 train 4.185344 (lr=2.8601e-03) (hash(x)=42930524)
154
+ 1260 train 4.330383 (lr=2.8550e-03) (hash(x)=43467676)
155
+ 1270 train 4.257578 (lr=2.8497e-03) (hash(x)=40872580)
156
+ 1280 train 4.329517 (lr=2.8443e-03) (hash(x)=40619157)
157
+ 1290 train 4.345195 (lr=2.8389e-03) (hash(x)=44406165)
158
+ 1300 val loss 4.3248
159
+ 1300 val perplexity 75.5501
160
+ 1300 train 4.308760 (lr=2.8333e-03) (hash(x)=40808029)
161
+ 1310 train 4.474747 (lr=2.8277e-03) (hash(x)=46125736)
162
+ 1320 train 4.249431 (lr=2.8220e-03) (hash(x)=41698487)
163
+ 1330 train 4.280139 (lr=2.8162e-03) (hash(x)=39302878)
164
+ 1340 train 4.392772 (lr=2.8103e-03) (hash(x)=41826369)
165
+ 1350 train 4.212983 (lr=2.8044e-03) (hash(x)=37061413)
166
+ 1360 train 4.337604 (lr=2.7983e-03) (hash(x)=40770974)
167
+ 1370 train 4.249659 (lr=2.7922e-03) (hash(x)=38738718)
168
+ 1380 train 4.246631 (lr=2.7860e-03) (hash(x)=34961558)
169
+ 1390 train 4.296053 (lr=2.7797e-03) (hash(x)=38559342)
170
+ 1400 val loss 4.3069
171
+ 1400 val perplexity 74.2111
172
+ 1400 train 4.249496 (lr=2.7733e-03) (hash(x)=38840020)
173
+ 1410 train 4.404141 (lr=2.7668e-03) (hash(x)=44448100)
174
+ 1420 train 4.456838 (lr=2.7603e-03) (hash(x)=39199838)
175
+ 1430 train 4.253977 (lr=2.7536e-03) (hash(x)=41669873)
176
+ 1440 train 4.384237 (lr=2.7469e-03) (hash(x)=53253836)
177
+ 1450 train 4.307827 (lr=2.7401e-03) (hash(x)=41320631)
178
+ 1460 train 4.223444 (lr=2.7333e-03) (hash(x)=48462803)
179
+ 1470 train 4.264635 (lr=2.7263e-03) (hash(x)=42356864)
180
+ 1480 train 4.316753 (lr=2.7193e-03) (hash(x)=41118068)
181
+ 1490 train 4.232095 (lr=2.7121e-03) (hash(x)=37242854)
182
+ 1500 val loss 4.2585
183
+ 1500 val perplexity 70.7028
184
+ 1500 train 4.271239 (lr=2.7050e-03) (hash(x)=40571031)
185
+ 1510 train 4.224005 (lr=2.6977e-03) (hash(x)=35384608)
186
+ 1520 train 4.261134 (lr=2.6903e-03) (hash(x)=35341056)
187
+ 1530 train 4.147518 (lr=2.6829e-03) (hash(x)=36471779)
188
+ 1540 train 4.178072 (lr=2.6754e-03) (hash(x)=40356153)
189
+ 1550 train 4.222429 (lr=2.6678e-03) (hash(x)=40133377)
190
+ 1560 train 4.149037 (lr=2.6602e-03) (hash(x)=33722160)
191
+ 1570 train 4.306149 (lr=2.6525e-03) (hash(x)=39555295)
192
+ 1580 train 4.206088 (lr=2.6447e-03) (hash(x)=35950666)
193
+ 1590 train 4.360620 (lr=2.6368e-03) (hash(x)=36528535)
194
+ 1600 val loss 4.2378
195
+ 1600 val perplexity 69.2551
196
+ 1600 train 4.245578 (lr=2.6289e-03) (hash(x)=42613017)
197
+ 1610 train 4.390632 (lr=2.6208e-03) (hash(x)=42845965)
198
+ 1620 train 4.270403 (lr=2.6128e-03) (hash(x)=36137074)
199
+ 1630 train 4.234583 (lr=2.6046e-03) (hash(x)=37560569)
200
+ 1640 train 4.336964 (lr=2.5964e-03) (hash(x)=43680614)
201
+ 1650 train 4.189793 (lr=2.5881e-03) (hash(x)=37281218)
202
+ 1660 train 4.212651 (lr=2.5797e-03) (hash(x)=36165222)
203
+ 1670 train 4.173405 (lr=2.5713e-03) (hash(x)=50222513)
204
+ 1680 train 4.059635 (lr=2.5628e-03) (hash(x)=35239173)
205
+ 1690 train 4.114145 (lr=2.5542e-03) (hash(x)=40287349)
206
+ 1700 val loss 4.2444
207
+ 1700 val perplexity 69.7117
208
+ 1700 train 4.296540 (lr=2.5455e-03) (hash(x)=37444780)
209
+ 1710 train 4.283964 (lr=2.5368e-03) (hash(x)=38962723)
210
+ 1720 train 4.285658 (lr=2.5281e-03) (hash(x)=33882305)
211
+ 1730 train 4.265724 (lr=2.5192e-03) (hash(x)=36501540)
212
+ 1740 train 4.257672 (lr=2.5103e-03) (hash(x)=38723266)
213
+ 1750 train 4.192972 (lr=2.5014e-03) (hash(x)=38772789)
214
+ 1760 train 4.271956 (lr=2.4924e-03) (hash(x)=37707898)
215
+ 1770 train 4.136791 (lr=2.4833e-03) (hash(x)=30529327)
216
+ 1780 train 4.219057 (lr=2.4741e-03) (hash(x)=37885464)
217
+ 1790 train 4.232553 (lr=2.4649e-03) (hash(x)=39013967)
218
+ 1800 val loss 4.2003
219
+ 1800 val perplexity 66.7062
220
+ 1800 train 4.269434 (lr=2.4556e-03) (hash(x)=34906955)
221
+ 1810 train 4.146118 (lr=2.4463e-03) (hash(x)=40790682)
222
+ 1820 train 4.017489 (lr=2.4369e-03) (hash(x)=39912223)
223
+ 1830 train 4.142346 (lr=2.4275e-03) (hash(x)=39304486)
224
+ 1840 train 4.115877 (lr=2.4180e-03) (hash(x)=37247975)
225
+ 1850 train 4.111137 (lr=2.4084e-03) (hash(x)=41340614)
226
+ 1860 train 4.262663 (lr=2.3988e-03) (hash(x)=39924627)
227
+ 1870 train 4.246150 (lr=2.3891e-03) (hash(x)=40235993)
228
+ 1880 train 4.131333 (lr=2.3794e-03) (hash(x)=37693476)
229
+ 1890 train 4.249671 (lr=2.3696e-03) (hash(x)=40449864)
230
+ 1900 val loss 4.1751
231
+ 1900 val perplexity 65.0474
232
+ 1900 train 4.158910 (lr=2.3598e-03) (hash(x)=37907749)
233
+ 1910 train 4.127439 (lr=2.3499e-03) (hash(x)=34547901)
234
+ 1920 train 4.159626 (lr=2.3400e-03) (hash(x)=37771788)
235
+ 1930 train 4.031431 (lr=2.3300e-03) (hash(x)=36049454)
236
+ 1940 train 4.125335 (lr=2.3200e-03) (hash(x)=29564336)
237
+ 1950 train 4.119464 (lr=2.3099e-03) (hash(x)=41689281)
238
+ 1960 train 4.030754 (lr=2.2998e-03) (hash(x)=41976699)
239
+ 1970 train 4.233627 (lr=2.2896e-03) (hash(x)=41538621)
240
+ 1980 train 4.111691 (lr=2.2793e-03) (hash(x)=36490902)
241
+ 1990 train 4.140900 (lr=2.2691e-03) (hash(x)=42739315)
242
+ 2000 val loss 4.1559
243
+ 2000 val perplexity 63.8092
244
+ 2000 train 4.154397 (lr=2.2588e-03) (hash(x)=38600074)
245
+ 2010 train 4.173006 (lr=2.2484e-03) (hash(x)=34555488)
246
+ 2020 train 4.189758 (lr=2.2380e-03) (hash(x)=37227345)
247
+ 2030 train 4.141366 (lr=2.2275e-03) (hash(x)=38172386)
248
+ 2040 train 4.252366 (lr=2.2170e-03) (hash(x)=39340786)
249
+ 2050 train 4.096076 (lr=2.2065e-03) (hash(x)=37237298)
250
+ 2060 train 4.336809 (lr=2.1959e-03) (hash(x)=35894782)
251
+ 2070 train 4.156165 (lr=2.1853e-03) (hash(x)=44161861)
252
+ 2080 train 4.170679 (lr=2.1746e-03) (hash(x)=38062558)
253
+ 2090 train 4.026491 (lr=2.1639e-03) (hash(x)=38608301)
254
+ 2100 val loss 4.1336
255
+ 2100 val perplexity 62.4022
256
+ 2100 train 4.050180 (lr=2.1532e-03) (hash(x)=47611537)
257
+ 2110 train 4.056353 (lr=2.1424e-03) (hash(x)=38076651)
258
+ 2120 train 4.061294 (lr=2.1316e-03) (hash(x)=39870893)
259
+ 2130 train 4.214941 (lr=2.1208e-03) (hash(x)=48123321)
260
+ 2140 train 4.240106 (lr=2.1099e-03) (hash(x)=44124977)
261
+ 2150 train 4.140234 (lr=2.0990e-03) (hash(x)=42181374)
262
+ 2160 train 4.167806 (lr=2.0881e-03) (hash(x)=41837878)
263
+ 2170 train 4.184142 (lr=2.0771e-03) (hash(x)=42161375)
264
+ 2180 train 4.151351 (lr=2.0661e-03) (hash(x)=41792986)
265
+ 2190 train 4.124248 (lr=2.0550e-03) (hash(x)=41872416)
266
+ 2200 val loss 4.1133
267
+ 2200 val perplexity 61.1486
268
+ 2200 train 4.101959 (lr=2.0440e-03) (hash(x)=37395985)
269
+ 2210 train 4.149935 (lr=2.0329e-03) (hash(x)=40616096)
270
+ 2220 train 4.050853 (lr=2.0217e-03) (hash(x)=40118423)
271
+ 2230 train 3.964693 (lr=2.0106e-03) (hash(x)=39641586)
272
+ 2240 train 4.058895 (lr=1.9994e-03) (hash(x)=34720116)
273
+ 2250 train 3.997940 (lr=1.9882e-03) (hash(x)=43641508)
274
+ 2260 train 4.016213 (lr=1.9770e-03) (hash(x)=31179786)
275
+ 2270 train 3.987716 (lr=1.9657e-03) (hash(x)=42721932)
276
+ 2280 train 4.313298 (lr=1.9544e-03) (hash(x)=38474505)
277
+ 2290 train 4.138318 (lr=1.9431e-03) (hash(x)=41398545)
278
+ 2300 val loss 4.0968
279
+ 2300 val perplexity 60.1500
280
+ 2300 train 4.070576 (lr=1.9318e-03) (hash(x)=44131094)
281
+ 2310 train 4.149317 (lr=1.9205e-03) (hash(x)=41667948)
282
+ 2320 train 4.225599 (lr=1.9091e-03) (hash(x)=36339232)
283
+ 2330 train 4.105408 (lr=1.8977e-03) (hash(x)=41630644)
284
+ 2340 train 4.193828 (lr=1.8863e-03) (hash(x)=41278955)
285
+ 2350 train 4.065461 (lr=1.8749e-03) (hash(x)=39491976)
286
+ 2360 train 4.131691 (lr=1.8635e-03) (hash(x)=41771238)
287
+ 2370 train 4.226571 (lr=1.8520e-03) (hash(x)=45374570)
288
+ 2380 train 4.104238 (lr=1.8406e-03) (hash(x)=41419267)
289
+ 2390 train 4.135674 (lr=1.8291e-03) (hash(x)=38869169)
290
+ 2400 val loss 4.0823
291
+ 2400 val perplexity 59.2833
292
+ 2400 train 4.035968 (lr=1.8176e-03) (hash(x)=38619293)
293
+ 2410 train 3.865609 (lr=1.8061e-03) (hash(x)=41600240)
294
+ 2420 train 3.989235 (lr=1.7946e-03) (hash(x)=40891045)
295
+ 2430 train 3.982191 (lr=1.7830e-03) (hash(x)=41023249)
296
+ 2440 train 3.952365 (lr=1.7715e-03) (hash(x)=33813452)
297
+ 2450 train 3.906143 (lr=1.7600e-03) (hash(x)=38464119)
298
+ 2460 train 3.798860 (lr=1.7484e-03) (hash(x)=40699982)
299
+ 2470 train 3.809197 (lr=1.7368e-03) (hash(x)=38254854)
300
+ 2480 train 4.096316 (lr=1.7253e-03) (hash(x)=42736069)
301
+ 2490 train 4.110186 (lr=1.7137e-03) (hash(x)=41928525)
302
+ 2500 val loss 4.0642
303
+ 2500 val perplexity 58.2198
304
+ 2500 train 4.060219 (lr=1.7021e-03) (hash(x)=45864011)
305
+ 2510 train 4.076487 (lr=1.6906e-03) (hash(x)=41380960)
306
+ 2520 train 4.071254 (lr=1.6790e-03) (hash(x)=38366545)
307
+ 2530 train 4.078263 (lr=1.6674e-03) (hash(x)=41045176)
308
+ 2540 train 4.105968 (lr=1.6558e-03) (hash(x)=43435705)
309
+ 2550 train 4.058458 (lr=1.6442e-03) (hash(x)=40693090)
310
+ 2560 train 3.975257 (lr=1.6326e-03) (hash(x)=40502478)
311
+ 2570 train 4.009761 (lr=1.6210e-03) (hash(x)=38610920)
312
+ 2580 train 3.932878 (lr=1.6094e-03) (hash(x)=38333499)
313
+ 2590 train 4.011162 (lr=1.5979e-03) (hash(x)=41982736)
314
+ 2600 val loss 4.0478
315
+ 2600 val perplexity 57.2725
316
+ 2600 train 3.989073 (lr=1.5863e-03) (hash(x)=37724702)
317
+ 2610 train 3.925298 (lr=1.5747e-03) (hash(x)=39564630)
318
+ 2620 train 3.873578 (lr=1.5632e-03) (hash(x)=38465803)
319
+ 2630 train 3.794012 (lr=1.5516e-03) (hash(x)=36955007)
320
+ 2640 train 3.834682 (lr=1.5400e-03) (hash(x)=32994568)
321
+ 2650 train 4.010465 (lr=1.5285e-03) (hash(x)=41165765)
322
+ 2660 train 4.020971 (lr=1.5170e-03) (hash(x)=42934000)
323
+ 2670 train 4.223475 (lr=1.5054e-03) (hash(x)=42726316)
324
+ 2680 train 4.103465 (lr=1.4939e-03) (hash(x)=35880318)
325
+ 2690 train 3.990810 (lr=1.4824e-03) (hash(x)=41747329)
326
+ 2700 val loss 4.0371
327
+ 2700 val perplexity 56.6628
328
+ 2700 train 4.117911 (lr=1.4709e-03) (hash(x)=40259630)
329
+ 2710 train 4.240554 (lr=1.4594e-03) (hash(x)=39895798)
330
+ 2720 train 3.971967 (lr=1.4480e-03) (hash(x)=36146683)
331
+ 2730 train 3.954703 (lr=1.4365e-03) (hash(x)=36181984)
332
+ 2740 train 3.928339 (lr=1.4251e-03) (hash(x)=43700349)
333
+ 2750 train 3.994356 (lr=1.4137e-03) (hash(x)=41610597)
334
+ 2760 train 3.985327 (lr=1.4023e-03) (hash(x)=31183639)
335
+ 2770 train 3.978098 (lr=1.3909e-03) (hash(x)=37722489)
336
+ 2780 train 3.921297 (lr=1.3795e-03) (hash(x)=47290688)
337
+ 2790 train 3.960868 (lr=1.3682e-03) (hash(x)=41205574)
338
+ 2800 val loss 4.0213
339
+ 2800 val perplexity 55.7758
340
+ 2800 train 3.810033 (lr=1.3569e-03) (hash(x)=40257962)
341
+ 2810 train 3.791974 (lr=1.3456e-03) (hash(x)=39529014)
342
+ 2820 train 3.809175 (lr=1.3343e-03) (hash(x)=42244749)
343
+ 2830 train 3.861666 (lr=1.3230e-03) (hash(x)=32820090)
344
+ 2840 train 3.823705 (lr=1.3118e-03) (hash(x)=40315769)
345
+ 2850 train 4.058960 (lr=1.3006e-03) (hash(x)=41524462)
346
+ 2860 train 4.052763 (lr=1.2894e-03) (hash(x)=38365734)
347
+ 2870 train 4.179628 (lr=1.2783e-03) (hash(x)=37682602)
348
+ 2880 train 4.035479 (lr=1.2671e-03) (hash(x)=39162991)
349
+ 2890 train 3.894207 (lr=1.2560e-03) (hash(x)=33316384)
350
+ 2900 val loss 3.9967
351
+ 2900 val perplexity 54.4161
352
+ 2900 train 3.946078 (lr=1.2450e-03) (hash(x)=37271132)
353
+ 2910 train 4.133524 (lr=1.2339e-03) (hash(x)=35586242)
354
+ 2920 train 4.027472 (lr=1.2229e-03) (hash(x)=33320586)
355
+ 2930 train 4.037361 (lr=1.2119e-03) (hash(x)=43531361)
356
+ 2940 train 3.922794 (lr=1.2010e-03) (hash(x)=37368286)
357
+ 2950 train 3.955195 (lr=1.1901e-03) (hash(x)=40363394)
358
+ 2960 train 3.927172 (lr=1.1792e-03) (hash(x)=42253792)
359
+ 2970 train 3.866165 (lr=1.1684e-03) (hash(x)=38072598)
360
+ 2980 train 3.941077 (lr=1.1576e-03) (hash(x)=41470557)
361
+ 2990 train 3.867032 (lr=1.1468e-03) (hash(x)=42600033)
362
+ 3000 val loss 3.9980
363
+ 3000 val perplexity 54.4914
364
+ 3000 train 3.819512 (lr=1.1361e-03) (hash(x)=46890983)
365
+ 3010 train 3.687428 (lr=1.1254e-03) (hash(x)=37986759)
366
+ 3020 train 3.810691 (lr=1.1147e-03) (hash(x)=36270703)
367
+ 3030 train 4.076555 (lr=1.1041e-03) (hash(x)=38228599)
368
+ 3040 train 3.931058 (lr=1.0935e-03) (hash(x)=44344296)
369
+ 3050 train 4.014858 (lr=1.0830e-03) (hash(x)=39962297)
370
+ 3060 train 4.003596 (lr=1.0725e-03) (hash(x)=39817394)
371
+ 3070 train 3.996688 (lr=1.0620e-03) (hash(x)=41763868)
372
+ 3080 train 4.076163 (lr=1.0516e-03) (hash(x)=42343051)
373
+ 3090 train 4.070370 (lr=1.0412e-03) (hash(x)=39204893)
374
+ 3100 val loss 3.9772
375
+ 3100 val perplexity 53.3671
376
+ 3100 train 3.976465 (lr=1.0309e-03) (hash(x)=39353599)
377
+ 3110 train 4.044365 (lr=1.0207e-03) (hash(x)=36118073)
378
+ 3120 train 3.746538 (lr=1.0104e-03) (hash(x)=44070271)
379
+ 3130 train 3.773758 (lr=1.0002e-03) (hash(x)=43368151)
380
+ 3140 train 3.848818 (lr=9.9011e-04) (hash(x)=40612203)
381
+ 3150 train 3.991338 (lr=9.8002e-04) (hash(x)=39598447)
382
+ 3160 train 4.034787 (lr=9.6999e-04) (hash(x)=43897682)
383
+ 3170 train 4.019437 (lr=9.6000e-04) (hash(x)=39029243)
384
+ 3180 train 4.027864 (lr=9.5007e-04) (hash(x)=38586684)
385
+ 3190 train 3.981925 (lr=9.4019e-04) (hash(x)=39299115)
386
+ 3200 val loss 3.9579
387
+ 3200 val perplexity 52.3484
388
+ 3200 train 4.006319 (lr=9.3036e-04) (hash(x)=41064949)
389
+ 3210 train 3.993791 (lr=9.2058e-04) (hash(x)=41752822)
390
+ 3220 train 3.999174 (lr=9.1085e-04) (hash(x)=37484805)
391
+ 3230 train 3.965427 (lr=9.0118e-04) (hash(x)=40514919)
392
+ 3240 train 4.017501 (lr=8.9157e-04) (hash(x)=42440343)
393
+ 3250 train 3.921484 (lr=8.8201e-04) (hash(x)=32464136)
394
+ 3260 train 3.941777 (lr=8.7251e-04) (hash(x)=47270558)
395
+ 3270 train 3.877330 (lr=8.6307e-04) (hash(x)=39307546)
396
+ 3280 train 3.914576 (lr=8.5368e-04) (hash(x)=40410299)
397
+ 3290 train 3.908976 (lr=8.4435e-04) (hash(x)=40541497)
398
+ 3300 val loss 3.9534
399
+ 3300 val perplexity 52.1106
400
+ 3300 train 3.812737 (lr=8.3508e-04) (hash(x)=38089139)
401
+ 3310 train 3.845575 (lr=8.2588e-04) (hash(x)=42006576)
402
+ 3320 train 3.935588 (lr=8.1673e-04) (hash(x)=40422556)
403
+ 3330 train 3.729753 (lr=8.0764e-04) (hash(x)=39021643)
404
+ 3340 train 3.995386 (lr=7.9862e-04) (hash(x)=40434305)
405
+ 3350 train 4.014440 (lr=7.8966e-04) (hash(x)=40833559)
406
+ 3360 train 4.015814 (lr=7.8076e-04) (hash(x)=40045231)
407
+ 3370 train 3.978151 (lr=7.7192e-04) (hash(x)=34668317)
408
+ 3380 train 3.977051 (lr=7.6315e-04) (hash(x)=42738568)
409
+ 3390 train 4.075114 (lr=7.5445e-04) (hash(x)=38980166)
410
+ 3400 val loss 3.9321
411
+ 3400 val perplexity 51.0156
412
+ 3400 train 3.982142 (lr=7.4581e-04) (hash(x)=43878176)
413
+ 3410 train 4.026713 (lr=7.3724e-04) (hash(x)=38201991)
414
+ 3420 train 4.000719 (lr=7.2874e-04) (hash(x)=34905889)
415
+ 3430 train 4.004856 (lr=7.2030e-04) (hash(x)=41680405)
416
+ 3440 train 3.964763 (lr=7.1193e-04) (hash(x)=39948184)
417
+ 3450 train 4.013325 (lr=7.0363e-04) (hash(x)=33623728)
418
+ 3460 train 3.882361 (lr=6.9541e-04) (hash(x)=41027647)
419
+ 3470 train 3.914577 (lr=6.8725e-04) (hash(x)=42779482)
420
+ 3480 train 3.936655 (lr=6.7916e-04) (hash(x)=41579288)
421
+ 3490 train 3.787006 (lr=6.7114e-04) (hash(x)=37174846)
422
+ 3500 val loss 3.9291
423
+ 3500 val perplexity 50.8637
424
+ 3500 train 3.666730 (lr=6.6320e-04) (hash(x)=37082090)
425
+ 3510 train 3.868412 (lr=6.5533e-04) (hash(x)=43131340)
426
+ 3520 train 3.781068 (lr=6.4753e-04) (hash(x)=42457250)
427
+ 3530 train 4.007407 (lr=6.3981e-04) (hash(x)=39504492)
428
+ 3540 train 3.956300 (lr=6.3216e-04) (hash(x)=36225997)
429
+ 3550 train 4.083768 (lr=6.2458e-04) (hash(x)=40912087)
430
+ 3560 train 3.919294 (lr=6.1708e-04) (hash(x)=41545925)
431
+ 3570 train 4.009060 (lr=6.0966e-04) (hash(x)=32553193)
432
+ 3580 train 3.855056 (lr=6.0231e-04) (hash(x)=38799856)
433
+ 3590 train 3.902007 (lr=5.9504e-04) (hash(x)=40772075)
434
+ 3600 val loss 3.9103
435
+ 3600 val perplexity 49.9139
436
+ 3600 train 3.941343 (lr=5.8785e-04) (hash(x)=39299903)
437
+ 3610 train 3.984620 (lr=5.8074e-04) (hash(x)=39503247)
438
+ 3620 train 3.884426 (lr=5.7370e-04) (hash(x)=38730668)
439
+ 3630 train 3.903308 (lr=5.6675e-04) (hash(x)=40503276)
440
+ 3640 train 3.913339 (lr=5.5987e-04) (hash(x)=36504573)
441
+ 3650 train 3.936536 (lr=5.5308e-04) (hash(x)=44903075)
442
+ 3660 train 3.839731 (lr=5.4636e-04) (hash(x)=33800148)
443
+ 3670 train 3.752671 (lr=5.3973e-04) (hash(x)=39404541)
444
+ 3680 train 3.696729 (lr=5.3318e-04) (hash(x)=44777432)
445
+ 3690 train 3.793703 (lr=5.2671e-04) (hash(x)=45649913)
446
+ 3700 val loss 3.9128
447
+ 3700 val perplexity 50.0381
448
+ 3700 train 3.912190 (lr=5.2033e-04) (hash(x)=33812537)
449
+ 3710 train 4.065012 (lr=5.1402e-04) (hash(x)=36161222)
450
+ 3720 train 3.945955 (lr=5.0780e-04) (hash(x)=43690616)
451
+ 3730 train 3.949650 (lr=5.0167e-04) (hash(x)=35462313)
452
+ 3740 train 4.245599 (lr=4.9562e-04) (hash(x)=44674254)
453
+ 3750 train 3.925692 (lr=4.8965e-04) (hash(x)=44276297)
454
+ 3760 train 3.973274 (lr=4.8377e-04) (hash(x)=38492178)
455
+ 3770 train 3.980526 (lr=4.7798e-04) (hash(x)=44347531)
456
+ 3780 train 3.888068 (lr=4.7227e-04) (hash(x)=33792059)
457
+ 3790 train 3.986055 (lr=4.6665e-04) (hash(x)=38549318)
458
+ 3800 val loss 3.8952
459
+ 3800 val perplexity 49.1641
460
+ 3800 train 3.866667 (lr=4.6112e-04) (hash(x)=41437448)
461
+ 3810 train 3.767452 (lr=4.5567e-04) (hash(x)=36656768)
462
+ 3820 train 3.930921 (lr=4.5031e-04) (hash(x)=46387893)
463
+ 3830 train 3.928599 (lr=4.4504e-04) (hash(x)=44312129)
464
+ 3840 train 3.848206 (lr=4.3986e-04) (hash(x)=40341896)
465
+ 3850 train 3.900620 (lr=4.3477e-04) (hash(x)=43589160)
466
+ 3860 train 3.883543 (lr=4.2977e-04) (hash(x)=40388945)
467
+ 3870 train 3.781083 (lr=4.2486e-04) (hash(x)=33336679)
468
+ 3880 train 3.875562 (lr=4.2004e-04) (hash(x)=37643311)
469
+ 3890 train 3.899103 (lr=4.1530e-04) (hash(x)=38272927)
470
+ 3900 val loss 3.8884
471
+ 3900 val perplexity 48.8312
472
+ 3900 train 3.843403 (lr=4.1066e-04) (hash(x)=41925748)
473
+ 3910 train 3.933294 (lr=4.0611e-04) (hash(x)=38377253)
474
+ 3920 train 3.937147 (lr=4.0166e-04) (hash(x)=41616611)
475
+ 3930 train 3.862437 (lr=3.9729e-04) (hash(x)=32301827)
476
+ 3940 train 3.918568 (lr=3.9302e-04) (hash(x)=47697363)
477
+ 3950 train 3.869142 (lr=3.8884e-04) (hash(x)=37867767)
478
+ 3960 train 3.869044 (lr=3.8475e-04) (hash(x)=37187295)
479
+ 3970 train 3.930926 (lr=3.8076e-04) (hash(x)=41952752)
480
+ 3980 train 3.895832 (lr=3.7685e-04) (hash(x)=38358660)
481
+ 3990 train 3.762591 (lr=3.7305e-04) (hash(x)=40207878)
482
+ 4000 val loss 3.8828
483
+ 4000 val perplexity 48.5594
484
+ 4000 train 3.854169 (lr=3.6933e-04) (hash(x)=39134015)
485
+ 4010 train 3.922479 (lr=3.6572e-04) (hash(x)=38313135)
486
+ 4020 train 3.850610 (lr=3.6219e-04) (hash(x)=40710513)
487
+ 4030 train 3.977275 (lr=3.5876e-04) (hash(x)=35530165)
488
+ 4040 train 3.903843 (lr=3.5543e-04) (hash(x)=39494066)
489
+ 4050 train 3.894374 (lr=3.5219e-04) (hash(x)=40861777)
490
+ 4060 train 3.870666 (lr=3.4905e-04) (hash(x)=36151939)
491
+ 4070 train 3.838002 (lr=3.4600e-04) (hash(x)=39008241)
492
+ 4080 train 3.923163 (lr=3.4305e-04) (hash(x)=40395746)
493
+ 4090 train 3.960686 (lr=3.4019e-04) (hash(x)=38855480)
494
+ 4100 val loss 3.8702
495
+ 4100 val perplexity 47.9507
496
+ 4100 train 3.892154 (lr=3.3744e-04) (hash(x)=45013254)
497
+ 4110 train 3.902840 (lr=3.3477e-04) (hash(x)=39934818)
498
+ 4120 train 3.908931 (lr=3.3221e-04) (hash(x)=33417705)
499
+ 4130 train 3.844224 (lr=3.2974e-04) (hash(x)=41267978)
500
+ 4140 train 3.841904 (lr=3.2737e-04) (hash(x)=38961609)
501
+ 4150 train 3.838660 (lr=3.2510e-04) (hash(x)=42173878)
502
+ 4160 train 3.804020 (lr=3.2292e-04) (hash(x)=36867405)
503
+ 4170 train 3.786273 (lr=3.2085e-04) (hash(x)=38991954)
504
+ 4180 train 3.842915 (lr=3.1887e-04) (hash(x)=39032592)
505
+ 4190 train 3.820993 (lr=3.1699e-04) (hash(x)=61329402)
506
+ 4200 val loss 3.8693
507
+ 4200 val perplexity 47.9103
508
+ 4200 train 3.836763 (lr=3.1520e-04) (hash(x)=40455200)
509
+ 4210 train 3.864570 (lr=3.1352e-04) (hash(x)=40913417)
510
+ 4220 train 3.893960 (lr=3.1193e-04) (hash(x)=38809704)
511
+ 4230 train 3.879956 (lr=3.1044e-04) (hash(x)=39560204)
512
+ 4240 train 3.882278 (lr=3.0905e-04) (hash(x)=41496549)
513
+ 4250 train 3.919138 (lr=3.0776e-04) (hash(x)=33994768)
514
+ 4260 train 3.905617 (lr=3.0657e-04) (hash(x)=39658687)
515
+ 4270 train 3.783062 (lr=3.0548e-04) (hash(x)=42583064)
516
+ 4280 train 3.848428 (lr=3.0449e-04) (hash(x)=40635811)
517
+ 4290 train 3.748234 (lr=3.0359e-04) (hash(x)=35313841)
518
+ 4300 val loss 3.8669
519
+ 4300 val perplexity 47.7929
520
+ 4300 train 3.927847 (lr=3.0280e-04) (hash(x)=39332245)
521
+ 4310 train 3.829502 (lr=3.0210e-04) (hash(x)=43732999)
522
+ 4320 train 3.832353 (lr=3.0150e-04) (hash(x)=40831835)
523
+ 4330 train 3.969457 (lr=3.0101e-04) (hash(x)=41813368)
524
+ 4340 train 3.870416 (lr=3.0061e-04) (hash(x)=36952586)
525
+ 4350 train 3.895773 (lr=3.0031e-04) (hash(x)=42470745)
526
+ 4360 train 3.817516 (lr=3.0011e-04) (hash(x)=39581495)
527
+ 4370 train 4.013284 (lr=3.0001e-04) (hash(x)=35914538)
528
+ 4374 val loss 3.8591
529
+ 4374 val perplexity 47.4219
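Note that the reported validation perplexity is simply exp of the validation loss (e.g. exp(3.8591) ≈ 47.42, matching the final line above). A minimal sketch for extracting the validation curve from a log in this format, assuming it is saved locally as `log2.txt` (hypothetical path):

```python
# Extract (step, val loss, val perplexity) from a log in the format above and
# check that perplexity == exp(loss).
import math
import re

val_loss, val_ppl = {}, {}
with open("log2.txt") as f:
    for line in f:
        if m := re.match(r"(\d+) val loss ([\d.]+)", line):
            val_loss[int(m.group(1))] = float(m.group(2))
        elif m := re.match(r"(\d+) val perplexity ([\d.]+)", line):
            val_ppl[int(m.group(1))] = float(m.group(2))

for step in sorted(val_loss):
    loss, ppl = val_loss[step], val_ppl[step]
    assert abs(math.exp(loss) - ppl) / ppl < 1e-3
    print(step, loss, ppl)
```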
att_conv_playground/i_small_heads_wd_2_latent_masks_seed_1339/model_04374.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:12d88164dc9cbf758bd394678575652bb7bb723498418a3b57619e85588f09e1
- size 97746890
+ oid sha256:c0e0326138ae07abb8d2f0127a2280177e5b63c2343bbd1eee618a840ac5ff34
+ size 97469578
att_conv_playground/i_small_heads_wd_2_latent_masks_seed_1339/optimizer_04374.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:8fd1657909922b0fd6b6da41d834ddfd349e7eb7223cbb7cf3e287166f859f3e
- size 189216718
+ oid sha256:bd9c57fb10cd826b2addb1095c346a9a777b6bd50acc8cef5892deff46ff0335
+ size 188662222
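The model and optimizer entries are Git LFS pointer files, so only the `oid` (sha256 of the stored object) and `size` change between commits. A minimal sketch for checking a downloaded checkpoint against its pointer, assuming hypothetical local paths:

```python
# Verify a downloaded file against the oid/size recorded in its LFS pointer.
import hashlib
import os

def read_pointer(path):
    # Pointer files are "key value" lines: version, oid sha256:<hex>, size <bytes>.
    fields = dict(line.split(" ", 1) for line in open(path).read().splitlines())
    return fields["oid"].removeprefix("sha256:"), int(fields["size"])

oid, size = read_pointer("model_04374.pt.pointer")  # hypothetical pointer path
assert os.path.getsize("model_04374.pt") == size

h = hashlib.sha256()
with open("model_04374.pt", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
assert h.hexdigest() == oid
```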