andrew-healey committed
Commit 822b8d2 · verified · 1 parent: 4c77236

Upload folder using huggingface_hub

Files changed (26)
  1. attention_kindselective_n_heads4_seed1339/args.json +1 -1
  2. attention_kindselective_n_heads4_seed1339/dataloader_10000.pt +3 -0
  3. attention_kindselective_n_heads4_seed1339/dataloader_40000.pt +3 -0
  4. attention_kindselective_n_heads4_seed1339/dataloader_42500.pt +3 -0
  5. attention_kindselective_n_heads4_seed1339/dataloader_45000.pt +3 -0
  6. attention_kindselective_n_heads4_seed1339/dataloader_47500.pt +3 -0
  7. attention_kindselective_n_heads4_seed1339/dataloader_49999.pt +3 -0
  8. attention_kindselective_n_heads4_seed1339/log2.txt +697 -601
  9. attention_kindselective_n_heads4_seed1339/model_02500.pt +1 -1
  10. attention_kindselective_n_heads4_seed1339/model_05000.pt +1 -1
  11. attention_kindselective_n_heads4_seed1339/model_07500.pt +1 -1
  12. attention_kindselective_n_heads4_seed1339/model_10000.pt +3 -0
  13. attention_kindselective_n_heads4_seed1339/model_40000.pt +3 -0
  14. attention_kindselective_n_heads4_seed1339/model_42500.pt +3 -0
  15. attention_kindselective_n_heads4_seed1339/model_45000.pt +3 -0
  16. attention_kindselective_n_heads4_seed1339/model_47500.pt +3 -0
  17. attention_kindselective_n_heads4_seed1339/model_49999.pt +3 -0
  18. attention_kindselective_n_heads4_seed1339/optimizer_02500.pt +1 -1
  19. attention_kindselective_n_heads4_seed1339/optimizer_05000.pt +1 -1
  20. attention_kindselective_n_heads4_seed1339/optimizer_07500.pt +1 -1
  21. attention_kindselective_n_heads4_seed1339/optimizer_10000.pt +3 -0
  22. attention_kindselective_n_heads4_seed1339/optimizer_40000.pt +3 -0
  23. attention_kindselective_n_heads4_seed1339/optimizer_42500.pt +3 -0
  24. attention_kindselective_n_heads4_seed1339/optimizer_45000.pt +3 -0
  25. attention_kindselective_n_heads4_seed1339/optimizer_47500.pt +3 -0
  26. attention_kindselective_n_heads4_seed1339/optimizer_49999.pt +3 -0
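
All of these artifacts were uploaded with huggingface_hub, so they can be pulled back down the same way. A minimal sketch, assuming the files stay at these paths (the repo_id below is hypothetical; substitute the repository this commit belongs to):

import torch
from huggingface_hub import hf_hub_download

# Download one checkpoint file from this commit and load its tensors on CPU.
path = hf_hub_download(
    repo_id="andrew-healey/wider_is_better",  # hypothetical; use the actual repo id
    filename="attention_kindselective_n_heads4_seed1339/model_49999.pt",
    revision="822b8d2",  # commit id shown above; the full 40-char hash also works
)
state = torch.load(path, map_location="cpu")  # newer torch may need weights_only=False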
attention_kindselective_n_heads4_seed1339/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_6/attention_kindselective_n_heads4_seed1339", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_6", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1339, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 40, "total_batch_size": 10240, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.00015, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "15e-5_10240_4_1339", "n_embd": 256}
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_7/attention_kindselective_n_heads4_seed1339", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 50000, "warmup_steps": 200, "group": "wider_is_better_7", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1339, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 40, "total_batch_size": 10240, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.0001, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "10e-5_10240_4_1339", "n_embd": 256}
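
The substantive config changes are the run length and learning rate: max_steps goes from 10000 to 50000 and max_lr from 0.00015 to 0.0001, with log_dir, group, and key renamed from wider_is_better_6 to wider_is_better_7 accordingly. A minimal sketch for surfacing such changes, assuming local copies of both revisions (the old/ and new/ paths are hypothetical):

import json

with open("old/attention_kindselective_n_heads4_seed1339/args.json") as f:
    old = json.load(f)
with open("new/attention_kindselective_n_heads4_seed1339/args.json") as f:
    new = json.load(f)

# Print every hyperparameter whose value differs between the two revisions.
for k in sorted(old.keys() | new.keys()):
    if old.get(k) != new.get(k):
        print(f"{k}: {old.get(k)!r} -> {new.get(k)!r}")
# For this commit: log_dir, group, key, max_steps (10000 -> 50000), max_lr (0.00015 -> 0.0001).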
attention_kindselective_n_heads4_seed1339/dataloader_10000.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f3858f6c832feea78a674d8c5c384061cc7d4f22cddbd0a2be6de33bc91e2c72
+ size 964
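
Each of these .pt entries is a Git LFS pointer, not the tensor data itself: three "key value" lines giving the spec version, the sha256 oid of the real payload, and its size in bytes. A minimal sketch for reading one, assuming a local checkout where the pointer has not been smudged:

# Parse a Git LFS pointer file into its key/value fields.
def parse_lfs_pointer(path):
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

ptr = parse_lfs_pointer("attention_kindselective_n_heads4_seed1339/dataloader_10000.pt")
assert ptr["oid"] == "sha256:f3858f6c832feea78a674d8c5c384061cc7d4f22cddbd0a2be6de33bc91e2c72"
assert int(ptr["size"]) == 964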
attention_kindselective_n_heads4_seed1339/dataloader_40000.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:84f58ba3b4a1f9be7da4e697e06782f0e1ce4d3aca49f1997087fc83aa466dd9
+ size 964
attention_kindselective_n_heads4_seed1339/dataloader_42500.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bf6d24c78d89100d146bce9f26be940db3d71092473d9b55db97d6b35531eac2
+ size 964
attention_kindselective_n_heads4_seed1339/dataloader_45000.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:22bb90b43d81f3da5454f91a70e1ed29aeb2f470a727ce38390ff8a5c4924889
+ size 964
attention_kindselective_n_heads4_seed1339/dataloader_47500.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:55507725e6988f190e4963078652fafa6b68e8d4f79221387612612babf3e1c1
+ size 964
attention_kindselective_n_heads4_seed1339/dataloader_49999.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:47776cddb8021172f048a950b83f25b692cb340214b800ce3837c15ceb58907c
+ size 964
attention_kindselective_n_heads4_seed1339/log2.txt CHANGED
@@ -1,603 +1,699 @@
1
- 0 train 11.297240 (lr=5.0000e-07) (hash(x)=55241167)
2
- max_steps: 10000
3
- 100 val loss 9.4113
4
- 100 val perplexity 12226.2285
5
- 100 train 9.392327 (lr=5.0500e-05) (hash(x)=46387919)
6
  0 val loss 11.2766
7
  0 val perplexity 78951.9062
8
- 200 val loss 7.8296
9
- 200 val perplexity 2513.9221
10
- 200 train 7.923450 (lr=1.0000e-04) (hash(x)=51276369)
11
- 300 val loss 7.6256
12
- 300 val perplexity 2049.9619
13
- 300 train 7.519519 (lr=9.9977e-05) (hash(x)=48572728)
14
- 0 train 11.297240 (lr=7.5000e-07) (hash(x)=55241167)
15
- 400 val loss 7.5602
16
- 400 val perplexity 1920.2125
17
- 400 train 7.274498 (lr=9.9908e-05) (hash(x)=44943400)
18
- 100 val loss 9.0840
19
- 100 val perplexity 8813.1025
20
- 100 train 9.057368 (lr=7.5750e-05) (hash(x)=46387919)
21
- 500 val loss 7.5149
22
- 500 val perplexity 1835.2683
23
- 500 train 7.412261 (lr=9.9792e-05) (hash(x)=48434590)
24
- 200 val loss 7.7009
25
- 200 val perplexity 2210.4219
26
- 200 train 7.782194 (lr=1.5000e-04) (hash(x)=51276369)
27
- 600 val loss 7.4697
28
- 600 val perplexity 1754.1002
29
- 600 train 7.309482 (lr=9.9631e-05) (hash(x)=47442266)
30
- 300 val loss 7.6257
31
- 300 val perplexity 2050.2629
32
- 300 train 7.521665 (lr=1.4997e-04) (hash(x)=48572728)
33
- 700 val loss 7.4248
34
- 700 val perplexity 1676.9968
35
- 700 train 7.487512 (lr=9.9423e-05) (hash(x)=56495712)
36
- 400 val loss 7.6210
37
- 400 val perplexity 2040.5312
38
- 400 train 7.349378 (lr=1.4986e-04) (hash(x)=44943400)
39
- 800 val loss 7.4002
40
- 800 val perplexity 1636.3661
41
- 800 train 7.382157 (lr=9.9170e-05) (hash(x)=50093738)
42
- 500 val loss 7.5935
43
- 500 val perplexity 1985.2406
44
- 500 train 7.501320 (lr=1.4969e-04) (hash(x)=48434590)
45
- 900 val loss 7.3485
46
- 900 val perplexity 1553.9221
47
- 900 train 7.314708 (lr=9.8872e-05) (hash(x)=49436061)
48
- 600 val loss 7.5432
49
- 600 val perplexity 1887.9182
50
- 600 train 7.397351 (lr=1.4945e-04) (hash(x)=47442266)
51
- 1000 val loss 7.3260
52
- 1000 val perplexity 1519.2688
53
- 1000 train 7.252484 (lr=9.8528e-05) (hash(x)=48818282)
54
- 700 val loss 7.5011
55
- 700 val perplexity 1809.9755
56
- 700 train 7.557298 (lr=1.4913e-04) (hash(x)=56495712)
57
- 1100 val loss 7.2749
58
- 1100 val perplexity 1443.6171
59
- 1100 train 7.333434 (lr=9.8140e-05) (hash(x)=55387425)
60
- 800 val loss 7.4475
61
- 800 val perplexity 1715.6282
62
- 800 train 7.430103 (lr=1.4876e-04) (hash(x)=50093738)
63
- 1200 val loss 7.2674
64
- 1200 val perplexity 1432.7633
65
- 1200 train 7.136762 (lr=9.7708e-05) (hash(x)=48328378)
66
- 900 val loss 7.3857
67
- 900 val perplexity 1612.8206
68
- 900 train 7.335268 (lr=1.4831e-04) (hash(x)=49436061)
69
- 1300 val loss 7.2723
70
- 1300 val perplexity 1439.9219
71
- 1300 train 7.192955 (lr=9.7231e-05) (hash(x)=48956717)
72
- 1000 val loss 7.3329
73
- 1000 val perplexity 1529.8805
74
- 1000 train 7.257126 (lr=1.4779e-04) (hash(x)=48818282)
75
- 1400 val loss 7.2745
76
- 1400 val perplexity 1443.0217
77
- 1400 train 7.159895 (lr=9.6711e-05) (hash(x)=49782571)
78
- 1100 val loss 7.2824
79
- 1100 val perplexity 1454.4734
80
- 1100 train 7.329366 (lr=1.4721e-04) (hash(x)=55387425)
81
- 1500 val loss 7.2582
82
- 1500 val perplexity 1419.7051
83
- 1500 train 7.043027 (lr=9.6149e-05) (hash(x)=51081994)
84
- 1200 val loss 7.2663
85
- 1200 val perplexity 1431.2822
86
- 1200 train 7.143242 (lr=1.4656e-04) (hash(x)=48328378)
87
- 1600 val loss 7.2452
88
- 1600 val perplexity 1401.4271
89
- 1600 train 7.309634 (lr=9.5544e-05) (hash(x)=48791204)
90
- 1300 val loss 7.2275
91
- 1300 val perplexity 1376.7683
92
- 1300 train 7.136340 (lr=1.4585e-04) (hash(x)=48956717)
93
- 1700 val loss 7.2259
94
- 1700 val perplexity 1374.5807
95
- 1700 train 6.931890 (lr=9.4897e-05) (hash(x)=46425011)
96
- 1400 val loss 7.2388
97
- 1400 val perplexity 1392.4169
98
- 1400 train 7.132669 (lr=1.4507e-04) (hash(x)=49782571)
99
- 1800 val loss 7.2145
100
- 1800 val perplexity 1358.9602
101
- 1800 train 6.850976 (lr=9.4209e-05) (hash(x)=44919141)
102
- 1500 val loss 7.2698
103
- 1500 val perplexity 1436.2498
104
- 1500 train 7.063611 (lr=1.4422e-04) (hash(x)=51081994)
105
- 1900 val loss 7.1925
106
- 1900 val perplexity 1329.3683
107
- 1900 train 7.279314 (lr=9.3481e-05) (hash(x)=48299675)
108
- 1600 val loss 7.2511
109
- 1600 val perplexity 1409.6339
110
- 1600 train 7.338916 (lr=1.4332e-04) (hash(x)=48791204)
111
- 2000 val loss 7.1303
112
- 2000 val perplexity 1249.2107
113
- 2000 train 7.887177 (lr=9.2714e-05) (hash(x)=61804797)
114
- 1700 val loss 7.2541
115
- 1700 val perplexity 1413.9268
116
- 1700 train 6.979441 (lr=1.4235e-04) (hash(x)=46425011)
117
- 2100 val loss 7.0845
118
- 2100 val perplexity 1193.3490
119
- 2100 train 7.169927 (lr=9.1908e-05) (hash(x)=53638030)
120
- 1800 val loss 7.2169
121
- 1800 val perplexity 1362.2542
122
- 1800 train 6.886115 (lr=1.4131e-04) (hash(x)=44919141)
123
- 2200 val loss 7.0669
124
- 2200 val perplexity 1172.4698
125
- 2200 train 7.293911 (lr=9.1064e-05) (hash(x)=56364593)
126
- 1900 val loss 7.1736
127
- 1900 val perplexity 1304.4712
128
- 1900 train 7.252931 (lr=1.4022e-04) (hash(x)=48299675)
129
- 2300 val loss 7.0647
130
- 2300 val perplexity 1169.9177
131
- 2300 train 7.168211 (lr=9.0182e-05) (hash(x)=52945580)
132
- 2000 val loss 7.1572
133
- 2000 val perplexity 1283.3210
134
- 2000 train 7.978416 (lr=1.3907e-04) (hash(x)=61804797)
135
- 2400 val loss 7.0197
136
- 2400 val perplexity 1118.4805
137
- 2400 train 7.089632 (lr=8.9265e-05) (hash(x)=54595211)
138
- 2100 val loss 7.0765
139
- 2100 val perplexity 1183.8523
140
- 2500 val loss 7.0181
141
- 2500 val perplexity 1116.6877
142
- 2100 train 7.178714 (lr=1.3786e-04) (hash(x)=53638030)
143
- 2500 train 7.027974 (lr=8.8313e-05) (hash(x)=52323363)
144
- 2200 val loss 7.0702
145
- 2200 val perplexity 1176.4192
146
- 2200 train 7.273838 (lr=1.3660e-04) (hash(x)=56364593)
147
- 2600 val loss 6.9974
148
- 2600 val perplexity 1093.8115
149
- 2600 train 6.790699 (lr=8.7326e-05) (hash(x)=42652707)
150
- 2700 val loss 6.9823
151
- 2700 val perplexity 1077.4113
152
- 2300 val loss 7.0534
153
- 2300 val perplexity 1156.7692
154
- 2700 train 6.681511 (lr=8.6306e-05) (hash(x)=45687855)
155
- 2300 train 7.148480 (lr=1.3527e-04) (hash(x)=52945580)
156
- 2800 val loss 6.9617
157
- 2800 val perplexity 1055.4443
158
- 2800 train 6.951207 (lr=8.5254e-05) (hash(x)=49962830)
159
- 2400 val loss 7.0180
160
- 2400 val perplexity 1116.6062
161
- 2400 train 7.111838 (lr=1.3390e-04) (hash(x)=54595211)
162
- 2900 val loss 6.9430
163
- 2900 val perplexity 1035.8958
164
- 2900 train 6.955583 (lr=8.4170e-05) (hash(x)=52958451)
165
- 2500 val loss 6.9926
166
- 2500 val perplexity 1088.5376
167
- 2500 train 6.990477 (lr=1.3247e-04) (hash(x)=52323363)
168
- 3000 val loss 6.9176
169
- 3000 val perplexity 1009.8487
170
- 3000 train 6.864679 (lr=8.3057e-05) (hash(x)=48896990)
171
- 2600 val loss 7.0060
172
- 2600 val perplexity 1103.2196
173
- 2600 train 6.810057 (lr=1.3099e-04) (hash(x)=42652707)
174
- 3100 val loss 6.9101
175
- 3100 val perplexity 1002.2977
176
- 3100 train 6.856146 (lr=8.1915e-05) (hash(x)=47064930)
177
- 2700 val loss 6.9722
178
- 2700 val perplexity 1066.5864
179
- 2700 train 6.679486 (lr=1.2946e-04) (hash(x)=45687855)
180
- 3200 val loss 6.8815
181
- 3200 val perplexity 974.0536
182
- 3200 train 6.596547 (lr=8.0745e-05) (hash(x)=45145921)
183
- 2800 val loss 6.9433
184
- 2800 val perplexity 1036.2302
185
- 2800 train 6.932369 (lr=1.2788e-04) (hash(x)=49962830)
186
- 3300 val loss 6.8839
187
- 3300 val perplexity 976.3783
188
- 3300 train 6.722473 (lr=7.9549e-05) (hash(x)=48085018)
189
- 2900 val loss 6.9177
190
- 2900 val perplexity 1009.9710
191
- 2900 train 6.917989 (lr=1.2626e-04) (hash(x)=52958451)
192
- 3400 val loss 6.8586
193
- 3400 val perplexity 951.9899
194
- 3400 train 6.852911 (lr=7.8328e-05) (hash(x)=49596053)
195
- 3000 val loss 6.8656
196
- 3000 val perplexity 958.6799
197
- 3000 train 6.826707 (lr=1.2459e-04) (hash(x)=48896990)
198
- 3500 val loss 6.8550
199
- 3500 val perplexity 948.5732
200
- 3500 train 6.454469 (lr=7.7082e-05) (hash(x)=41135634)
201
- 3100 val loss 6.9046
202
- 3100 val perplexity 996.8251
203
- 3100 train 6.876366 (lr=1.2287e-04) (hash(x)=47064930)
204
- 3600 val loss 6.8194
205
- 3600 val perplexity 915.4774
206
- 3600 train 6.813796 (lr=7.5814e-05) (hash(x)=54286330)
207
- 3200 val loss 6.8627
208
- 3200 val perplexity 955.9679
209
- 3200 train 6.592710 (lr=1.2112e-04) (hash(x)=45145921)
210
- 3700 val loss 6.8307
211
- 3700 val perplexity 925.8421
212
- 3700 train 6.721279 (lr=7.4525e-05) (hash(x)=57061993)
213
- 3300 val loss 6.8442
214
- 3300 val perplexity 938.4250
215
- 3300 train 6.669895 (lr=1.1932e-04) (hash(x)=48085018)
216
- 3800 val loss 6.8211
217
- 3800 val perplexity 916.9943
218
- 3800 train 6.377489 (lr=7.3215e-05) (hash(x)=46544884)
219
- 3400 val loss 6.8391
220
- 3400 val perplexity 933.6216
221
- 3400 train 6.844838 (lr=1.1749e-04) (hash(x)=49596053)
222
- 3900 val loss 6.7986
223
- 3900 val perplexity 896.6097
224
- 3900 train 6.638959 (lr=7.1887e-05) (hash(x)=48937435)
225
- 3500 val loss 6.8285
226
- 3500 val perplexity 923.8395
227
- 3500 train 6.439719 (lr=1.1562e-04) (hash(x)=41135634)
228
- 4000 val loss 6.7805
229
- 4000 val perplexity 880.4681
230
- 4000 train 6.811349 (lr=7.0541e-05) (hash(x)=54466186)
231
- 3600 val loss 6.8305
232
- 3600 val perplexity 925.6660
233
- 3600 train 6.827404 (lr=1.1372e-04) (hash(x)=54286330)
234
- 4100 val loss 6.7587
235
- 4100 val perplexity 861.4922
236
- 4100 train 6.585770 (lr=6.9180e-05) (hash(x)=51079995)
237
- 3700 val loss 6.8390
238
- 3700 val perplexity 933.5700
239
- 3700 train 6.702039 (lr=1.1179e-04) (hash(x)=57061993)
240
- 4200 val loss 6.7599
241
- 4200 val perplexity 862.5894
242
- 4200 train 6.885074 (lr=6.7804e-05) (hash(x)=56495581)
243
- 3800 val loss 6.8435
244
- 3800 val perplexity 937.7438
245
- 3800 train 6.419742 (lr=1.0982e-04) (hash(x)=46544884)
246
- 4300 val loss 6.7067
247
- 4300 val perplexity 817.8332
248
- 4300 train 6.550481 (lr=6.6414e-05) (hash(x)=45095478)
249
- 3900 val loss 6.8543
250
- 3900 val perplexity 947.9782
251
- 3900 train 6.697006 (lr=1.0783e-04) (hash(x)=48937435)
252
- 4400 val loss 6.6989
253
- 4400 val perplexity 811.4722
254
- 4400 train 6.710745 (lr=6.5013e-05) (hash(x)=49954028)
255
- 4000 val loss 6.8472
256
- 4000 val perplexity 941.2394
257
- 4000 train 6.863346 (lr=1.0581e-04) (hash(x)=54466186)
258
- 4500 val loss 6.6732
259
- 4500 val perplexity 790.9513
260
- 4500 train 6.604428 (lr=6.3602e-05) (hash(x)=48975821)
261
- 4100 val loss 6.8166
262
- 4100 val perplexity 912.8441
263
- 4100 train 6.650379 (lr=1.0377e-04) (hash(x)=51079995)
264
- 4600 val loss 6.6435
265
- 4600 val perplexity 767.7492
266
- 4600 train 6.736774 (lr=6.2182e-05) (hash(x)=49293028)
267
- 4200 val loss 6.8169
268
- 4200 val perplexity 913.1793
269
- 4200 train 6.960119 (lr=1.0171e-04) (hash(x)=56495581)
270
- 4700 val loss 6.6407
271
- 4700 val perplexity 765.6613
272
- 4700 train 6.549062 (lr=6.0754e-05) (hash(x)=48077753)
273
- 4300 val loss 6.7746
274
- 4300 val perplexity 875.3004
275
- 4300 train 6.632710 (lr=9.9622e-05) (hash(x)=45095478)
276
- 4800 val loss 6.6097
277
- 4800 val perplexity 742.2697
278
- 4800 train 6.485708 (lr=5.9321e-05) (hash(x)=45037879)
279
- 4400 val loss 6.7593
280
- 4400 val perplexity 862.0560
281
- 4400 train 6.739607 (lr=9.7520e-05) (hash(x)=49954028)
282
- 4900 val loss 6.5948
283
- 4900 val perplexity 731.2781
284
- 4900 train 6.503368 (lr=5.7883e-05) (hash(x)=50320154)
285
- 4500 val loss 6.7230
286
- 4500 val perplexity 831.2733
287
- 4500 train 6.650680 (lr=9.5403e-05) (hash(x)=48975821)
288
- 5000 val loss 6.5871
289
- 5000 val perplexity 725.6609
290
- 5000 train 6.830953 (lr=5.6442e-05) (hash(x)=55024523)
291
- 4600 val loss 6.7008
292
- 4600 val perplexity 813.0734
293
- 4600 train 6.782051 (lr=9.3273e-05) (hash(x)=49293028)
294
- 5100 val loss 6.5751
295
- 5100 val perplexity 717.0258
296
- 5100 train 6.487334 (lr=5.5000e-05) (hash(x)=47570607)
297
- 4700 val loss 6.7082
298
- 4700 val perplexity 819.0656
299
- 4700 train 6.629680 (lr=9.1132e-05) (hash(x)=48077753)
300
- 5200 val loss 6.5637
301
- 5200 val perplexity 708.9213
302
- 5200 train 6.681831 (lr=5.3558e-05) (hash(x)=54311349)
303
- 4800 val loss 6.7095
304
- 4800 val perplexity 820.1912
305
- 4800 train 6.573892 (lr=8.8982e-05) (hash(x)=45037879)
306
- 5300 val loss 6.5509
307
- 5300 val perplexity 699.8708
308
- 5300 train 6.444979 (lr=5.2117e-05) (hash(x)=47178524)
309
- 4900 val loss 6.6770
310
- 4900 val perplexity 793.9644
311
- 4900 train 6.600446 (lr=8.6825e-05) (hash(x)=50320154)
312
- 5400 val loss 6.5382
313
- 5400 val perplexity 691.0194
314
- 5400 train 6.540273 (lr=5.0679e-05) (hash(x)=49425088)
315
- 5000 val loss 6.6830
316
- 5000 val perplexity 798.7117
317
- 5000 train 6.917428 (lr=8.4663e-05) (hash(x)=55024523)
318
- 5500 val loss 6.5273
319
- 5500 val perplexity 683.5435
320
- 5500 train 6.369995 (lr=4.9246e-05) (hash(x)=46383189)
321
- 5100 val loss 6.6579
322
- 5100 val perplexity 778.9476
323
- 5100 train 6.595668 (lr=8.2500e-05) (hash(x)=47570607)
324
- 5600 val loss 6.5284
325
- 5600 val perplexity 684.2851
326
- 5600 train 6.712617 (lr=4.7818e-05) (hash(x)=54084990)
327
- 5200 val loss 6.6476
328
- 5200 val perplexity 770.9180
329
- 5200 train 6.782622 (lr=8.0337e-05) (hash(x)=54311349)
330
- 5700 val loss 6.5284
331
- 5700 val perplexity 684.2838
332
- 5700 train 6.222934 (lr=4.6398e-05) (hash(x)=47384182)
333
- 5800 val loss 6.5110
334
- 5800 val perplexity 672.5214
335
- 5800 train 6.396578 (lr=4.4987e-05) (hash(x)=51683744)
336
- 5300 val loss 6.6252
337
- 5300 val perplexity 753.8685
338
- 5300 train 6.532175 (lr=7.8175e-05) (hash(x)=47178524)
339
- 5900 val loss 6.5014
340
- 5900 val perplexity 666.0516
341
- 5900 train 6.510395 (lr=4.3586e-05) (hash(x)=52798836)
342
- 5400 val loss 6.6181
343
- 5400 val perplexity 748.5306
344
- 5400 train 6.616325 (lr=7.6018e-05) (hash(x)=49425088)
345
- 6000 val loss 6.5040
346
- 6000 val perplexity 667.8166
347
- 6000 train 6.393989 (lr=4.2196e-05) (hash(x)=50945000)
348
- 6100 val loss 6.5062
349
- 6100 val perplexity 669.2753
350
- 6100 train 6.196708 (lr=4.0820e-05) (hash(x)=48964427)
351
- 5500 val loss 6.6147
352
- 5500 val perplexity 745.9890
353
- 5500 train 6.448332 (lr=7.3868e-05) (hash(x)=46383189)
354
- 6200 val loss 6.5098
355
- 6200 val perplexity 671.7039
356
- 6200 train 6.268740 (lr=3.9459e-05) (hash(x)=47148610)
357
- 5600 val loss 6.6037
358
- 5600 val perplexity 737.8403
359
- 5600 train 6.795812 (lr=7.1727e-05) (hash(x)=54084990)
360
- 6300 val loss 6.5004
361
- 6300 val perplexity 665.4230
362
- 6300 train 6.412795 (lr=3.8113e-05) (hash(x)=50860553)
363
- 6400 val loss 6.4993
364
- 6400 val perplexity 664.6778
365
- 6400 train 6.270996 (lr=3.6785e-05) (hash(x)=50553992)
366
- 5700 val loss 6.5924
367
- 5700 val perplexity 729.5043
368
- 5700 train 6.275771 (lr=6.9597e-05) (hash(x)=47384182)
369
- 6500 val loss 6.4847
370
- 6500 val perplexity 655.0747
371
- 6500 train 6.289717 (lr=3.5475e-05) (hash(x)=52521332)
372
- 5800 val loss 6.5847
373
- 5800 val perplexity 723.9128
374
- 5800 train 6.476488 (lr=6.7480e-05) (hash(x)=51683744)
375
- 6600 val loss 6.4606
376
- 6600 val perplexity 639.4189
377
- 6600 train 6.806929 (lr=3.4186e-05) (hash(x)=52609843)
378
- 6700 val loss 6.4414
379
- 6700 val perplexity 627.2814
380
- 6700 train 6.480598 (lr=3.2918e-05) (hash(x)=53632957)
381
- 5900 val loss 6.5876
382
- 5900 val perplexity 726.0012
383
- 5900 train 6.579369 (lr=6.5378e-05) (hash(x)=52798836)
384
- 6800 val loss 6.4358
385
- 6800 val perplexity 623.7556
386
- 6800 train 6.441747 (lr=3.1672e-05) (hash(x)=46927608)
387
- 6000 val loss 6.5899
388
- 6000 val perplexity 727.6727
389
- 6000 train 6.478214 (lr=6.3294e-05) (hash(x)=50945000)
390
- 6900 val loss 6.4211
391
- 6900 val perplexity 614.6884
392
- 6900 train 6.448431 (lr=3.0451e-05) (hash(x)=54483057)
393
- 7000 val loss 6.4171
394
- 7000 val perplexity 612.2233
395
- 6100 val loss 6.5737
396
- 6100 val perplexity 716.0176
397
- 7000 train 6.564780 (lr=2.9255e-05) (hash(x)=51493884)
398
- 6100 train 6.276618 (lr=6.1230e-05) (hash(x)=48964427)
399
- 7100 val loss 6.4078
400
- 7100 val perplexity 606.5637
401
- 7100 train 6.538711 (lr=2.8085e-05) (hash(x)=53303341)
402
- 6200 val loss 6.5912
403
- 6200 val perplexity 728.6831
404
- 6200 train 6.360708 (lr=5.9188e-05) (hash(x)=47148610)
405
- 7200 val loss 6.4123
406
- 7200 val perplexity 609.2900
407
- 7200 train 6.105514 (lr=2.6943e-05) (hash(x)=45272178)
408
- 6300 val loss 6.5876
409
- 6300 val perplexity 726.0035
410
- 6300 train 6.502324 (lr=5.7169e-05) (hash(x)=50860553)
411
- 7300 val loss 6.4032
412
- 7300 val perplexity 603.7695
413
- 7300 train 6.337297 (lr=2.5830e-05) (hash(x)=50389872)
414
- 7400 val loss 6.3916
415
- 7400 val perplexity 596.8148
416
- 7400 train 6.031796 (lr=2.4746e-05) (hash(x)=43796301)
417
- 6400 val loss 6.5650
418
- 6400 val perplexity 709.8069
419
- 6400 train 6.330476 (lr=5.5177e-05) (hash(x)=50553992)
420
- 7500 val loss 6.3875
421
- 7500 val perplexity 594.3956
422
- 7500 train 6.322572 (lr=2.3694e-05) (hash(x)=47808686)
423
- 6500 val loss 6.5650
424
- 6500 val perplexity 709.7889
425
- 6500 train 6.371252 (lr=5.3213e-05) (hash(x)=52521332)
426
- 7600 val loss 6.3867
427
- 7600 val perplexity 593.9199
428
- 7600 train 6.118526 (lr=2.2674e-05) (hash(x)=41936898)
429
- 7700 val loss 6.3863
430
- 7700 val perplexity 593.6702
431
- 7700 train 6.602107 (lr=2.1687e-05) (hash(x)=57550318)
432
- 6600 val loss 6.5361
433
- 6600 val perplexity 689.5613
434
- 6600 train 6.896532 (lr=5.1279e-05) (hash(x)=52609843)
435
- 7800 val loss 6.3803
436
- 7800 val perplexity 590.1259
437
- 7800 train 6.245681 (lr=2.0735e-05) (hash(x)=47485210)
438
- 6700 val loss 6.5179
439
- 6700 val perplexity 677.1226
440
- 6700 train 6.555040 (lr=4.9377e-05) (hash(x)=53632957)
441
- 7900 val loss 6.3767
442
- 7900 val perplexity 587.9981
443
- 7900 train 6.719403 (lr=1.9818e-05) (hash(x)=53228688)
444
- 8000 val loss 6.3760
445
- 8000 val perplexity 587.5701
446
- 8000 train 6.432549 (lr=1.8936e-05) (hash(x)=52018673)
447
- 6800 val loss 6.5208
448
- 6800 val perplexity 679.0950
449
- 6800 train 6.524570 (lr=4.7509e-05) (hash(x)=46927608)
450
- 8100 val loss 6.3687
451
- 8100 val perplexity 583.2793
452
- 8100 train 6.201787 (lr=1.8092e-05) (hash(x)=47079349)
453
- 6900 val loss 6.5172
454
- 6900 val perplexity 676.6768
455
- 6900 train 6.538620 (lr=4.5676e-05) (hash(x)=54483057)
456
- 8200 val loss 6.3645
457
- 8200 val perplexity 580.8560
458
- 8200 train 6.434308 (lr=1.7286e-05) (hash(x)=57921563)
459
- 8300 val loss 6.3590
460
- 8300 val perplexity 577.6404
461
- 8300 train 5.898799 (lr=1.6519e-05) (hash(x)=45038933)
462
- 7000 val loss 6.5124
463
- 7000 val perplexity 673.4177
464
- 7000 train 6.681723 (lr=4.3882e-05) (hash(x)=51493884)
465
- 8400 val loss 6.3775
466
- 8400 val perplexity 588.4750
467
- 8400 train 6.225904 (lr=1.5791e-05) (hash(x)=47763246)
468
- 7100 val loss 6.4924
469
- 7100 val perplexity 660.0879
470
- 7100 train 6.627834 (lr=4.2128e-05) (hash(x)=53303341)
471
- 8500 val loss 6.3612
472
- 8500 val perplexity 578.9673
473
- 8500 train 6.429732 (lr=1.5103e-05) (hash(x)=56176595)
474
- 8600 val loss 6.3597
475
- 8600 val perplexity 578.0970
476
- 8600 train 6.434188 (lr=1.4456e-05) (hash(x)=55184249)
477
- 7200 val loss 6.5003
478
- 7200 val perplexity 665.3522
479
- 7200 train 6.209903 (lr=4.0414e-05) (hash(x)=45272178)
480
- 8700 val loss 6.3549
481
- 8700 val perplexity 575.2811
482
- 8700 train 6.294187 (lr=1.3851e-05) (hash(x)=46471646)
483
- 8800 val loss 6.3584
484
- 8800 val perplexity 577.3042
485
- 8800 train 6.057069 (lr=1.3289e-05) (hash(x)=46233162)
486
- 7300 val loss 6.4876
487
- 7300 val perplexity 656.9751
488
- 7300 train 6.401985 (lr=3.8745e-05) (hash(x)=50389872)
489
- 8900 val loss 6.3599
490
- 8900 val perplexity 578.1836
491
- 8900 train 6.283579 (lr=1.2769e-05) (hash(x)=47233684)
492
- 7400 val loss 6.4828
493
- 7400 val perplexity 653.8246
494
- 7400 train 6.129344 (lr=3.7120e-05) (hash(x)=43796301)
495
- 9000 val loss 6.3502
496
- 9000 val perplexity 572.6089
497
- 9000 train 6.235326 (lr=1.2292e-05) (hash(x)=48374529)
498
- 9100 val loss 6.3393
499
- 9100 val perplexity 566.3860
500
- 9100 train 6.325945 (lr=1.1860e-05) (hash(x)=48065371)
501
- 7500 val loss 6.4820
502
- 7500 val perplexity 653.2842
503
- 7500 train 6.422829 (lr=3.5541e-05) (hash(x)=47808686)
504
- 9200 val loss 6.3315
505
- 9200 val perplexity 561.9741
506
- 9200 train 6.326896 (lr=1.1472e-05) (hash(x)=47408078)
507
- 7600 val loss 6.4791
508
- 7600 val perplexity 651.3569
509
- 7600 train 6.215595 (lr=3.4011e-05) (hash(x)=41936898)
510
- 9300 val loss 6.3276
511
- 9300 val perplexity 559.8051
512
- 9300 train 6.309064 (lr=1.1128e-05) (hash(x)=50749781)
513
- 9400 val loss 6.3237
514
- 9400 val perplexity 557.6545
515
- 9400 train 6.525542 (lr=1.0830e-05) (hash(x)=48560169)
516
- 7700 val loss 6.4761
517
- 7700 val perplexity 649.4601
518
- 7700 train 6.682161 (lr=3.2531e-05) (hash(x)=57550318)
519
- 9500 val loss 6.3229
520
- 9500 val perplexity 557.1821
521
- 9500 train 6.435276 (lr=1.0577e-05) (hash(x)=50936392)
522
- 7800 val loss 6.4727
523
- 7800 val perplexity 647.2555
524
- 7800 train 6.342023 (lr=3.1102e-05) (hash(x)=47485210)
525
- 9600 val loss 6.3189
526
- 9600 val perplexity 554.9551
527
- 9600 train 6.341009 (lr=1.0369e-05) (hash(x)=50651714)
528
- 9700 val loss 6.3178
529
- 9700 val perplexity 554.3513
530
- 9700 train 6.391021 (lr=1.0208e-05) (hash(x)=47311384)
531
- 7900 val loss 6.4673
532
- 7900 val perplexity 643.7279
533
- 7900 train 6.802589 (lr=2.9726e-05) (hash(x)=53228688)
534
- 8000 val loss 6.4706
535
- 8000 val perplexity 645.8663
536
- 8000 train 6.519435 (lr=2.8405e-05) (hash(x)=52018673)
537
- 9800 val loss 6.3160
538
- 9800 val perplexity 553.3610
539
- 9800 train 6.418117 (lr=1.0092e-05) (hash(x)=50921139)
540
- 8100 val loss 6.4637
541
- 8100 val perplexity 641.4133
542
- 8100 train 6.298305 (lr=2.7138e-05) (hash(x)=47079349)
543
- 9900 val loss 6.3219
544
- 9900 val perplexity 556.6038
545
- 9900 train 6.467185 (lr=1.0023e-05) (hash(x)=48142455)
546
- 8200 val loss 6.4578
547
- 8200 val perplexity 637.6469
548
- 8200 train 6.539501 (lr=2.5929e-05) (hash(x)=57921563)
549
- 9999 val loss 6.3137
550
- 9999 val perplexity 552.0605
551
- 8300 val loss 6.4564
552
- 8300 val perplexity 636.7405
553
- 8300 train 6.004556 (lr=2.4778e-05) (hash(x)=45038933)
554
- 8400 val loss 6.4678
555
- 8400 val perplexity 644.0859
556
- 8400 train 6.313585 (lr=2.3686e-05) (hash(x)=47763246)
557
- 8500 val loss 6.4619
558
- 8500 val perplexity 640.2839
559
- 8500 train 6.524305 (lr=2.2655e-05) (hash(x)=56176595)
560
- 8600 val loss 6.4634
561
- 8600 val perplexity 641.2393
562
- 8600 train 6.531406 (lr=2.1685e-05) (hash(x)=55184249)
563
- 8700 val loss 6.4605
564
- 8700 val perplexity 639.3777
565
- 8700 train 6.405812 (lr=2.0777e-05) (hash(x)=46471646)
566
- 8800 val loss 6.4620
567
- 8800 val perplexity 640.3541
568
- 8800 train 6.169169 (lr=1.9933e-05) (hash(x)=46233162)
569
- 8900 val loss 6.4575
570
- 8900 val perplexity 637.4733
571
- 8900 train 6.387800 (lr=1.9153e-05) (hash(x)=47233684)
572
- 9000 val loss 6.4577
573
- 9000 val perplexity 637.6104
574
- 9000 train 6.348251 (lr=1.8439e-05) (hash(x)=48374529)
575
- 9100 val loss 6.4384
576
- 9100 val perplexity 625.4025
577
- 9100 train 6.387834 (lr=1.7790e-05) (hash(x)=48065371)
578
- 9200 val loss 6.4323
579
- 9200 val perplexity 621.5872
580
- 9200 train 6.400970 (lr=1.7208e-05) (hash(x)=47408078)
581
- 9300 val loss 6.4267
582
- 9300 val perplexity 618.1290
583
- 9300 train 6.410742 (lr=1.6692e-05) (hash(x)=50749781)
584
- 9400 val loss 6.4225
585
- 9400 val perplexity 615.5146
586
- 9400 train 6.621217 (lr=1.6245e-05) (hash(x)=48560169)
587
- 9500 val loss 6.4212
588
- 9500 val perplexity 614.7256
589
- 9500 train 6.548310 (lr=1.5865e-05) (hash(x)=50936392)
590
- 9600 val loss 6.4178
591
- 9600 val perplexity 612.6835
592
- 9600 train 6.430169 (lr=1.5554e-05) (hash(x)=50651714)
593
- 9700 val loss 6.4189
594
- 9700 val perplexity 613.3140
595
- 9700 train 6.469123 (lr=1.5312e-05) (hash(x)=47311384)
596
- 9800 val loss 6.4171
597
- 9800 val perplexity 612.1956
598
- 9800 train 6.518755 (lr=1.5139e-05) (hash(x)=50921139)
599
- 9900 val loss 6.4295
600
- 9900 val perplexity 619.8542
601
- 9900 train 6.563539 (lr=1.5035e-05) (hash(x)=48142455)
602
- 9999 val loss 6.4144
603
- 9999 val perplexity 610.5499
1
+ max_steps: 50000
2
+ 38300 val loss 5.6784
3
+ 38300 val perplexity 292.4702
4
+ 38300 train 5.395936 (lr=1.0855e-05) (hash(x)=49878964)
5
  0 val loss 11.2766
6
  0 val perplexity 78951.9062
7
+ 38400 val loss 5.6700
8
+ 38400 val perplexity 290.0423
9
+ 38400 train 5.604312 (lr=1.0760e-05) (hash(x)=52855313)
10
+ 38500 val loss 5.6699
11
+ 38500 val perplexity 289.9914
12
+ 38500 train 5.250736 (lr=1.0666e-05) (hash(x)=47213252)
13
+ 0 train 11.297240 (lr=5.0000e-07) (hash(x)=55241167)
14
+ 38600 val loss 5.6694
15
+ 38600 val perplexity 289.8528
16
+ 38600 train 5.830087 (lr=1.0572e-05) (hash(x)=50001498)
17
+ 100 val loss 9.4564
18
+ 100 val perplexity 12789.4766
19
+ 100 train 9.441436 (lr=5.0500e-05) (hash(x)=46387919)
20
+ 38700 val loss 5.6726
21
+ 38700 val perplexity 290.7901
22
+ 38700 train 5.401573 (lr=1.0479e-05) (hash(x)=49541736)
23
+ 200 val loss 7.8121
24
+ 200 val perplexity 2470.3337
25
+ 200 train 7.900826 (lr=1.0000e-04) (hash(x)=51276369)
26
+ 38800 val loss 5.6685
27
+ 38800 val perplexity 289.6135
28
+ 38800 train 5.601834 (lr=1.0386e-05) (hash(x)=49712820)
29
+ 300 val loss 7.6376
30
+ 300 val perplexity 2074.7930
31
+ 300 train 7.534567 (lr=9.9999e-05) (hash(x)=48572728)
32
+ 38900 val loss 5.6613
33
+ 38900 val perplexity 287.5113
34
+ 38900 train 5.612003 (lr=1.0294e-05) (hash(x)=50913051)
35
+ 400 val loss 7.5966
36
+ 400 val perplexity 1991.4926
37
+ 400 train 7.331833 (lr=9.9996e-05) (hash(x)=44943400)
38
+ 39000 val loss 5.6624
39
+ 39000 val perplexity 287.8408
40
+ 39000 train 5.621553 (lr=1.0203e-05) (hash(x)=46110703)
41
+ 500 val loss 7.5399
42
+ 500 val perplexity 1881.5571
43
+ 500 train 7.449239 (lr=9.9992e-05) (hash(x)=48434590)
44
+ 39100 val loss 5.6597
45
+ 39100 val perplexity 287.0684
46
+ 39100 train 5.493328 (lr=1.0113e-05) (hash(x)=43887045)
47
+ 600 val loss 7.4966
48
+ 600 val perplexity 1801.9823
49
+ 600 train 7.344361 (lr=9.9986e-05) (hash(x)=47442266)
50
+ 39200 val loss 5.6595
51
+ 39200 val perplexity 287.0032
52
+ 39200 train 5.897973 (lr=1.0023e-05) (hash(x)=53102477)
53
+ 700 val loss 7.4837
54
+ 700 val perplexity 1778.8489
55
+ 700 train 7.543903 (lr=9.9978e-05) (hash(x)=56495712)
56
+ 39300 val loss 5.6569
57
+ 39300 val perplexity 286.2463
58
+ 39300 train 5.512180 (lr=9.9341e-06) (hash(x)=44607212)
59
+ 800 val loss 7.4533
60
+ 800 val perplexity 1725.5752
61
+ 800 train 7.440802 (lr=9.9968e-05) (hash(x)=50093738)
62
+ 39400 val loss 5.6587
63
+ 39400 val perplexity 286.7726
64
+ 39400 train 5.844805 (lr=9.8458e-06) (hash(x)=55054109)
65
+ 900 val loss 7.4095
66
+ 900 val perplexity 1651.6809
67
+ 900 train 7.374321 (lr=9.9956e-05) (hash(x)=49436061)
68
+ 39500 val loss 5.6560
69
+ 39500 val perplexity 286.0097
70
+ 39500 train 5.519652 (lr=9.7581e-06) (hash(x)=47529853)
71
+ 1000 val loss 7.3944
72
+ 1000 val perplexity 1626.8689
73
+ 1000 train 7.331343 (lr=9.9943e-05) (hash(x)=48818282)
74
+ 39600 val loss 5.6537
75
+ 39600 val perplexity 285.3348
76
+ 39600 train 5.425322 (lr=9.6712e-06) (hash(x)=50783698)
77
+ 1100 val loss 7.3509
78
+ 1100 val perplexity 1557.6047
79
+ 1100 train 7.407344 (lr=9.9927e-05) (hash(x)=55387425)
80
+ 39700 val loss 5.6570
81
+ 39700 val perplexity 286.2832
82
+ 39700 train 5.444355 (lr=9.5849e-06) (hash(x)=47104160)
83
+ 1200 val loss 7.5253
84
+ 1200 val perplexity 1854.3712
85
+ 1200 train 7.421935 (lr=9.9910e-05) (hash(x)=48328378)
86
+ 39800 val loss 5.6608
87
+ 39800 val perplexity 287.3673
88
+ 39800 train 5.592721 (lr=9.4994e-06) (hash(x)=50941478)
89
+ 1300 val loss 7.3479
90
+ 1300 val perplexity 1553.0095
91
+ 1300 train 7.264929 (lr=9.9892e-05) (hash(x)=48956717)
92
+ 39900 val loss 5.6565
93
+ 39900 val perplexity 286.1535
94
+ 39900 train 5.388857 (lr=9.4146e-06) (hash(x)=47374830)
95
+ 1400 val loss 7.3611
96
+ 1400 val perplexity 1573.6216
97
+ 1400 train 7.274777 (lr=9.9871e-05) (hash(x)=49782571)
98
+ 40000 val loss 5.6581
99
+ 40000 val perplexity 286.6104
100
+ 40000 train 5.779648 (lr=9.3305e-06) (hash(x)=56749414)
101
+ 1500 val loss 7.3527
102
+ 1500 val perplexity 1560.3909
103
+ 1500 train 7.145610 (lr=9.9849e-05) (hash(x)=51081994)
104
+ 40100 val loss 5.6620
105
+ 40100 val perplexity 287.7258
106
+ 40100 train 5.607458 (lr=9.2472e-06) (hash(x)=49256359)
107
+ 1600 val loss 7.3263
108
+ 1600 val perplexity 1519.7659
109
+ 1600 train 7.390820 (lr=9.9825e-05) (hash(x)=48791204)
110
+ 40200 val loss 5.6555
111
+ 40200 val perplexity 285.8730
112
+ 40200 train 5.576644 (lr=9.1646e-06) (hash(x)=47932192)
113
+ 1700 val loss 7.2937
114
+ 1700 val perplexity 1471.0563
115
+ 1700 train 7.009460 (lr=9.9799e-05) (hash(x)=46425011)
116
+ 40300 val loss 5.6568
117
+ 40300 val perplexity 286.2384
118
+ 40300 train 5.476855 (lr=9.0827e-06) (hash(x)=42473499)
119
+ 1800 val loss 7.3241
120
+ 1800 val perplexity 1516.4382
121
+ 1800 train 6.985591 (lr=9.9771e-05) (hash(x)=44919141)
122
+ 40400 val loss 5.6550
123
+ 40400 val perplexity 285.7186
124
+ 40400 train 5.456766 (lr=9.0015e-06) (hash(x)=50469946)
125
+ 1900 val loss 7.2486
126
+ 1900 val perplexity 1406.1027
127
+ 1900 train 7.351365 (lr=9.9741e-05) (hash(x)=48299675)
128
+ 40500 val loss 5.6501
129
+ 40500 val perplexity 284.3304
130
+ 40500 train 5.743055 (lr=8.9211e-06) (hash(x)=48818656)
131
+ 2000 val loss 7.2173
132
+ 2000 val perplexity 1362.7544
133
+ 2000 train 8.040308 (lr=9.9710e-05) (hash(x)=61804797)
134
+ 40600 val loss 5.6457
135
+ 40600 val perplexity 283.0702
136
+ 40600 train 5.488149 (lr=8.8414e-06) (hash(x)=45215748)
137
+ 2100 val loss 7.2014
138
+ 2100 val perplexity 1341.2904
139
+ 2100 train 7.292386 (lr=9.9677e-05) (hash(x)=53638030)
140
+ 40700 val loss 5.6458
141
+ 40700 val perplexity 283.0904
142
+ 40700 train 5.773078 (lr=8.7624e-06) (hash(x)=48057577)
143
+ 2200 val loss 7.1685
144
+ 2200 val perplexity 1297.8553
145
+ 2200 train 7.388167 (lr=9.9642e-05) (hash(x)=56364593)
146
+ 40800 val loss 5.6444
147
+ 40800 val perplexity 282.7088
148
+ 40800 train 5.605831 (lr=8.6842e-06) (hash(x)=50103136)
149
+ 2300 val loss 7.1717
150
+ 2300 val perplexity 1302.0543
151
+ 2300 train 7.251419 (lr=9.9606e-05) (hash(x)=52945580)
152
+ 40900 val loss 5.6433
153
+ 40900 val perplexity 282.3985
154
+ 40900 train 5.499018 (lr=8.6068e-06) (hash(x)=45764202)
155
+ 2400 val loss 7.1734
156
+ 2400 val perplexity 1304.2727
157
+ 2400 train 7.251097 (lr=9.9567e-05) (hash(x)=54595211)
158
+ 41000 val loss 5.6416
159
+ 41000 val perplexity 281.9000
160
+ 41000 train 5.758846 (lr=8.5301e-06) (hash(x)=50724130)
161
+ 2500 val loss 7.1699
162
+ 2500 val perplexity 1299.7263
163
+ 2500 train 7.189177 (lr=9.9527e-05) (hash(x)=52323363)
164
+ 41100 val loss 5.6407
165
+ 41100 val perplexity 281.6723
166
+ 41100 train 5.572755 (lr=8.4541e-06) (hash(x)=48739125)
167
+ 2600 val loss 7.1373
168
+ 2600 val perplexity 1257.9912
169
+ 2600 train 6.959945 (lr=9.9485e-05) (hash(x)=42652707)
170
+ 41200 val loss 5.6379
171
+ 41200 val perplexity 280.8844
172
+ 41200 train 5.594127 (lr=8.3789e-06) (hash(x)=47473129)
173
+ 2700 val loss 7.1502
174
+ 2700 val perplexity 1274.4105
175
+ 2700 train 6.867459 (lr=9.9442e-05) (hash(x)=45687855)
176
+ 41300 val loss 5.6379
177
+ 41300 val perplexity 280.8649
178
+ 41300 train 5.563573 (lr=8.3045e-06) (hash(x)=51032823)
179
+ 2800 val loss 7.1220
180
+ 2800 val perplexity 1238.9615
181
+ 2800 train 7.126947 (lr=9.9396e-05) (hash(x)=49962830)
182
+ 41400 val loss 5.6520
183
+ 41400 val perplexity 284.8671
184
+ 41400 train 5.698008 (lr=8.2308e-06) (hash(x)=44411781)
185
+ 2900 val loss 7.1669
186
+ 2900 val perplexity 1295.7585
187
+ 2900 train 7.199690 (lr=9.9349e-05) (hash(x)=52958451)
188
+ 41500 val loss 5.6380
189
+ 41500 val perplexity 280.8879
190
+ 41500 train 5.683953 (lr=8.1579e-06) (hash(x)=48630357)
191
+ 3000 val loss 7.1216
192
+ 3000 val perplexity 1238.4033
193
+ 3000 train 7.069978 (lr=9.9300e-05) (hash(x)=48896990)
194
+ 41600 val loss 5.6353
195
+ 41600 val perplexity 280.1527
196
+ 41600 train 5.446326 (lr=8.0858e-06) (hash(x)=47836869)
197
+ 3100 val loss 7.1046
198
+ 3100 val perplexity 1217.5770
199
+ 3100 train 7.054947 (lr=9.9249e-05) (hash(x)=47064930)
200
+ 41700 val loss 5.6361
201
+ 41700 val perplexity 280.3648
202
+ 41700 train 5.515561 (lr=8.0144e-06) (hash(x)=49143658)
203
+ 3200 val loss 7.0866
204
+ 3200 val perplexity 1195.8274
205
+ 3200 train 6.860336 (lr=9.9197e-05) (hash(x)=45145921)
206
+ 41800 val loss 5.6389
207
+ 41800 val perplexity 281.1574
208
+ 41800 train 5.532392 (lr=7.9438e-06) (hash(x)=50426446)
209
+ 3300 val loss 7.0858
210
+ 3300 val perplexity 1194.9000
211
+ 3300 train 6.935767 (lr=9.9142e-05) (hash(x)=48085018)
212
+ 41900 val loss 5.6362
213
+ 41900 val perplexity 280.3896
214
+ 41900 train 5.521365 (lr=7.8740e-06) (hash(x)=48134303)
215
+ 3400 val loss 7.0704
216
+ 3400 val perplexity 1176.6060
217
+ 3400 train 7.080620 (lr=9.9086e-05) (hash(x)=49596053)
218
+ 42000 val loss 5.6366
219
+ 42000 val perplexity 280.5153
220
+ 42000 train 5.442455 (lr=7.8050e-06) (hash(x)=48151525)
221
+ 3500 val loss 7.0368
222
+ 3500 val perplexity 1137.7137
223
+ 3500 train 6.621675 (lr=9.9028e-05) (hash(x)=41135634)
224
+ 42100 val loss 5.6404
225
+ 42100 val perplexity 281.5832
226
+ 42100 train 5.503111 (lr=7.7368e-06) (hash(x)=49984437)
227
+ 3600 val loss 7.0387
228
+ 3600 val perplexity 1139.8794
229
+ 3600 train 7.045468 (lr=9.8969e-05) (hash(x)=54286330)
230
+ 42200 val loss 5.6393
231
+ 42200 val perplexity 281.2754
232
+ 42200 train 5.540201 (lr=7.6693e-06) (hash(x)=49103965)
233
+ 3700 val loss 7.0348
234
+ 3700 val perplexity 1135.4819
235
+ 3700 train 6.938444 (lr=9.8908e-05) (hash(x)=57061993)
236
+ 42300 val loss 5.6361
237
+ 42300 val perplexity 280.3770
238
+ 42300 train 5.343730 (lr=7.6027e-06) (hash(x)=52134795)
239
+ 3800 val loss 7.0378
240
+ 3800 val perplexity 1138.8237
241
+ 3800 train 6.616735 (lr=9.8845e-05) (hash(x)=46544884)
242
+ 42400 val loss 5.6382
243
+ 42400 val perplexity 280.9585
244
+ 42400 train 5.463866 (lr=7.5368e-06) (hash(x)=47289074)
245
+ 3900 val loss 7.0345
246
+ 3900 val perplexity 1135.1008
247
+ 3900 train 6.881027 (lr=9.8780e-05) (hash(x)=48937435)
248
+ 42500 val loss 5.6371
249
+ 42500 val perplexity 280.6571
250
+ 42500 train 6.041627 (lr=7.4717e-06) (hash(x)=54213272)
251
+ 4000 val loss 7.0398
252
+ 4000 val perplexity 1141.1144
253
+ 4000 train 6.977884 (lr=9.8713e-05) (hash(x)=54466186)
254
+ 42600 val loss 5.6364
255
+ 42600 val perplexity 280.4386
256
+ 42600 train 5.344518 (lr=7.4074e-06) (hash(x)=49347326)
257
+ 4100 val loss 7.0053
258
+ 4100 val perplexity 1102.4150
259
+ 4100 train 6.836554 (lr=9.8645e-05) (hash(x)=51079995)
260
+ 42700 val loss 5.6367
261
+ 42700 val perplexity 280.5423
262
+ 42700 train 5.406415 (lr=7.3440e-06) (hash(x)=45762383)
263
+ 4200 val loss 6.9973
264
+ 4200 val perplexity 1093.6879
265
+ 4200 train 7.157095 (lr=9.8575e-05) (hash(x)=56495581)
266
+ 42800 val loss 5.6380
267
+ 42800 val perplexity 280.8899
268
+ 42800 train 5.719070 (lr=7.2813e-06) (hash(x)=50377506)
269
+ 4300 val loss 6.9613
270
+ 4300 val perplexity 1055.0035
271
+ 4300 train 6.850510 (lr=9.8503e-05) (hash(x)=45095478)
272
+ 42900 val loss 5.6290
273
+ 42900 val perplexity 278.3965
274
+ 42900 train 5.998972 (lr=7.2194e-06) (hash(x)=57332431)
275
+ 4400 val loss 6.9297
276
+ 4400 val perplexity 1022.1531
277
+ 4400 train 6.888327 (lr=9.8430e-05) (hash(x)=49954028)
278
+ 43000 val loss 5.6294
279
+ 43000 val perplexity 278.5089
280
+ 43000 train 5.674274 (lr=7.1583e-06) (hash(x)=50599951)
281
+ 4500 val loss 6.9208
282
+ 4500 val perplexity 1013.0889
283
+ 4500 train 6.849075 (lr=9.8355e-05) (hash(x)=48975821)
284
+ 43100 val loss 5.6275
285
+ 43100 val perplexity 277.9758
286
+ 43100 train 5.593292 (lr=7.0981e-06) (hash(x)=51139362)
287
+ 4600 val loss 6.8843
288
+ 4600 val perplexity 976.8253
289
+ 4600 train 6.973327 (lr=9.8278e-05) (hash(x)=49293028)
290
+ 43200 val loss 5.6260
291
+ 43200 val perplexity 277.5468
292
+ 43200 train 5.478598 (lr=7.0386e-06) (hash(x)=45759314)
293
+ 4700 val loss 6.8725
294
+ 4700 val perplexity 965.3833
295
+ 4700 train 6.789083 (lr=9.8199e-05) (hash(x)=48077753)
296
+ 43300 val loss 5.6245
297
+ 43300 val perplexity 277.1455
298
+ 43300 train 5.677057 (lr=6.9800e-06) (hash(x)=48251106)
299
+ 4800 val loss 6.8344
300
+ 4800 val perplexity 929.3071
301
+ 4800 train 6.655324 (lr=9.8119e-05) (hash(x)=45037879)
302
+ 43400 val loss 5.6273
303
+ 43400 val perplexity 277.9023
304
+ 43400 train 5.983092 (lr=6.9222e-06) (hash(x)=49941825)
305
+ 4900 val loss 6.7952
306
+ 4900 val perplexity 893.5521
307
+ 4900 train 6.717526 (lr=9.8036e-05) (hash(x)=50320154)
308
+ 5000 val loss 6.8113
309
+ 5000 val perplexity 908.0694
310
+ 43500 val loss 5.6248
311
+ 43500 val perplexity 277.2100
312
+ 43500 train 6.025346 (lr=6.8652e-06) (hash(x)=47282597)
313
+ 5000 train 7.030609 (lr=9.7953e-05) (hash(x)=55024523)
314
+ 5100 val loss 6.7562
315
+ 5100 val perplexity 859.3580
316
+ 5100 train 6.689296 (lr=9.7867e-05) (hash(x)=47570607)
317
+ 43600 val loss 5.6236
318
+ 43600 val perplexity 276.8875
319
+ 43600 train 5.733875 (lr=6.8090e-06) (hash(x)=47311813)
320
+ 5200 val loss 6.7385
321
+ 5200 val perplexity 844.3349
322
+ 5200 train 6.866223 (lr=9.7780e-05) (hash(x)=54311349)
323
+ 43700 val loss 5.6253
324
+ 43700 val perplexity 277.3623
325
+ 43700 train 5.692357 (lr=6.7537e-06) (hash(x)=51739445)
326
+ 5300 val loss 6.7112
327
+ 5300 val perplexity 821.5640
328
+ 5300 train 6.617697 (lr=9.7691e-05) (hash(x)=47178524)
329
+ 43800 val loss 5.6239
330
+ 43800 val perplexity 276.9691
331
+ 43800 train 5.663078 (lr=6.6992e-06) (hash(x)=58597156)
332
+ 5400 val loss 6.6875
333
+ 5400 val perplexity 802.3312
334
+ 5400 train 6.683461 (lr=9.7600e-05) (hash(x)=49425088)
335
+ 43900 val loss 5.6227
336
+ 43900 val perplexity 276.6289
337
+ 43900 train 5.352713 (lr=6.6455e-06) (hash(x)=45351304)
338
+ 5500 val loss 6.6716
339
+ 5500 val perplexity 789.6293
340
+ 5500 train 6.500800 (lr=9.7508e-05) (hash(x)=46383189)
341
+ 44000 val loss 5.6236
342
+ 44000 val perplexity 276.8810
343
+ 44000 train 5.404914 (lr=6.5926e-06) (hash(x)=46603503)
344
+ 5600 val loss 6.6700
345
+ 5600 val perplexity 788.3792
346
+ 5600 train 6.856142 (lr=9.7414e-05) (hash(x)=54084990)
347
+ 44100 val loss 5.6214
348
+ 44100 val perplexity 276.2868
349
+ 44100 train 5.590413 (lr=6.5406e-06) (hash(x)=52189096)
350
+ 5700 val loss 6.6305
351
+ 5700 val perplexity 757.8840
352
+ 5700 train 6.303418 (lr=9.7318e-05) (hash(x)=47384182)
353
+ 44200 val loss 5.6225
354
+ 44200 val perplexity 276.5793
355
+ 44200 train 5.789032 (lr=6.4894e-06) (hash(x)=51527792)
356
+ 5800 val loss 6.6221
357
+ 5800 val perplexity 751.5416
358
+ 5800 train 6.522151 (lr=9.7221e-05) (hash(x)=51683744)
359
+ 44300 val loss 5.6208
360
+ 44300 val perplexity 276.1172
361
+ 44300 train 5.475013 (lr=6.4390e-06) (hash(x)=47982628)
362
+ 5900 val loss 6.6169
363
+ 5900 val perplexity 747.5950
364
+ 5900 train 6.624623 (lr=9.7122e-05) (hash(x)=52798836)
365
+ 44400 val loss 5.6230
366
+ 44400 val perplexity 276.7267
367
+ 44400 train 5.611701 (lr=6.3895e-06) (hash(x)=54902968)
368
+ 6000 val loss 6.6217
369
+ 6000 val perplexity 751.2009
370
+ 6000 train 6.511095 (lr=9.7021e-05) (hash(x)=50945000)
371
+ 44500 val loss 5.6205
372
+ 44500 val perplexity 276.0341
373
+ 44500 train 5.537436 (lr=6.3408e-06) (hash(x)=49008493)
374
+ 6100 val loss 6.6186
375
+ 6100 val perplexity 748.9037
376
+ 6100 train 6.304089 (lr=9.6919e-05) (hash(x)=48964427)
377
+ 44600 val loss 5.6222
378
+ 44600 val perplexity 276.4967
379
+ 44600 train 5.353870 (lr=6.2929e-06) (hash(x)=43643824)
380
+ 6200 val loss 6.6284
381
+ 6200 val perplexity 756.2642
382
+ 6200 train 6.390724 (lr=9.6815e-05) (hash(x)=47148610)
383
+ 44700 val loss 5.6218
384
+ 44700 val perplexity 276.3996
385
+ 44700 train 5.470081 (lr=6.2459e-06) (hash(x)=50465872)
386
+ 6300 val loss 6.5986
387
+ 6300 val perplexity 734.0524
388
+ 6300 train 6.511916 (lr=9.6709e-05) (hash(x)=50860553)
389
+ 44800 val loss 5.6218
390
+ 44800 val perplexity 276.3798
391
+ 44800 train 5.378158 (lr=6.1998e-06) (hash(x)=51962160)
392
+ 6400 val loss 6.5951
393
+ 6400 val perplexity 731.4692
394
+ 6400 train 6.364479 (lr=9.6602e-05) (hash(x)=50553992)
395
+ 44900 val loss 5.6235
396
+ 44900 val perplexity 276.8581
397
+ 44900 train 5.763238 (lr=6.1545e-06) (hash(x)=57370039)
398
+ 6500 val loss 6.5914
399
+ 6500 val perplexity 728.8076
400
+ 6500 train 6.389678 (lr=9.6493e-05) (hash(x)=52521332)
401
+ 45000 val loss 5.6243
402
+ 45000 val perplexity 277.0884
403
+ 45000 train 5.469142 (lr=6.1100e-06) (hash(x)=52648130)
404
+ 6600 val loss 6.5748
405
+ 6600 val perplexity 716.8139
406
+ 6600 train 6.929626 (lr=9.6382e-05) (hash(x)=52609843)
407
+ 45100 val loss 5.6554
408
+ 45100 val perplexity 285.8250
409
+ 45100 train 5.327322 (lr=6.0664e-06) (hash(x)=45155459)
410
+ 6700 val loss 6.5304
411
+ 6700 val perplexity 685.6540
412
+ 6700 train 6.558313 (lr=9.6270e-05) (hash(x)=53632957)
413
+ 45200 val loss 5.6170
414
+ 45200 val perplexity 275.0746
415
+ 45200 train 5.611628 (lr=6.0237e-06) (hash(x)=52304992)
416
+ 6800 val loss 6.5309
417
+ 6800 val perplexity 685.9938
418
+ 6800 train 6.539653 (lr=9.6156e-05) (hash(x)=46927608)
419
+ 45300 val loss 5.6149
420
+ 45300 val perplexity 274.4989
421
+ 45300 train 5.468201 (lr=5.9818e-06) (hash(x)=46456407)
422
+ 6900 val loss 6.5383
423
+ 6900 val perplexity 691.1232
424
+ 6900 train 6.552355 (lr=9.6040e-05) (hash(x)=54483057)
425
+ 45400 val loss 5.6139
426
+ 45400 val perplexity 274.2122
427
+ 45400 train 5.607281 (lr=5.9407e-06) (hash(x)=48325493)
428
+ 7000 val loss 6.5042
429
+ 7000 val perplexity 667.9302
430
+ 7000 train 6.676917 (lr=9.5923e-05) (hash(x)=51493884)
431
+ 45500 val loss 5.6116
432
+ 45500 val perplexity 273.5927
433
+ 45500 train 5.583129 (lr=5.9005e-06) (hash(x)=48869064)
434
+ 7100 val loss 6.4774
435
+ 7100 val perplexity 650.3052
436
+ 7100 train 6.625372 (lr=9.5804e-05) (hash(x)=53303341)
437
+ 7200 val loss 6.4821
438
+ 7200 val perplexity 653.3736
439
+ 7200 train 6.203289 (lr=9.5683e-05) (hash(x)=45272178)
440
+ 45600 val loss 5.6123
441
+ 45600 val perplexity 273.7640
442
+ 45600 train 5.690610 (lr=5.8612e-06) (hash(x)=46077805)
443
+ 7300 val loss 6.4693
444
+ 7300 val perplexity 645.0018
445
+ 7300 train 6.416029 (lr=9.5561e-05) (hash(x)=50389872)
446
+ 45700 val loss 5.6112
447
+ 45700 val perplexity 273.4644
448
+ 45700 train 5.849724 (lr=5.8227e-06) (hash(x)=56831165)
449
+ 7400 val loss 6.4705
450
+ 7400 val perplexity 645.8201
451
+ 7400 train 6.127152 (lr=9.5437e-05) (hash(x)=43796301)
452
+ 45800 val loss 5.6116
453
+ 45800 val perplexity 273.5795
454
+ 45800 train 5.459206 (lr=5.7851e-06) (hash(x)=50124921)
455
+ 7500 val loss 6.4612
456
+ 7500 val perplexity 639.8350
457
+ 7500 train 6.404037 (lr=9.5312e-05) (hash(x)=47808686)
458
+ 45900 val loss 5.6117
459
+ 45900 val perplexity 273.6022
460
+ 45900 train 5.222868 (lr=5.7484e-06) (hash(x)=43227400)
461
+ 7600 val loss 6.4687
462
+ 7600 val perplexity 644.6377
463
+ 7600 train 6.201755 (lr=9.5185e-05) (hash(x)=41936898)
464
+ 46000 val loss 5.6104
465
+ 46000 val perplexity 273.2413
466
+ 46000 train 5.602647 (lr=5.7125e-06) (hash(x)=58308887)
467
+ 7700 val loss 6.5138
468
+ 7700 val perplexity 674.3773
469
+ 7700 train 6.732550 (lr=9.5057e-05) (hash(x)=57550318)
470
+ 46100 val loss 5.6094
471
+ 46100 val perplexity 272.9798
472
+ 46100 train 5.677024 (lr=5.6775e-06) (hash(x)=48571632)
473
+ 7800 val loss 6.4561
474
+ 7800 val perplexity 636.5423
475
+ 7800 train 6.328799 (lr=9.4926e-05) (hash(x)=47485210)
476
+ 46200 val loss 5.6103
477
+ 46200 val perplexity 273.2153
478
+ 46200 train 5.874604 (lr=5.6434e-06) (hash(x)=46368862)
479
+ 7900 val loss 6.4544
480
+ 7900 val perplexity 635.5160
481
+ 7900 train 6.812044 (lr=9.4795e-05) (hash(x)=53228688)
482
+ 46300 val loss 5.6123
483
+ 46300 val perplexity 273.7760
484
+ 46300 train 5.376794 (lr=5.6101e-06) (hash(x)=52820317)
485
+ 8000 val loss 6.4555
486
+ 8000 val perplexity 636.1951
487
+ 8000 train 6.513896 (lr=9.4661e-05) (hash(x)=52018673)
488
+ 46400 val loss 5.6107
489
+ 46400 val perplexity 273.3253
490
+ 46400 train 5.410422 (lr=5.5777e-06) (hash(x)=51871487)
491
+ 8100 val loss 6.4554
492
+ 8100 val perplexity 636.1124
493
+ 8100 train 6.303450 (lr=9.4526e-05) (hash(x)=47079349)
494
+ 46500 val loss 5.6121
495
+ 46500 val perplexity 273.7071
496
+ 46500 train 5.565622 (lr=5.5462e-06) (hash(x)=47867973)
497
+ 8200 val loss 6.4537
498
+ 8200 val perplexity 635.0416
499
+ 8200 train 6.513351 (lr=9.4390e-05) (hash(x)=57921563)
500
+ 46600 val loss 5.6095
501
+ 46600 val perplexity 272.9956
502
+ 46600 train 5.271016 (lr=5.5156e-06) (hash(x)=50685713)
503
+ 8300 val loss 6.4452
504
+ 8300 val perplexity 629.6801
505
+ 8300 train 5.972620 (lr=9.4252e-05) (hash(x)=45038933)
506
+ 46700 val loss 5.6092
507
+ 46700 val perplexity 272.9158
508
+ 46700 train 5.732937 (lr=5.4858e-06) (hash(x)=50975526)
509
+ 8400 val loss 6.4808
510
+ 8400 val perplexity 652.5134
511
+ 8400 train 6.312977 (lr=9.4112e-05) (hash(x)=47763246)
512
+ 46800 val loss 5.6107
513
+ 46800 val perplexity 273.3414
514
+ 46800 train 5.750267 (lr=5.4569e-06) (hash(x)=51012129)
515
+ 8500 val loss 6.4612
516
+ 8500 val perplexity 639.8493
517
+ 8500 train 6.493088 (lr=9.3971e-05) (hash(x)=56176595)
518
+ 46900 val loss 5.6143
519
+ 46900 val perplexity 274.3083
520
+ 46900 train 5.333468 (lr=5.4289e-06) (hash(x)=51205353)
521
+ 8600 val loss 6.4510
522
+ 8600 val perplexity 633.3370
523
+ 8600 train 6.529971 (lr=9.3828e-05) (hash(x)=55184249)
524
+ 47000 val loss 5.6131
525
+ 47000 val perplexity 274.0013
526
+ 47000 train 5.759924 (lr=5.4017e-06) (hash(x)=48939661)
527
+ 8700 val loss 6.4552
528
+ 8700 val perplexity 636.0110
529
+ 8700 train 6.379639 (lr=9.3684e-05) (hash(x)=46471646)
530
+ 47100 val loss 5.6096
531
+ 47100 val perplexity 273.0318
532
+ 47100 train 5.531120 (lr=5.3755e-06) (hash(x)=51002557)
533
+ 8800 val loss 6.4945
534
+ 8800 val perplexity 661.4907
535
+ 8800 train 6.198891 (lr=9.3538e-05) (hash(x)=46233162)
536
+ 47200 val loss 5.6103
537
+ 47200 val perplexity 273.2244
538
+ 47200 train 5.444582 (lr=5.3501e-06) (hash(x)=43422209)
539
+ 8900 val loss 6.5133
540
+ 8900 val perplexity 674.0529
541
+ 8900 train 6.432613 (lr=9.3391e-05) (hash(x)=47233684)
542
+ 47300 val loss 5.6093
543
+ 47300 val perplexity 272.9432
544
+ 47300 train 5.365450 (lr=5.3256e-06) (hash(x)=45870552)
545
+ 9000 val loss 6.4790
+ 9000 val perplexity 651.3448
+ 9000 train 6.380252 (lr=9.3242e-05) (hash(x)=48374529)
+ 47400 val loss 5.6096
+ 47400 val perplexity 273.0215
+ 47400 train 5.453257 (lr=5.3020e-06) (hash(x)=47001810)
+ 9100 val loss 6.4612
+ 9100 val perplexity 639.8325
+ 9100 train 6.426319 (lr=9.3092e-05) (hash(x)=48065371)
+ 47500 val loss 5.6062
+ 47500 val perplexity 272.1136
+ 47500 train 5.676814 (lr=5.2792e-06) (hash(x)=49146619)
+ 47600 val loss 5.6047
+ 47600 val perplexity 271.6933
+ 47600 train 5.688634 (lr=5.2574e-06) (hash(x)=53742853)
+ 9200 val loss 6.4337
+ 9200 val perplexity 622.4777
+ 9200 train 6.365408 (lr=9.2940e-05) (hash(x)=47408078)
+ 47700 val loss 5.6023
+ 47700 val perplexity 271.0357
+ 47700 train 5.858260 (lr=5.2364e-06) (hash(x)=53019458)
+ 9300 val loss 6.4346
+ 9300 val perplexity 623.0532
+ 9300 train 6.420694 (lr=9.2786e-05) (hash(x)=50749781)
+ 47800 val loss 5.6015
+ 47800 val perplexity 270.8320
+ 47800 train 5.688690 (lr=5.2163e-06) (hash(x)=54497914)
+ 47900 val loss 5.6040
+ 47900 val perplexity 271.5063
+ 47900 train 5.530357 (lr=5.1972e-06) (hash(x)=47109012)
+ 9400 val loss 6.4372
+ 9400 val perplexity 624.6458
+ 9400 train 6.634754 (lr=9.2632e-05) (hash(x)=48560169)
+ 48000 val loss 5.6005
+ 48000 val perplexity 270.5672
+ 48000 train 5.590864 (lr=5.1788e-06) (hash(x)=48429816)
+ 9500 val loss 6.4296
+ 9500 val perplexity 619.9107
+ 9500 train 6.521097 (lr=9.2475e-05) (hash(x)=50936392)
+ 48100 val loss 5.6008
+ 48100 val perplexity 270.6499
+ 48100 train 5.544402 (lr=5.1614e-06) (hash(x)=46500465)
+ 48200 val loss 5.5982
+ 48200 val perplexity 269.9372
+ 48200 train 5.762817 (lr=5.1449e-06) (hash(x)=50888195)
+ 9600 val loss 6.4141
+ 9600 val perplexity 610.3770
+ 9600 train 6.430016 (lr=9.2317e-05) (hash(x)=50651714)
+ 48300 val loss 5.5988
+ 48300 val perplexity 270.1088
+ 48300 train 5.666016 (lr=5.1293e-06) (hash(x)=56406586)
+ 9700 val loss 6.4110
+ 9700 val perplexity 608.4774
+ 9700 train 6.487106 (lr=9.2158e-05) (hash(x)=47311384)
+ 48400 val loss 5.5989
+ 48400 val perplexity 270.1341
+ 48400 train 5.751607 (lr=5.1145e-06) (hash(x)=48713969)
+ 48500 val loss 5.5980
+ 48500 val perplexity 269.8947
+ 48500 train 5.026878 (lr=5.1007e-06) (hash(x)=37691696)
+ 9800 val loss 6.4147
+ 9800 val perplexity 610.7438
+ 9800 train 6.526601 (lr=9.1997e-05) (hash(x)=50921139)
+ 48600 val loss 5.5988
+ 48600 val perplexity 270.1123
+ 48600 train 5.892519 (lr=5.0877e-06) (hash(x)=48712321)
+ 48700 val loss 5.5987
+ 48700 val perplexity 270.0880
+ 9900 val loss 6.4657
+ 9900 val perplexity 642.6958
+ 48700 train 6.032611 (lr=5.0756e-06) (hash(x)=63513096)
+ 9900 train 6.607420 (lr=9.1835e-05) (hash(x)=48142455)
+ 48800 val loss 5.6003
+ 48800 val perplexity 270.4991
+ 48800 train 5.218384 (lr=5.0644e-06) (hash(x)=44917721)
+ 10000 val loss 6.4405
+ 10000 val perplexity 626.7271
+ 10000 train 6.596187 (lr=9.1671e-05) (hash(x)=50757167)
+ 48900 val loss 5.5981
+ 48900 val perplexity 269.9094
+ 48900 train 5.816957 (lr=5.0542e-06) (hash(x)=51426559)
+ 49000 val loss 5.5992
+ 49000 val perplexity 270.2160
+ 49000 train 5.508775 (lr=5.0448e-06) (hash(x)=47909690)
+ 10100 val loss 6.4674
+ 10100 val perplexity 643.8083
+ 10100 train 6.442811 (lr=9.1506e-05) (hash(x)=46128585)
+ 49100 val loss 5.5971
+ 49100 val perplexity 269.6422
+ 49100 train 5.500440 (lr=5.0363e-06) (hash(x)=48329226)
+ 10200 val loss 6.4787
+ 10200 val perplexity 651.1212
+ 10200 train 6.703011 (lr=9.1339e-05) (hash(x)=57016296)
+ 49200 val loss 5.5991
+ 49200 val perplexity 270.1703
+ 49200 train 5.600343 (lr=5.0286e-06) (hash(x)=49512472)
+ 49300 val loss 5.5989
+ 49300 val perplexity 270.1354
+ 49300 train 5.452957 (lr=5.0219e-06) (hash(x)=46541146)
+ 10300 val loss 6.5369
+ 10300 val perplexity 690.1228
+ 10300 train 6.600325 (lr=9.1171e-05) (hash(x)=49839464)
+ 49400 val loss 5.6009
+ 49400 val perplexity 270.6745
+ 49400 train 5.543798 (lr=5.0161e-06) (hash(x)=52192792)
+ 10400 val loss 6.5206
+ 10400 val perplexity 678.9719
+ 10400 train 6.516890 (lr=9.1001e-05) (hash(x)=50237852)
+ 49500 val loss 5.5995
+ 49500 val perplexity 270.2928
+ 49500 train 5.386661 (lr=5.0112e-06) (hash(x)=48954331)
+ 49600 val loss 5.5985
+ 49600 val perplexity 270.0147
+ 49600 train 5.711846 (lr=5.0072e-06) (hash(x)=50364098)
+ 10500 val loss 6.5185
+ 10500 val perplexity 677.5903
+ 10500 train 6.626068 (lr=9.0830e-05) (hash(x)=48003282)
+ 49700 val loss 5.5968
+ 49700 val perplexity 269.5677
+ 49700 train 5.554048 (lr=5.0040e-06) (hash(x)=52016774)
+ 49800 val loss 5.5979
+ 49800 val perplexity 269.8479
+ 49800 train 5.830290 (lr=5.0018e-06) (hash(x)=54182957)
+ 10600 val loss 6.5319
+ 10600 val perplexity 686.7331
+ 10600 train 6.466997 (lr=9.0658e-05) (hash(x)=53088427)
+ 49900 val loss 5.5940
+ 49900 val perplexity 268.8109
+ 49900 train 5.332826 (lr=5.0004e-06) (hash(x)=48188126)
+ 10700 val loss 6.5500
+ 10700 val perplexity 699.2339
+ 10700 train 6.378216 (lr=9.0484e-05) (hash(x)=47687508)
+ 49999 val loss 5.6180
+ 49999 val perplexity 275.3266
+ 10800 val loss 6.5394
+ 10800 val perplexity 691.8988
+ 10800 train 6.397013 (lr=9.0308e-05) (hash(x)=47658698)
+ 10900 val loss 6.4993
+ 10900 val perplexity 664.6454
+ 10900 train 6.503762 (lr=9.0132e-05) (hash(x)=52957012)
+ 11000 val loss 6.4866
+ 11000 val perplexity 656.2906
+ 11000 train 6.625940 (lr=8.9954e-05) (hash(x)=54632498)
+ 11100 val loss 6.5099
+ 11100 val perplexity 671.7436
+ 11100 train 6.986665 (lr=8.9774e-05) (hash(x)=54028593)
+ 11200 val loss 6.4837
+ 11200 val perplexity 654.3682
+ 11200 train 6.616082 (lr=8.9593e-05) (hash(x)=54084412)
+ 11300 val loss 6.4871
+ 11300 val perplexity 656.6074
+ 11300 train 6.309471 (lr=8.9411e-05) (hash(x)=48170961)
+ 11400 val loss 6.4986
+ 11400 val perplexity 664.2282
+ 11400 train 6.540001 (lr=8.9227e-05) (hash(x)=52124243)
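The logged `val perplexity` column is consistent with perplexity being `exp(val loss)` (loss in nats). A minimal Python spot-check, using two (loss, perplexity) pairs copied verbatim from the log lines above:

```python
import math

# Spot-check that the logged "val perplexity" equals exp("val loss").
# The two pairs below are copied from the log2.txt diff above.
for loss, ppl in [(5.6096, 273.0215), (6.4790, 651.3448)]:
    assert math.isclose(math.exp(loss), ppl, rel_tol=1e-3)
    print(f"exp({loss}) = {math.exp(loss):.4f} matches logged {ppl}")
```

Both pairs agree to four significant figures, so the relation holds for the logged values shown here.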
attention_kindselective_n_heads4_seed1339/model_02500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:ad93158bdfd32071e25dc456532ed3fd36c61df4cc86f865449960dcdf6fdfcb
+ oid sha256:2ec7853f39c020bf0817d8a9424f0786ad4b93fc0839e1f8734dc36c9b628b3c
  size 92843394
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:6aa78e6516bbc5459cdc3154c9b778836f34c2c1dbac08076e7043de9bd5fe9c
3
  size 92843394
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e7a3ea4733ac190d2c44f57c3fc804dc59f7571ad4f028f02fe4775d0b66df1b
3
  size 92843394
attention_kindselective_n_heads4_seed1339/model_07500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:20de18d92ff73af4207c8758d2c668cb193ef56f2e7857b8391859c03bfce23d
+ oid sha256:5d9cba9958a987856e63371998d573139f4d7e12bf83d1dc6741e08c1656ae1e
  size 92843394
attention_kindselective_n_heads4_seed1339/model_10000.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:75c9a83ca6838f6643993a01ade7f926d7dccd9aaabbaa9d1cc052a66d45ad6a
+ size 92843394
attention_kindselective_n_heads4_seed1339/model_40000.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:78a8fd9d6a750808245efe7c72bed034116984b1a3d236604a1e986d60e7ed58
+ size 92843394
attention_kindselective_n_heads4_seed1339/model_42500.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c3363e83947e45c3c64503ecbdcc0b270aa74e74aa9ec5438a596bd3c8156913
+ size 92843394
attention_kindselective_n_heads4_seed1339/model_45000.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:652267ce4a3c3eb24b3fdd1182cbeaad71435a28d6533b8d65f60dfd783e5411
+ size 92843394
attention_kindselective_n_heads4_seed1339/model_47500.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d198491e4dd117689476940ce9f230be59d99c41399afc7f16477ab0abe13f73
+ size 92843394
attention_kindselective_n_heads4_seed1339/model_49999.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bb07662f40aa23921523b0c43b25a496ce763a42d93ca236f9ab6e69e0f9407d
+ size 92843394
attention_kindselective_n_heads4_seed1339/optimizer_02500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:edeb87f6d21d339a0559282c4a2db7e96080f40d421400d4df3816f05159bf21
+ oid sha256:6d0daaea20dfea170f7f2a3011fad8c68fd0d34d7837cfc7e1b82e10c8afac2e
  size 179406214
attention_kindselective_n_heads4_seed1339/optimizer_05000.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:3ef24c1b7a4309998b53d8f8c3106922fe90cd2c76ffb6673a28994960c96400
+ oid sha256:d6b399697378d1367510c9f4104e498e29bd1617ea27a7878564f423ee87160d
  size 179406214
attention_kindselective_n_heads4_seed1339/optimizer_07500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:e1171237d3a16e4a7ecf8a131c6ba1d82e5ffafb1c91d7d035a9b67b6ae1398e
+ oid sha256:827fa057c0c4bae82594e7d86c915d39ef2208d8da842bda634d1d07f8a6f14c
  size 179406214
attention_kindselective_n_heads4_seed1339/optimizer_10000.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bdb56e2388c4d739ff044d95f0f99d11044dc7064e262ea9e0da4677e4b7369f
+ size 179406214
attention_kindselective_n_heads4_seed1339/optimizer_40000.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7ecc8125e65f3bc259ac039c34b488e49892d0778ea59a14ffd86b25c567c0ac
+ size 179406214
attention_kindselective_n_heads4_seed1339/optimizer_42500.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a22bc9fe743fba8fc4b4560471a341de420ff16d5fb5457b184c26c331fefcdb
+ size 179406214
attention_kindselective_n_heads4_seed1339/optimizer_45000.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f9b0a4a35f4948303d8fd2c63401b8e68948d5d7ffed4e82c8947d42f9eace1f
+ size 179406214
attention_kindselective_n_heads4_seed1339/optimizer_47500.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ca09c067aab98fdf7e31137500fc5459871aa21e3a7a17c7d424ef21a92d9905
+ size 179406214
attention_kindselective_n_heads4_seed1339/optimizer_49999.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:67ac5a0dfae5b7e50ccf39b379788bfee75cb4871c0743d86aacdb6f3b54b940
+ size 179406214
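Once the LFS blobs behind these pointers are fetched, the new checkpoints can be inspected locally. A minimal sketch, under the unverified assumption that each `model_*.pt` is a torch-serialized state dict keyed by parameter name (the training args use PyTorch conventions, but the exact payload format is not confirmed by this diff):

```python
import torch

# Load one of the uploaded checkpoints on CPU. Assumes (unverified) that the
# file is a pickled state dict; the path matches the repo layout in this commit.
state = torch.load(
    "attention_kindselective_n_heads4_seed1339/model_49999.pt",
    map_location="cpu",
)
if isinstance(state, dict):
    # Print the first few entries to see parameter names and shapes.
    for name, value in list(state.items())[:5]:
        print(name, getattr(value, "shape", type(value).__name__))
```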