andrew-healey committed
Commit 6f20b97 · verified · 1 Parent(s): 2edc727

Upload folder using huggingface_hub

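A commit like this is typically produced with huggingface_hub's upload_folder helper. A minimal sketch, assuming a hypothetical repo id and reusing the run directory name from this commit (the actual repo id and token handling are not stated here):

from huggingface_hub import upload_folder

# Hypothetical repo id; folder/path names mirror the run directory in this commit.
commit_info = upload_folder(
    repo_id="andrew-healey/wider_is_better_9",  # assumed, not confirmed by the commit
    folder_path="attention_kindselective_n_heads4_seed1339",
    path_in_repo="attention_kindselective_n_heads4_seed1339",
    commit_message="Upload folder using huggingface_hub",
)
print(commit_info)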
attention_kindselective_n_heads4_seed1339/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_9/attention_kindselective_n_heads4_seed1339", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_9", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1339, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 5.5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "5.5e-5_61440_4_1339", "n_embd": 256}
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_9/attention_kindselective_n_heads4_seed1339", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_9", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1339, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 3e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "3e-5_61440_4_1339", "n_embd": 256}
attention_kindselective_n_heads4_seed1339/log2.txt CHANGED
@@ -1,267 +1,502 @@
1
  max_steps: 8750
 
2
  0 val loss 11.2808
3
  0 val perplexity 79287.6797
4
- 0 train 11.289297 (lr=1.1000e-07) (hash(x)=150724848)
5
- 100 val loss 9.9926
6
- 100 val perplexity 21863.0391
7
- 100 train 10.007814 (lr=1.1110e-05) (hash(x)=149910534)
8
- 200 val loss 9.1223
9
- 200 val perplexity 9157.1182
10
- 200 train 9.148949 (lr=2.2110e-05) (hash(x)=148123706)
11
- 300 val loss 7.9004
12
- 300 val perplexity 2698.2358
13
- 300 train 7.756021 (lr=3.3110e-05) (hash(x)=146678221)
14
- 400 val loss 7.5196
15
- 400 val perplexity 1843.8901
16
- 400 train 7.404154 (lr=4.4110e-05) (hash(x)=151700982)
17
- 500 val loss 7.3670
18
- 500 val perplexity 1582.9152
19
- 500 train 7.450415 (lr=5.5000e-05) (hash(x)=156182087)
20
- 600 val loss 7.2367
21
- 600 val perplexity 1389.4574
22
- 600 train 7.212444 (lr=5.4982e-05) (hash(x)=149318660)
23
- 700 val loss 7.1474
24
- 700 val perplexity 1270.7859
25
- 700 train 7.189300 (lr=5.4928e-05) (hash(x)=150482428)
26
- 800 val loss 7.0296
27
- 800 val perplexity 1129.5702
28
- 800 train 6.903787 (lr=5.4839e-05) (hash(x)=143268605)
29
- 900 val loss 6.9340
30
- 900 val perplexity 1026.5736
31
- 900 train 6.897569 (lr=5.4713e-05) (hash(x)=152322423)
32
- 1000 val loss 6.8391
33
- 1000 val perplexity 933.6412
34
- 1000 train 6.705533 (lr=5.4553e-05) (hash(x)=147904298)
35
- 1100 val loss 6.7474
36
- 1100 val perplexity 851.8519
37
- 1100 train 6.950415 (lr=5.4357e-05) (hash(x)=154343147)
38
- 1200 val loss 6.6458
39
- 1200 val perplexity 769.5319
40
- 1200 train 6.633022 (lr=5.4126e-05) (hash(x)=141843115)
41
- 1300 val loss 6.5715
42
- 1300 val perplexity 714.4750
43
- 1300 train 6.459829 (lr=5.3860e-05) (hash(x)=145279030)
44
- 1400 val loss 6.5204
45
- 1400 val perplexity 678.8162
46
- 1400 train 6.464448 (lr=5.3561e-05) (hash(x)=152507639)
47
- 1500 val loss 6.4720
48
- 1500 val perplexity 646.7606
49
- 1500 train 6.506417 (lr=5.3227e-05) (hash(x)=148473774)
50
- 1600 val loss 6.4094
51
- 1600 val perplexity 607.5082
52
- 1600 train 6.450625 (lr=5.2860e-05) (hash(x)=151117002)
53
- 1700 val loss 6.3770
54
- 1700 val perplexity 588.1442
55
- 1700 train 6.323629 (lr=5.2461e-05) (hash(x)=138011335)
56
- 1800 val loss 6.3356
57
- 1800 val perplexity 564.3019
58
- 1800 train 6.464371 (lr=5.2029e-05) (hash(x)=171180926)
59
- 1900 val loss 6.2924
60
- 1900 val perplexity 540.4733
61
- 1900 train 6.228439 (lr=5.1565e-05) (hash(x)=141769419)
62
- 2000 val loss 6.2590
63
- 2000 val perplexity 522.6740
64
- 2000 train 6.211834 (lr=5.1071e-05) (hash(x)=151963443)
65
- 2100 val loss 6.2432
66
- 2100 val perplexity 514.5130
67
- 2100 train 6.295941 (lr=5.0547e-05) (hash(x)=162947470)
68
- 2200 val loss 6.1986
69
- 2200 val perplexity 492.0483
70
- 2200 train 6.424762 (lr=4.9993e-05) (hash(x)=154954810)
71
- 2300 val loss 6.1768
72
- 2300 val perplexity 481.4570
73
- 2300 train 6.154170 (lr=4.9410e-05) (hash(x)=151878111)
74
- 2400 val loss 6.1559
75
- 2400 val perplexity 471.4818
76
- 2400 train 6.050483 (lr=4.8800e-05) (hash(x)=158661057)
77
- 2500 val loss 6.1194
78
- 2500 val perplexity 454.5914
79
- 2500 train 6.141537 (lr=4.8162e-05) (hash(x)=150925584)
80
- 2600 val loss 6.0965
81
- 2600 val perplexity 444.3190
82
- 2600 train 6.004150 (lr=4.7499e-05) (hash(x)=144515755)
83
- 2700 val loss 6.0754
84
- 2700 val perplexity 435.0298
85
- 2700 train 6.059896 (lr=4.6811e-05) (hash(x)=153109144)
86
- 2800 val loss 6.0466
87
- 2800 val perplexity 422.6543
88
- 2800 train 5.952908 (lr=4.6099e-05) (hash(x)=151152897)
89
- 2900 val loss 6.0262
90
- 2900 val perplexity 414.1284
91
- 2900 train 5.969955 (lr=4.5364e-05) (hash(x)=145800210)
92
- 3000 val loss 6.0130
93
- 3000 val perplexity 408.7055
94
- 3000 train 5.818487 (lr=4.4606e-05) (hash(x)=141997485)
95
- 3100 val loss 5.9902
96
- 3100 val perplexity 399.5090
97
- 3100 train 5.910625 (lr=4.3828e-05) (hash(x)=154049740)
98
- 3200 val loss 5.9697
99
- 3200 val perplexity 391.3949
100
- 3200 train 5.898226 (lr=4.3031e-05) (hash(x)=150471842)
101
- 3300 val loss 5.9616
102
- 3300 val perplexity 388.2483
103
- 3300 train 5.906397 (lr=4.2215e-05) (hash(x)=149048126)
104
- 3400 val loss 5.9343
105
- 3400 val perplexity 377.7688
106
- 3400 train 6.056663 (lr=4.1381e-05) (hash(x)=161261339)
107
- 3500 val loss 5.9248
108
- 3500 val perplexity 374.2032
109
- 3500 train 5.890368 (lr=4.0532e-05) (hash(x)=157495564)
110
- 3600 val loss 5.9106
111
- 3600 val perplexity 368.9154
112
- 3600 train 5.813707 (lr=3.9667e-05) (hash(x)=144352932)
113
- 3700 val loss 5.8902
114
- 3700 val perplexity 361.4862
115
- 3700 train 5.860925 (lr=3.8789e-05) (hash(x)=149389012)
116
- 3800 val loss 5.8681
117
- 3800 val perplexity 353.5889
118
- 3800 train 5.809790 (lr=3.7898e-05) (hash(x)=146607620)
119
- 3900 val loss 5.8539
120
- 3900 val perplexity 348.6085
121
- 3900 train 5.772669 (lr=3.6996e-05) (hash(x)=143754617)
122
- 4000 val loss 5.8410
123
- 4000 val perplexity 344.1350
124
- 4000 train 5.813826 (lr=3.6085e-05) (hash(x)=156930722)
125
- 4100 val loss 5.8236
126
- 4100 val perplexity 338.1763
127
- 4100 train 5.653216 (lr=3.5165e-05) (hash(x)=147500519)
128
- 4200 val loss 5.8148
129
- 4200 val perplexity 335.2222
130
- 4200 train 5.652660 (lr=3.4238e-05) (hash(x)=143232237)
131
- 4300 val loss 5.8024
132
- 4300 val perplexity 331.0877
133
- 4300 train 5.709325 (lr=3.3305e-05) (hash(x)=146811670)
134
- 4400 val loss 5.7871
135
- 4400 val perplexity 326.0585
136
- 4400 train 5.821728 (lr=3.2368e-05) (hash(x)=158418746)
137
- 4500 val loss 5.7772
138
- 4500 val perplexity 322.8669
139
- 4500 train 5.743765 (lr=3.1428e-05) (hash(x)=156695778)
140
- 4600 val loss 5.7656
141
- 4600 val perplexity 319.1193
142
- 4600 train 5.785645 (lr=3.0486e-05) (hash(x)=147791497)
143
- 4700 val loss 5.7494
144
- 4700 val perplexity 314.0002
145
- 4700 train 5.637969 (lr=2.9543e-05) (hash(x)=155533088)
146
- 4800 val loss 5.7457
147
- 4800 val perplexity 312.8485
148
- 4800 train 5.597758 (lr=2.8602e-05) (hash(x)=138350044)
149
- 4900 val loss 5.7309
150
- 4900 val perplexity 308.2330
151
- 4900 train 5.726795 (lr=2.7663e-05) (hash(x)=143735284)
152
- 5000 val loss 5.7179
153
- 5000 val perplexity 304.2539
154
- 5000 train 5.672445 (lr=2.6728e-05) (hash(x)=154976463)
155
- 5100 val loss 5.7154
156
- 5100 val perplexity 303.5133
157
- 5100 train 5.600068 (lr=2.5798e-05) (hash(x)=149894982)
158
- 5200 val loss 5.7096
159
- 5200 val perplexity 301.7632
160
- 5200 train 5.481579 (lr=2.4874e-05) (hash(x)=159326689)
161
- 5300 val loss 5.6972
162
- 5300 val perplexity 298.0291
163
- 5300 train 5.708793 (lr=2.3958e-05) (hash(x)=159484800)
164
- 5400 val loss 5.6821
165
- 5400 val perplexity 293.5782
166
- 5400 train 5.585687 (lr=2.3051e-05) (hash(x)=140385615)
167
- 5500 val loss 5.6753
168
- 5500 val perplexity 291.5740
169
- 5500 train 5.498550 (lr=2.2155e-05) (hash(x)=148498335)
170
- 5600 val loss 5.6751
171
- 5600 val perplexity 291.5265
172
- 5600 train 5.412326 (lr=2.1271e-05) (hash(x)=151907614)
173
- 5700 val loss 5.6657
174
- 5700 val perplexity 288.7810
175
- 5700 train 5.637896 (lr=2.0399e-05) (hash(x)=155192267)
176
- 5800 val loss 5.6533
177
- 5800 val perplexity 285.2272
178
- 5800 train 5.701861 (lr=1.9542e-05) (hash(x)=153132158)
179
- 5900 val loss 5.6519
180
- 5900 val perplexity 284.8352
181
- 5900 train 5.619544 (lr=1.8700e-05) (hash(x)=161446764)
182
- 6000 val loss 5.6450
183
- 6000 val perplexity 282.8669
184
- 6000 train 5.439802 (lr=1.7875e-05) (hash(x)=151512446)
185
- 6100 val loss 5.6340
186
- 6100 val perplexity 279.7804
187
- 6100 train 5.747126 (lr=1.7068e-05) (hash(x)=188094053)
188
- 6200 val loss 5.6256
189
- 6200 val perplexity 277.4352
190
- 6200 train 5.508698 (lr=1.6280e-05) (hash(x)=149389789)
191
- 6300 val loss 5.6217
192
- 6300 val perplexity 276.3475
193
- 6300 train 5.438232 (lr=1.5512e-05) (hash(x)=138212820)
194
- 6400 val loss 5.6208
195
- 6400 val perplexity 276.1091
196
- 6400 train 5.405768 (lr=1.4766e-05) (hash(x)=146535423)
197
- 6500 val loss 5.6109
198
- 6500 val perplexity 273.3779
199
- 6500 train 5.479463 (lr=1.4042e-05) (hash(x)=145950843)
200
- 6600 val loss 5.6075
201
- 6600 val perplexity 272.4713
202
- 6600 train 5.507493 (lr=1.3342e-05) (hash(x)=141162902)
203
- 6700 val loss 5.6048
204
- 6700 val perplexity 271.7363
205
- 6700 train 5.515503 (lr=1.2666e-05) (hash(x)=153018737)
206
- 6800 val loss 5.5980
207
- 6800 val perplexity 269.8820
208
- 6800 train 5.513815 (lr=1.2016e-05) (hash(x)=155640155)
209
- 6900 val loss 5.5941
210
- 6900 val perplexity 268.8358
211
- 6900 train 5.567899 (lr=1.1392e-05) (hash(x)=153722115)
212
- 7000 val loss 5.5878
213
- 7000 val perplexity 267.1516
214
- 7000 train 5.493548 (lr=1.0795e-05) (hash(x)=146953450)
215
- 7100 val loss 5.5874
216
- 7100 val perplexity 267.0288
217
- 7100 train 5.427116 (lr=1.0227e-05) (hash(x)=137663885)
218
- 7200 val loss 5.5790
219
- 7200 val perplexity 264.7992
220
- 7200 train 5.650048 (lr=9.6875e-06) (hash(x)=146172950)
221
- 7300 val loss 5.5747
222
- 7300 val perplexity 263.6632
223
- 7300 train 5.460082 (lr=9.1780e-06) (hash(x)=150018163)
224
- 7400 val loss 5.5715
225
- 7400 val perplexity 262.8262
226
- 7400 train 5.576471 (lr=8.6990e-06) (hash(x)=145351166)
227
- 7500 val loss 5.5722
228
- 7500 val perplexity 263.0211
229
- 7500 train 5.347385 (lr=8.2513e-06) (hash(x)=145292116)
230
- 7600 val loss 5.5646
231
- 7600 val perplexity 261.0114
232
- 7600 train 5.485201 (lr=7.8355e-06) (hash(x)=150235132)
233
- 7700 val loss 5.5624
234
- 7700 val perplexity 260.4434
235
- 7700 train 5.463830 (lr=7.4522e-06) (hash(x)=154543455)
236
- 7800 val loss 5.5601
237
- 7800 val perplexity 259.8475
238
- 7800 train 5.462539 (lr=7.1019e-06) (hash(x)=142456852)
239
- 7900 val loss 5.5605
240
- 7900 val perplexity 259.9470
241
- 7900 train 5.337718 (lr=6.7852e-06) (hash(x)=147363479)
242
- 8000 val loss 5.5541
243
- 8000 val perplexity 258.2981
244
- 8000 train 5.583329 (lr=6.5025e-06) (hash(x)=156122973)
245
- 8100 val loss 5.5513
246
- 8100 val perplexity 257.5828
247
- 8100 train 5.597711 (lr=6.2543e-06) (hash(x)=156153179)
248
- 8200 val loss 5.5499
249
- 8200 val perplexity 257.2050
250
- 8200 train 5.596151 (lr=6.0408e-06) (hash(x)=146430698)
251
- 8300 val loss 5.5482
252
- 8300 val perplexity 256.7670
253
- 8300 train 5.411896 (lr=5.8625e-06) (hash(x)=143507257)
254
- 8400 val loss 5.5431
255
- 8400 val perplexity 255.4713
256
- 8400 train 5.553337 (lr=5.7195e-06) (hash(x)=166272643)
257
- 8500 val loss 5.5421
258
- 8500 val perplexity 255.2123
259
- 8500 train 5.447274 (lr=5.6121e-06) (hash(x)=143887848)
260
- 8600 val loss 5.5419
261
- 8600 val perplexity 255.1584
262
- 8600 train 5.574839 (lr=5.5404e-06) (hash(x)=156900341)
263
- 8700 val loss 5.5367
264
- 8700 val perplexity 253.8504
265
- 8700 train 5.774979 (lr=5.5045e-06) (hash(x)=146417632)
266
- 8749 val loss 5.5365
267
- 8749 val perplexity 253.7859
1
  max_steps: 8750
2
+ max_steps: 8750
3
  0 val loss 11.2808
4
  0 val perplexity 79287.6797
5
+ 0 val loss 11.2808
6
+ 0 val perplexity 79287.5234
7
+ 0 train 11.289339 (lr=6.0000e-08) (hash(x)=150724848)
8
+ 0 train 11.289339 (lr=7.0000e-08) (hash(x)=150724848)
9
+ 100 val loss 10.1249
10
+ 100 val perplexity 24955.8281
11
+ 100 train 10.136632 (lr=7.0700e-06) (hash(x)=149910534)
12
+ 100 val loss 10.1868
13
+ 100 val perplexity 26549.2832
14
+ 100 train 10.199195 (lr=6.0600e-06) (hash(x)=149910534)
15
+ 200 val loss 9.5679
16
+ 200 val perplexity 14297.8906
17
+ 200 train 9.597668 (lr=1.4070e-05) (hash(x)=148123706)
18
+ 200 val loss 9.6580
19
+ 200 val perplexity 15646.8330
20
+ 200 train 9.688334 (lr=1.2060e-05) (hash(x)=148123706)
21
+ 300 val loss 8.4851
22
+ 300 val perplexity 4841.9927
23
+ 300 train 8.403298 (lr=2.1070e-05) (hash(x)=146678221)
24
+ 300 val loss 8.7199
25
+ 300 val perplexity 6123.2988
26
+ 300 train 8.652300 (lr=1.8060e-05) (hash(x)=146678221)
27
+ 400 val loss 7.7157
28
+ 400 val perplexity 2243.2341
29
+ 400 train 7.622499 (lr=2.8070e-05) (hash(x)=151700982)
30
+ 400 val loss 7.8706
31
+ 400 val perplexity 2619.0610
32
+ 400 train 7.789970 (lr=2.4060e-05) (hash(x)=151700982)
33
+ 500 val loss 7.4807
34
+ 500 val perplexity 1773.4902
35
+ 500 train 7.566009 (lr=3.5000e-05) (hash(x)=156182087)
36
+ 500 val loss 7.5420
37
+ 500 val perplexity 1885.5305
38
+ 500 train 7.629263 (lr=3.0000e-05) (hash(x)=156182087)
39
+ 600 val loss 7.3659
40
+ 600 val perplexity 1581.0912
41
+ 600 train 7.345613 (lr=3.4989e-05) (hash(x)=149318660)
42
+ 600 val loss 7.4137
43
+ 600 val perplexity 1658.5233
44
+ 600 train 7.403398 (lr=2.9990e-05) (hash(x)=149318660)
45
+ 700 val loss 7.2842
46
+ 700 val perplexity 1457.1127
47
+ 700 train 7.324867 (lr=3.4954e-05) (hash(x)=150482428)
48
+ 800 val loss 7.1606
49
+ 800 val perplexity 1287.6595
50
+ 800 train 7.034153 (lr=3.4897e-05) (hash(x)=143268605)
51
+ 700 val loss 7.3358
52
+ 700 val perplexity 1534.3062
53
+ 700 train 7.378386 (lr=2.9961e-05) (hash(x)=150482428)
54
+ 900 val loss 7.0731
55
+ 900 val perplexity 1179.7480
56
+ 900 train 7.036097 (lr=3.4818e-05) (hash(x)=152322423)
57
+ 800 val loss 7.2311
58
+ 800 val perplexity 1381.7266
59
+ 800 train 7.106897 (lr=2.9912e-05) (hash(x)=143268605)
60
+ 1000 val loss 7.0052
61
+ 1000 val perplexity 1102.3225
62
+ 1000 train 6.872244 (lr=3.4715e-05) (hash(x)=147904298)
63
+ 900 val loss 7.1499
64
+ 900 val perplexity 1273.9365
65
+ 900 train 7.112676 (lr=2.9844e-05) (hash(x)=152322423)
66
+ 1100 val loss 6.9231
67
+ 1100 val perplexity 1015.4374
68
+ 1100 train 7.109494 (lr=3.4591e-05) (hash(x)=154343147)
69
+ 1000 val loss 7.0856
70
+ 1000 val perplexity 1194.6191
71
+ 1000 train 6.954570 (lr=2.9756e-05) (hash(x)=147904298)
72
+ 1200 val loss 6.8335
73
+ 1200 val perplexity 928.4275
74
+ 1200 train 6.822800 (lr=3.4444e-05) (hash(x)=141843115)
75
+ 1100 val loss 7.0176
76
+ 1100 val perplexity 1116.0782
77
+ 1100 train 7.204097 (lr=2.9649e-05) (hash(x)=154343147)
78
+ 1300 val loss 6.7614
79
+ 1300 val perplexity 863.8369
80
+ 1300 train 6.647630 (lr=3.4275e-05) (hash(x)=145279030)
81
+ 1200 val loss 6.9380
82
+ 1200 val perplexity 1030.7321
83
+ 1200 train 6.928824 (lr=2.9523e-05) (hash(x)=141843115)
84
+ 1400 val loss 6.7027
85
+ 1400 val perplexity 814.5911
86
+ 1400 train 6.650844 (lr=3.4084e-05) (hash(x)=152507639)
87
+ 1300 val loss 6.8701
88
+ 1300 val perplexity 963.0128
89
+ 1300 train 6.756791 (lr=2.9378e-05) (hash(x)=145279030)
90
+ 1500 val loss 6.6486
91
+ 1500 val perplexity 771.6702
92
+ 1500 train 6.681236 (lr=3.3872e-05) (hash(x)=148473774)
93
+ 1600 val loss 6.5658
94
+ 1600 val perplexity 710.3974
95
+ 1600 train 6.604722 (lr=3.3638e-05) (hash(x)=151117002)
96
+ 1400 val loss 6.8108
97
+ 1400 val perplexity 907.5634
98
+ 1400 train 6.767691 (lr=2.9215e-05) (hash(x)=152507639)
99
+ 1700 val loss 6.5219
100
+ 1700 val perplexity 679.8840
101
+ 1700 train 6.456680 (lr=3.3384e-05) (hash(x)=138011335)
102
+ 1500 val loss 6.7521
103
+ 1500 val perplexity 855.8281
104
+ 1500 train 6.789307 (lr=2.9033e-05) (hash(x)=148473774)
105
+ 1800 val loss 6.4774
106
+ 1800 val perplexity 650.2515
107
+ 1800 train 6.617409 (lr=3.3109e-05) (hash(x)=171180926)
108
+ 1600 val loss 6.6831
109
+ 1600 val perplexity 798.8001
110
+ 1600 train 6.716653 (lr=2.8833e-05) (hash(x)=151117002)
111
+ 1900 val loss 6.4318
112
+ 1900 val perplexity 621.3072
113
+ 1900 train 6.355664 (lr=3.2814e-05) (hash(x)=141769419)
114
+ 1700 val loss 6.6285
115
+ 1700 val perplexity 756.3746
116
+ 1700 train 6.554468 (lr=2.8615e-05) (hash(x)=138011335)
117
+ 2000 val loss 6.3922
118
+ 2000 val perplexity 597.1834
119
+ 2000 train 6.329804 (lr=3.2500e-05) (hash(x)=151963443)
120
+ 1800 val loss 6.5782
121
+ 1800 val perplexity 719.2757
122
+ 1800 train 6.725643 (lr=2.8379e-05) (hash(x)=171180926)
123
+ 2100 val loss 6.3737
124
+ 2100 val perplexity 586.1983
125
+ 2100 train 6.427935 (lr=3.2166e-05) (hash(x)=162947470)
126
+ 1900 val loss 6.5278
127
+ 1900 val perplexity 683.8735
128
+ 1900 train 6.439614 (lr=2.8127e-05) (hash(x)=141769419)
129
+ 2200 val loss 6.3265
130
+ 2200 val perplexity 559.2034
131
+ 2200 train 6.572281 (lr=3.1813e-05) (hash(x)=154954810)
132
+ 2000 val loss 6.4865
133
+ 2000 val perplexity 656.2449
134
+ 2000 train 6.420126 (lr=2.7857e-05) (hash(x)=151963443)
135
+ 2300 val loss 6.3022
136
+ 2300 val perplexity 545.7751
137
+ 2300 train 6.293201 (lr=3.1443e-05) (hash(x)=151878111)
138
+ 2400 val loss 6.2787
139
+ 2400 val perplexity 533.1205
140
+ 2400 train 6.169630 (lr=3.1054e-05) (hash(x)=158661057)
141
+ 2100 val loss 6.4588
142
+ 2100 val perplexity 638.2729
143
+ 2100 train 6.514755 (lr=2.7571e-05) (hash(x)=162947470)
144
+ 2500 val loss 6.2441
145
+ 2500 val perplexity 514.9452
146
+ 2500 train 6.266413 (lr=3.0649e-05) (hash(x)=150925584)
147
+ 2200 val loss 6.4110
148
+ 2200 val perplexity 608.5148
149
+ 2200 train 6.664954 (lr=2.7269e-05) (hash(x)=154954810)
150
+ 2600 val loss 6.2241
151
+ 2600 val perplexity 504.7674
152
+ 2600 train 6.132840 (lr=3.0227e-05) (hash(x)=144515755)
153
+ 2300 val loss 6.3852
154
+ 2300 val perplexity 593.0313
155
+ 2300 train 6.376594 (lr=2.6951e-05) (hash(x)=151878111)
156
+ 2700 val loss 6.2047
157
+ 2700 val perplexity 495.0883
158
+ 2700 train 6.193225 (lr=2.9789e-05) (hash(x)=153109144)
159
+ 2400 val loss 6.3573
160
+ 2400 val perplexity 576.6909
161
+ 2400 train 6.246971 (lr=2.6618e-05) (hash(x)=158661057)
162
+ 2800 val loss 6.1741
163
+ 2800 val perplexity 480.1690
164
+ 2800 train 6.078516 (lr=2.9336e-05) (hash(x)=151152897)
165
+ 2500 val loss 6.3226
166
+ 2500 val perplexity 557.0036
167
+ 2500 train 6.344519 (lr=2.6270e-05) (hash(x)=150925584)
168
+ 2900 val loss 6.1540
169
+ 2900 val perplexity 470.5836
170
+ 2900 train 6.096991 (lr=2.8868e-05) (hash(x)=145800210)
171
+ 2600 val loss 6.3012
172
+ 2600 val perplexity 545.2101
173
+ 2600 train 6.207327 (lr=2.5909e-05) (hash(x)=144515755)
174
+ 3000 val loss 6.1405
175
+ 3000 val perplexity 464.3056
176
+ 3000 train 5.938540 (lr=2.8386e-05) (hash(x)=141997485)
177
+ 2700 val loss 6.2843
178
+ 2700 val perplexity 536.0944
179
+ 2700 train 6.276767 (lr=2.5533e-05) (hash(x)=153109144)
180
+ 3100 val loss 6.1141
181
+ 3100 val perplexity 452.2033
182
+ 3100 train 6.023589 (lr=2.7891e-05) (hash(x)=154049740)
183
+ 3200 val loss 6.0990
184
+ 3200 val perplexity 445.3922
185
+ 3200 train 6.033494 (lr=2.7383e-05) (hash(x)=150471842)
186
+ 2800 val loss 6.2540
187
+ 2800 val perplexity 520.0851
188
+ 2800 train 6.155468 (lr=2.5145e-05) (hash(x)=151152897)
189
+ 3300 val loss 6.0854
190
+ 3300 val perplexity 439.4073
191
+ 3300 train 6.027749 (lr=2.6864e-05) (hash(x)=149048126)
192
+ 2900 val loss 6.2297
193
+ 2900 val perplexity 507.5872
194
+ 2900 train 6.172598 (lr=2.4744e-05) (hash(x)=145800210)
195
+ 3400 val loss 6.0636
196
+ 3400 val perplexity 429.9261
197
+ 3400 train 6.185919 (lr=2.6333e-05) (hash(x)=161261339)
198
+ 3000 val loss 6.2173
199
+ 3000 val perplexity 501.3599
200
+ 3000 train 6.008087 (lr=2.4331e-05) (hash(x)=141997485)
201
+ 3500 val loss 6.0527
202
+ 3500 val perplexity 425.2686
203
+ 3500 train 6.020202 (lr=2.5793e-05) (hash(x)=157495564)
204
+ 3100 val loss 6.1885
205
+ 3100 val perplexity 487.1272
206
+ 3100 train 6.090694 (lr=2.3906e-05) (hash(x)=154049740)
207
+ 3600 val loss 6.0389
208
+ 3600 val perplexity 419.4500
209
+ 3600 train 5.936598 (lr=2.5243e-05) (hash(x)=144352932)
210
+ 3200 val loss 6.1727
211
+ 3200 val perplexity 479.4807
212
+ 3200 train 6.118541 (lr=2.3471e-05) (hash(x)=150471842)
213
+ 3700 val loss 6.0198
214
+ 3700 val perplexity 411.4905
215
+ 3700 train 6.000595 (lr=2.4684e-05) (hash(x)=149389012)
216
+ 3300 val loss 6.1624
217
+ 3300 val perplexity 474.5893
218
+ 3300 train 6.099824 (lr=2.3026e-05) (hash(x)=149048126)
219
+ 3800 val loss 6.0058
220
+ 3800 val perplexity 405.7742
221
+ 3800 train 5.947519 (lr=2.4117e-05) (hash(x)=146607620)
222
+ 3400 val loss 6.1404
223
+ 3400 val perplexity 464.2498
224
+ 3400 train 6.267547 (lr=2.2572e-05) (hash(x)=161261339)
225
+ 3900 val loss 5.9940
226
+ 3900 val perplexity 401.0047
227
+ 3900 train 5.909438 (lr=2.3543e-05) (hash(x)=143754617)
228
+ 3500 val loss 6.1301
229
+ 3500 val perplexity 459.4873
230
+ 3500 train 6.102514 (lr=2.2108e-05) (hash(x)=157495564)
231
+ 4000 val loss 5.9769
232
+ 4000 val perplexity 394.2093
233
+ 4000 train 5.956694 (lr=2.2963e-05) (hash(x)=156930722)
234
+ 4100 val loss 5.9657
235
+ 4100 val perplexity 389.8218
236
+ 4100 train 5.784899 (lr=2.2378e-05) (hash(x)=147500519)
237
+ 3600 val loss 6.1144
238
+ 3600 val perplexity 452.3314
239
+ 3600 train 6.008862 (lr=2.1637e-05) (hash(x)=144352932)
240
+ 4200 val loss 5.9562
241
+ 4200 val perplexity 386.1222
242
+ 4200 train 5.796459 (lr=2.1788e-05) (hash(x)=143232237)
243
+ 3700 val loss 6.0986
244
+ 3700 val perplexity 445.2134
245
+ 3700 train 6.078876 (lr=2.1158e-05) (hash(x)=149389012)
246
+ 4300 val loss 5.9433
247
+ 4300 val perplexity 381.2018
248
+ 4300 train 5.855104 (lr=2.1194e-05) (hash(x)=146811670)
249
+ 3800 val loss 6.0828
250
+ 3800 val perplexity 438.2579
251
+ 3800 train 6.024745 (lr=2.0672e-05) (hash(x)=146607620)
252
+ 4400 val loss 5.9278
253
+ 4400 val perplexity 375.3206
254
+ 4400 train 5.963883 (lr=2.0598e-05) (hash(x)=158418746)
255
+ 3900 val loss 6.0744
256
+ 3900 val perplexity 434.6102
257
+ 3900 train 5.987369 (lr=2.0180e-05) (hash(x)=143754617)
258
+ 4500 val loss 5.9224
259
+ 4500 val perplexity 373.2968
260
+ 4500 train 5.888974 (lr=1.9999e-05) (hash(x)=156695778)
261
+ 4000 val loss 6.0584
262
+ 4000 val perplexity 427.6898
263
+ 4000 train 6.038101 (lr=1.9683e-05) (hash(x)=156930722)
264
+ 4600 val loss 5.9099
265
+ 4600 val perplexity 368.6750
266
+ 4600 train 5.934970 (lr=1.9400e-05) (hash(x)=147791497)
267
+ 4100 val loss 6.0466
268
+ 4100 val perplexity 422.6849
269
+ 4100 train 5.858241 (lr=1.9181e-05) (hash(x)=147500519)
270
+ 4700 val loss 5.8958
271
+ 4700 val perplexity 363.5157
272
+ 4700 train 5.775061 (lr=1.8800e-05) (hash(x)=155533088)
273
+ 4200 val loss 6.0418
274
+ 4200 val perplexity 420.6553
275
+ 4200 train 5.880073 (lr=1.8675e-05) (hash(x)=143232237)
276
+ 4800 val loss 5.8900
277
+ 4800 val perplexity 361.4218
278
+ 4800 train 5.741070 (lr=1.8201e-05) (hash(x)=138350044)
279
+ 4300 val loss 6.0258
280
+ 4300 val perplexity 413.9633
281
+ 4300 train 5.939585 (lr=1.8166e-05) (hash(x)=146811670)
282
+ 4900 val loss 5.8761
283
+ 4900 val perplexity 356.4004
284
+ 4900 train 5.877145 (lr=1.7604e-05) (hash(x)=143735284)
285
+ 5000 val loss 5.8655
286
+ 5000 val perplexity 352.6599
287
+ 5000 train 5.829748 (lr=1.7009e-05) (hash(x)=154976463)
288
+ 4400 val loss 6.0129
289
+ 4400 val perplexity 408.6630
290
+ 4400 train 6.047963 (lr=1.7655e-05) (hash(x)=158418746)
291
+ 5100 val loss 5.8599
292
+ 5100 val perplexity 350.6802
293
+ 5100 train 5.741970 (lr=1.6417e-05) (hash(x)=149894982)
294
+ 4500 val loss 6.0092
295
+ 4500 val perplexity 407.1400
296
+ 4500 train 5.970369 (lr=1.7142e-05) (hash(x)=156695778)
297
+ 5200 val loss 5.8555
298
+ 5200 val perplexity 349.1558
299
+ 5200 train 5.647627 (lr=1.5829e-05) (hash(x)=159326689)
300
+ 4600 val loss 5.9965
301
+ 4600 val perplexity 402.0312
302
+ 4600 train 6.023885 (lr=1.6629e-05) (hash(x)=147791497)
303
+ 5300 val loss 5.8430
304
+ 5300 val perplexity 344.8275
305
+ 5300 train 5.846123 (lr=1.5246e-05) (hash(x)=159484800)
306
+ 4700 val loss 5.9832
307
+ 4700 val perplexity 396.7080
308
+ 4700 train 5.854864 (lr=1.6114e-05) (hash(x)=155533088)
309
+ 5400 val loss 5.8314
310
+ 5400 val perplexity 340.8238
311
+ 5400 train 5.731322 (lr=1.4669e-05) (hash(x)=140385615)
312
+ 4800 val loss 5.9774
313
+ 4800 val perplexity 394.3975
314
+ 4800 train 5.825758 (lr=1.5601e-05) (hash(x)=138350044)
315
+ 5500 val loss 5.8240
316
+ 5500 val perplexity 338.3344
317
+ 5500 train 5.657810 (lr=1.4099e-05) (hash(x)=148498335)
318
+ 4900 val loss 5.9659
319
+ 4900 val perplexity 389.9207
320
+ 4900 train 5.970723 (lr=1.5089e-05) (hash(x)=143735284)
321
+ 5600 val loss 5.8222
322
+ 5600 val perplexity 337.7233
323
+ 5600 train 5.556942 (lr=1.3536e-05) (hash(x)=151907614)
324
+ 5000 val loss 5.9558
325
+ 5000 val perplexity 385.9966
326
+ 5000 train 5.919913 (lr=1.4579e-05) (hash(x)=154976463)
327
+ 5700 val loss 5.8141
328
+ 5700 val perplexity 334.9905
329
+ 5700 train 5.779860 (lr=1.2981e-05) (hash(x)=155192267)
330
+ 5800 val loss 5.8026
331
+ 5800 val perplexity 331.1440
332
+ 5800 train 5.849533 (lr=1.2436e-05) (hash(x)=153132158)
333
+ 5100 val loss 5.9527
334
+ 5100 val perplexity 384.7841
335
+ 5100 train 5.830037 (lr=1.4071e-05) (hash(x)=149894982)
336
+ 5900 val loss 5.7982
337
+ 5900 val perplexity 329.7009
338
+ 5900 train 5.772695 (lr=1.1900e-05) (hash(x)=161446764)
339
+ 5200 val loss 5.9438
340
+ 5200 val perplexity 381.3849
341
+ 5200 train 5.748489 (lr=1.3568e-05) (hash(x)=159326689)
342
+ 6000 val loss 5.7969
343
+ 6000 val perplexity 329.2916
344
+ 6000 train 5.613126 (lr=1.1375e-05) (hash(x)=151512446)
345
+ 5300 val loss 5.9373
346
+ 5300 val perplexity 378.8938
347
+ 5300 train 5.933059 (lr=1.3068e-05) (hash(x)=159484800)
348
+ 6100 val loss 5.7852
349
+ 6100 val perplexity 325.4444
350
+ 6100 train 5.915392 (lr=1.0861e-05) (hash(x)=188094053)
351
+ 5400 val loss 5.9255
352
+ 5400 val perplexity 374.4638
353
+ 5400 train 5.812152 (lr=1.2573e-05) (hash(x)=140385615)
354
+ 6200 val loss 5.7787
355
+ 6200 val perplexity 323.3293
356
+ 6200 train 5.666321 (lr=1.0360e-05) (hash(x)=149389789)
357
+ 5500 val loss 5.9199
358
+ 5500 val perplexity 372.3792
359
+ 5500 train 5.751431 (lr=1.2085e-05) (hash(x)=148498335)
360
+ 6300 val loss 5.7735
361
+ 6300 val perplexity 321.6672
362
+ 6300 train 5.593270 (lr=9.8715e-06) (hash(x)=138212820)
363
+ 5600 val loss 5.9173
364
+ 5600 val perplexity 371.4073
365
+ 5600 train 5.648520 (lr=1.1602e-05) (hash(x)=151907614)
366
+ 6400 val loss 5.7708
367
+ 6400 val perplexity 320.7969
368
+ 6400 train 5.569121 (lr=9.3966e-06) (hash(x)=146535423)
369
+ 5700 val loss 5.9118
370
+ 5700 val perplexity 369.3690
371
+ 5700 train 5.872494 (lr=1.1127e-05) (hash(x)=155192267)
372
+ 6500 val loss 5.7620
373
+ 6500 val perplexity 317.9776
374
+ 6500 train 5.634068 (lr=8.9359e-06) (hash(x)=145950843)
375
+ 6600 val loss 5.7580
376
+ 6600 val perplexity 316.7046
377
+ 6600 train 5.652801 (lr=8.4903e-06) (hash(x)=141162902)
378
+ 5800 val loss 5.9006
379
+ 5800 val perplexity 365.2532
380
+ 5800 train 5.951333 (lr=1.0659e-05) (hash(x)=153132158)
381
+ 6700 val loss 5.7561
382
+ 6700 val perplexity 316.1148
383
+ 6700 train 5.670326 (lr=8.0602e-06) (hash(x)=153018737)
384
+ 5900 val loss 5.8966
385
+ 5900 val perplexity 363.7935
386
+ 5900 train 5.868530 (lr=1.0200e-05) (hash(x)=161446764)
387
+ 6800 val loss 5.7493
388
+ 6800 val perplexity 313.9696
389
+ 6800 train 5.676184 (lr=7.6463e-06) (hash(x)=155640155)
390
+ 6000 val loss 5.8915
391
+ 6000 val perplexity 361.9300
392
+ 6000 train 5.715796 (lr=9.7500e-06) (hash(x)=151512446)
393
+ 6900 val loss 5.7452
394
+ 6900 val perplexity 312.6940
395
+ 6900 train 5.712743 (lr=7.2493e-06) (hash(x)=153722115)
396
+ 6100 val loss 5.8828
397
+ 6100 val perplexity 358.8126
398
+ 6100 train 6.019364 (lr=9.3098e-06) (hash(x)=188094053)
399
+ 7000 val loss 5.7401
400
+ 7000 val perplexity 311.0814
401
+ 7000 train 5.646478 (lr=6.8697e-06) (hash(x)=146953450)
402
+ 6200 val loss 5.8757
403
+ 6200 val perplexity 356.2753
404
+ 6200 train 5.769423 (lr=8.8800e-06) (hash(x)=149389789)
405
+ 7100 val loss 5.7406
406
+ 7100 val perplexity 311.2574
407
+ 7100 train 5.574067 (lr=6.5080e-06) (hash(x)=137663885)
408
+ 6300 val loss 5.8715
409
+ 6300 val perplexity 354.7726
410
+ 6300 train 5.685650 (lr=8.4613e-06) (hash(x)=138212820)
411
+ 7200 val loss 5.7313
412
+ 7200 val perplexity 308.3727
413
+ 7200 train 5.794419 (lr=6.1648e-06) (hash(x)=146172950)
414
+ 6400 val loss 5.8699
415
+ 6400 val perplexity 354.1973
416
+ 6400 train 5.674243 (lr=8.0542e-06) (hash(x)=146535423)
417
+ 7300 val loss 5.7278
418
+ 7300 val perplexity 307.2869
419
+ 7300 train 5.623385 (lr=5.8405e-06) (hash(x)=150018163)
420
+ 6500 val loss 5.8620
421
+ 6500 val perplexity 351.4325
422
+ 6500 train 5.728920 (lr=7.6594e-06) (hash(x)=145950843)
423
+ 7400 val loss 5.7251
424
+ 7400 val perplexity 306.4714
425
+ 7400 train 5.720573 (lr=5.5357e-06) (hash(x)=145351166)
426
+ 7500 val loss 5.7261
427
+ 7500 val perplexity 306.7808
428
+ 7500 train 5.509435 (lr=5.2508e-06) (hash(x)=145292116)
429
+ 6600 val loss 5.8585
430
+ 6600 val perplexity 350.2053
431
+ 6600 train 5.750317 (lr=7.2774e-06) (hash(x)=141162902)
432
+ 7600 val loss 5.7189
433
+ 7600 val perplexity 304.5720
434
+ 7600 train 5.639998 (lr=4.9862e-06) (hash(x)=150235132)
435
+ 6700 val loss 5.8564
436
+ 6700 val perplexity 349.4663
437
+ 6700 train 5.760009 (lr=6.9087e-06) (hash(x)=153018737)
438
+ 7700 val loss 5.7165
439
+ 7700 val perplexity 303.8320
440
+ 7700 train 5.625147 (lr=4.7423e-06) (hash(x)=154543455)
441
+ 6800 val loss 5.8492
442
+ 6800 val perplexity 346.9674
443
+ 6800 train 5.780306 (lr=6.5540e-06) (hash(x)=155640155)
444
+ 7800 val loss 5.7141
445
+ 7800 val perplexity 303.1216
446
+ 7800 train 5.606357 (lr=4.5194e-06) (hash(x)=142456852)
447
+ 6900 val loss 5.8453
448
+ 6900 val perplexity 345.5936
449
+ 6900 train 5.812240 (lr=6.2137e-06) (hash(x)=153722115)
450
+ 7900 val loss 5.7148
451
+ 7900 val perplexity 303.3319
452
+ 7900 train 5.495040 (lr=4.3179e-06) (hash(x)=147363479)
453
+ 7000 val loss 5.8411
454
+ 7000 val perplexity 344.1611
455
+ 7000 train 5.742386 (lr=5.8883e-06) (hash(x)=146953450)
456
+ 8000 val loss 5.7078
457
+ 8000 val perplexity 301.2154
458
+ 8000 train 5.742621 (lr=4.1380e-06) (hash(x)=156122973)
459
+ 7100 val loss 5.8403
460
+ 7100 val perplexity 343.8915
461
+ 7100 train 5.669028 (lr=5.5783e-06) (hash(x)=137663885)
462
+ 8100 val loss 5.7063
463
+ 8100 val perplexity 300.7574
464
+ 8100 train 5.747419 (lr=3.9800e-06) (hash(x)=156153179)
465
+ 7200 val loss 5.8320
466
+ 7200 val perplexity 341.0402
467
+ 7200 train 5.886730 (lr=5.2841e-06) (hash(x)=146172950)
468
+ 8200 val loss 5.7053
469
+ 8200 val perplexity 300.4537
470
+ 8200 train 5.748656 (lr=3.8442e-06) (hash(x)=146430698)
471
+ 8300 val loss 5.7037
472
+ 8300 val perplexity 299.9784
473
+ 8300 train 5.569663 (lr=3.7307e-06) (hash(x)=143507257)
474
+ 7300 val loss 5.8294
475
+ 7300 val perplexity 340.1391
476
+ 7300 train 5.728073 (lr=5.0062e-06) (hash(x)=150018163)
477
+ 8400 val loss 5.6993
478
+ 8400 val perplexity 298.6586
479
+ 8400 train 5.759250 (lr=3.6397e-06) (hash(x)=166272643)
480
+ 7400 val loss 5.8271
481
+ 7400 val perplexity 339.3649
482
+ 7400 train 5.802515 (lr=4.7449e-06) (hash(x)=145351166)
483
+ 8500 val loss 5.6977
484
+ 8500 val perplexity 298.1721
485
+ 8500 train 5.598984 (lr=3.5713e-06) (hash(x)=143887848)
486
+ 7500 val loss 5.8267
487
+ 7500 val perplexity 339.2248
488
+ 7500 train 5.605633 (lr=4.5007e-06) (hash(x)=145292116)
489
+ 8600 val loss 5.6975
490
+ 8600 val perplexity 298.1066
491
+ 8600 train 5.740758 (lr=3.5257e-06) (hash(x)=156900341)
492
+ 7600 val loss 5.8205
493
+ 7600 val perplexity 337.1429
494
+ 7600 train 5.732724 (lr=4.2739e-06) (hash(x)=150235132)
495
+ 8700 val loss 5.6928
496
+ 8700 val perplexity 296.7115
497
+ 8700 train 5.918503 (lr=3.5029e-06) (hash(x)=146417632)
498
+ 8749 val loss 5.6925
499
+ 8749 val perplexity 296.6473
500
+ 7700 val loss 5.8191
501
+ 7700 val perplexity 336.6639
502
+ 7700 train 5.747572 (lr=4.0648e-06) (hash(x)=154543455)
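Each log line follows a fixed format: "<step> val loss <loss>", "<step> val perplexity <ppl>" and "<step> train <loss> (lr=...) (hash(x)=...)", with perplexity equal to exp(loss) (e.g. exp(5.6925) is roughly 296.6 at step 8749). Note that the new log2.txt appears to interleave output from two runs, so a given step can appear more than once. A small parsing sketch under those assumptions (the local filename is hypothetical):

import math
import re

# Hypothetical local copy of log2.txt as shown above.
val_losses = []
with open("log2.txt") as f:
    for line in f:
        m = re.match(r"(\d+) val loss ([\d.]+)", line)
        if m:
            val_losses.append((int(m.group(1)), float(m.group(2))))

# Perplexity in the log is just exp(val loss).
for step, loss in val_losses:
    print(step, loss, round(math.exp(loss), 4))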
attention_kindselective_n_heads4_seed1339/model_08749.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0799e271014bd3a93b4c15f8a90dc002dc00df4c2d676c29130f65bddb5be45b
+ oid sha256:5f9f086e5bc9016de004cf97e0e1fe7e0a7ef5565d49ea076873db395a988fe1
  size 92843394
attention_kindselective_n_heads4_seed1339/optimizer_08749.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5efbfc12e221b8e452d6c7051c9f70c4a071a6f63d0982e0f3688b6b2e0170f2
+ oid sha256:22c2cc84339b65454afc89bf4f0daaa707e2a4500376081f0b13ded666f36934
  size 179406214
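Both checkpoint files are stored through Git LFS, so the diff only changes the pointer text: oid is the SHA-256 hash of the file contents and size is its byte count. A quick way to check a downloaded checkpoint against its pointer (the local path is hypothetical):

import hashlib
import os

path = "model_08749.pt"  # hypothetical local path to the downloaded checkpoint

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

# These should match the "oid sha256:..." and "size ..." lines of the LFS pointer.
print("oid sha256:" + h.hexdigest())
print("size", os.path.getsize(path))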