andrew-healey committed
Commit 4a3fb16 · verified · 1 Parent(s): ab67a35

Upload folder using huggingface_hub

12_head_baseline_lr_40e-4_head_dim_22/args.json ADDED
@@ -0,0 +1 @@
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "logs/repro_selective_pattern_rankings/12_head_baseline_lr_40e-4_head_dim_22", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 12, "n_embd": 264, "head_dim": 22, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 4375, "warmup_steps": 250, "group": "repro_selective_pattern_rankings", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1339, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 64, "total_batch_size": 131072, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.004, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "12_head_baseline_lr_40e-4"}
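The run config above is a single flat JSON object, so it can be loaded and sanity-checked directly. A minimal Python sketch follows; the assumption that total_batch_size counts tokens (so that batch_size * seq_len micro-batches are accumulated per step) is ours, not stated in the file:

import json

# Load the flat run config committed alongside the checkpoints.
with open("12_head_baseline_lr_40e-4_head_dim_22/args.json") as f:
    args = json.load(f)

# Head geometry is internally consistent: n_embd = n_heads * head_dim.
assert args["n_embd"] == args["n_heads"] * args["head_dim"]  # 264 == 12 * 22

# If total_batch_size is counted in tokens (our assumption), the implied
# gradient-accumulation factor is 131072 / (64 * 256) = 8.
grad_accum_steps = args["total_batch_size"] // (args["batch_size"] * args["seq_len"])
print(grad_accum_steps)  # 8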
12_head_baseline_lr_40e-4_head_dim_22/dataloader_04374.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8ec6682852ed60a9d42cb4047300f65a04f87328c0ad7a4516be84d11b28f216
+ size 964
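This .pt file is stored with Git LFS, so the diff shows only the three-line pointer (spec version, sha256 oid, byte size) rather than the binary payload. A small sketch of reading such a pointer; parse_lfs_pointer is a hypothetical helper, not part of git-lfs or huggingface_hub:

# Each line of a Git LFS pointer is "key value"; the blob itself lives in
# LFS storage, addressed by the sha256 oid.
def parse_lfs_pointer(text: str) -> dict:
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = parse_lfs_pointer(
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:8ec6682852ed60a9d42cb4047300f65a04f87328c0ad7a4516be84d11b28f216\n"
    "size 964\n"
)
assert pointer["size"] == "964"  # matches the pointer above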
12_head_baseline_lr_40e-4_head_dim_22/log2.txt ADDED
@@ -0,0 +1,529 @@
+ max_steps: 4375
+ 0 val loss 10.9076
+ 0 val perplexity 54591.7773
+ 0 train 10.908087 (lr=5.5944e-06) (hash(x)=93356070)
+ 10 train 10.458488 (lr=6.1538e-05) (hash(x)=91253010)
+ 20 train 10.184336 (lr=1.1748e-04) (hash(x)=74934453)
+ 30 train 9.785275 (lr=1.7343e-04) (hash(x)=79278034)
+ 40 train 9.218511 (lr=2.2937e-04) (hash(x)=80107892)
+ 50 train 8.671330 (lr=2.8531e-04) (hash(x)=70745428)
+ 60 train 8.218834 (lr=3.4126e-04) (hash(x)=80077589)
+ 70 train 7.673330 (lr=3.9720e-04) (hash(x)=76213766)
+ 80 train 7.557312 (lr=4.5315e-04) (hash(x)=83218328)
+ 90 train 7.384906 (lr=5.0909e-04) (hash(x)=74855845)
+ 100 val loss 7.2485
+ 100 val perplexity 1406.0477
+ 100 train 7.110302 (lr=5.6503e-04) (hash(x)=82814902)
+ 110 train 7.033719 (lr=6.2098e-04) (hash(x)=84907741)
+ 120 train 6.885352 (lr=6.7692e-04) (hash(x)=82613223)
+ 130 train 6.700798 (lr=7.3287e-04) (hash(x)=83540876)
+ 140 train 6.655601 (lr=7.8881e-04) (hash(x)=75095216)
+ 150 train 6.698576 (lr=8.4476e-04) (hash(x)=97190944)
+ 160 train 6.623221 (lr=9.0070e-04) (hash(x)=82117809)
+ 170 train 6.521166 (lr=9.5664e-04) (hash(x)=70514724)
+ 180 train 6.444358 (lr=1.0126e-03) (hash(x)=81029624)
+ 190 train 6.364469 (lr=1.0685e-03) (hash(x)=78996841)
+ 200 val loss 6.3171
+ 200 val perplexity 553.9697
+ 200 train 6.320910 (lr=1.1245e-03) (hash(x)=79845097)
+ 210 train 6.206264 (lr=1.1804e-03) (hash(x)=78997683)
+ 220 train 6.119081 (lr=1.2364e-03) (hash(x)=74895865)
+ 230 train 6.272306 (lr=1.2923e-03) (hash(x)=80933276)
+ 240 train 5.975600 (lr=1.3483e-03) (hash(x)=77664606)
+ 250 train 6.005166 (lr=1.4042e-03) (hash(x)=75903930)
+ 260 train 5.916791 (lr=1.4601e-03) (hash(x)=82996853)
+ 270 train 5.832993 (lr=1.5161e-03) (hash(x)=73269568)
+ 280 train 5.766723 (lr=1.5720e-03) (hash(x)=82906427)
+ 290 train 5.690261 (lr=1.6280e-03) (hash(x)=71797895)
+ 300 val loss 5.8385
+ 300 val perplexity 343.2719
+ 300 train 5.551917 (lr=1.6839e-03) (hash(x)=85232249)
+ 310 train 5.491411 (lr=1.7399e-03) (hash(x)=75252489)
+ 320 train 5.580354 (lr=1.7958e-03) (hash(x)=74394644)
+ 330 train 5.666135 (lr=1.8517e-03) (hash(x)=82772910)
+ 340 train 5.716442 (lr=1.9077e-03) (hash(x)=81627464)
+ 350 train 5.697394 (lr=1.9636e-03) (hash(x)=84678053)
+ 360 train 5.645057 (lr=2.0196e-03) (hash(x)=81884128)
+ 370 train 5.644568 (lr=2.0755e-03) (hash(x)=76379242)
+ 380 train 5.444513 (lr=2.1315e-03) (hash(x)=98182875)
+ 390 train 5.427341 (lr=2.1874e-03) (hash(x)=79710436)
+ 400 val loss 5.4796
+ 400 val perplexity 239.7587
+ 400 train 5.424941 (lr=2.2434e-03) (hash(x)=79841071)
+ 410 train 5.367428 (lr=2.2993e-03) (hash(x)=75844151)
+ 420 train 5.364619 (lr=2.3552e-03) (hash(x)=73125036)
+ 430 train 5.395781 (lr=2.4112e-03) (hash(x)=84214858)
+ 440 train 5.289318 (lr=2.4671e-03) (hash(x)=80456994)
+ 450 train 5.274330 (lr=2.5231e-03) (hash(x)=76735962)
+ 460 train 5.186294 (lr=2.5790e-03) (hash(x)=81845446)
+ 470 train 4.987415 (lr=2.6350e-03) (hash(x)=76094689)
+ 480 train 5.129744 (lr=2.6909e-03) (hash(x)=83806686)
+ 490 train 4.932065 (lr=2.7469e-03) (hash(x)=84690227)
+ 500 val loss 5.2855
+ 500 val perplexity 197.4609
+ 500 train 5.022814 (lr=2.8028e-03) (hash(x)=71851938)
+ 510 train 5.126309 (lr=2.8587e-03) (hash(x)=77159346)
+ 520 train 5.183241 (lr=2.9147e-03) (hash(x)=80755753)
+ 530 train 5.293306 (lr=2.9706e-03) (hash(x)=75379679)
+ 540 train 5.199385 (lr=3.0266e-03) (hash(x)=82458619)
+ 550 train 5.162796 (lr=3.0825e-03) (hash(x)=86366396)
+ 560 train 5.188817 (lr=3.1385e-03) (hash(x)=85095044)
+ 570 train 5.158607 (lr=3.1944e-03) (hash(x)=78385159)
+ 580 train 5.088471 (lr=3.2503e-03) (hash(x)=79342394)
+ 590 train 5.114386 (lr=3.3063e-03) (hash(x)=70782192)
+ 600 val loss 5.1006
+ 600 val perplexity 164.1205
+ 600 train 5.084810 (lr=3.3622e-03) (hash(x)=76130353)
+ 610 train 4.999259 (lr=3.4182e-03) (hash(x)=74778440)
+ 620 train 4.967265 (lr=3.4741e-03) (hash(x)=79129709)
+ 630 train 4.871732 (lr=3.5301e-03) (hash(x)=76469962)
+ 640 train 4.874993 (lr=3.5860e-03) (hash(x)=78288049)
+ 650 train 4.835431 (lr=3.6420e-03) (hash(x)=76641388)
+ 660 train 4.751196 (lr=3.6979e-03) (hash(x)=75937906)
+ 670 train 4.735569 (lr=3.7538e-03) (hash(x)=74807157)
+ 680 train 4.635958 (lr=3.8098e-03) (hash(x)=77490144)
+ 690 train 4.678259 (lr=3.8657e-03) (hash(x)=72900124)
+ 700 val loss 4.9404
+ 700 val perplexity 139.8202
+ 700 train 4.634887 (lr=3.9217e-03) (hash(x)=73424218)
+ 710 train 4.920900 (lr=3.9776e-03) (hash(x)=86052345)
+ 720 train 4.890616 (lr=4.0000e-03) (hash(x)=77462613)
+ 730 train 4.889611 (lr=3.9999e-03) (hash(x)=79027471)
+ 740 train 4.847482 (lr=3.9996e-03) (hash(x)=78149992)
+ 750 train 4.785507 (lr=3.9992e-03) (hash(x)=89147499)
+ 760 train 4.820312 (lr=3.9987e-03) (hash(x)=74931538)
+ 770 train 4.768336 (lr=3.9980e-03) (hash(x)=89648204)
+ 780 train 4.818522 (lr=3.9972e-03) (hash(x)=83708293)
+ 790 train 4.822966 (lr=3.9963e-03) (hash(x)=84336512)
+ 800 val loss 4.7773
+ 800 val perplexity 118.7841
+ 800 train 4.700686 (lr=3.9952e-03) (hash(x)=75025285)
+ 810 train 4.609047 (lr=3.9940e-03) (hash(x)=79436536)
+ 820 train 4.617548 (lr=3.9927e-03) (hash(x)=90467390)
+ 830 train 4.653180 (lr=3.9912e-03) (hash(x)=77292020)
+ 840 train 4.581813 (lr=3.9896e-03) (hash(x)=75568927)
+ 850 train 4.614418 (lr=3.9879e-03) (hash(x)=79671781)
+ 860 train 4.692541 (lr=3.9861e-03) (hash(x)=84280943)
+ 870 train 4.680172 (lr=3.9841e-03) (hash(x)=80901294)
+ 880 train 4.791514 (lr=3.9820e-03) (hash(x)=81437584)
+ 890 train 4.655227 (lr=3.9797e-03) (hash(x)=73893778)
+ 900 val loss 4.6634
+ 900 val perplexity 105.9958
+ 900 train 4.597544 (lr=3.9774e-03) (hash(x)=74987794)
+ 910 train 4.615208 (lr=3.9748e-03) (hash(x)=93721374)
+ 920 train 4.597638 (lr=3.9722e-03) (hash(x)=79149678)
+ 930 train 4.561288 (lr=3.9694e-03) (hash(x)=83179387)
+ 940 train 4.561318 (lr=3.9665e-03) (hash(x)=89009978)
+ 950 train 4.549408 (lr=3.9635e-03) (hash(x)=76483735)
+ 960 train 4.482023 (lr=3.9603e-03) (hash(x)=78638850)
+ 970 train 4.514319 (lr=3.9571e-03) (hash(x)=90415206)
+ 980 train 4.454084 (lr=3.9536e-03) (hash(x)=90819195)
+ 990 train 4.583511 (lr=3.9501e-03) (hash(x)=85938852)
+ 1000 val loss 4.5648
+ 1000 val perplexity 96.0438
+ 1000 train 4.617029 (lr=3.9464e-03) (hash(x)=82436789)
+ 1010 train 4.670533 (lr=3.9426e-03) (hash(x)=63022149)
+ 1020 train 4.587653 (lr=3.9387e-03) (hash(x)=83501199)
+ 1030 train 4.563540 (lr=3.9346e-03) (hash(x)=77093326)
+ 1040 train 4.442442 (lr=3.9304e-03) (hash(x)=73437559)
+ 1050 train 4.538055 (lr=3.9261e-03) (hash(x)=84550388)
+ 1060 train 4.535672 (lr=3.9216e-03) (hash(x)=86184566)
+ 1070 train 4.522191 (lr=3.9171e-03) (hash(x)=85644922)
+ 1080 train 4.453305 (lr=3.9124e-03) (hash(x)=86179801)
+ 1090 train 4.435273 (lr=3.9076e-03) (hash(x)=82868303)
+ 1100 val loss 4.5028
+ 1100 val perplexity 90.2676
+ 1100 train 4.526828 (lr=3.9026e-03) (hash(x)=96780388)
+ 1110 train 4.467020 (lr=3.8975e-03) (hash(x)=87223122)
+ 1120 train 4.437481 (lr=3.8923e-03) (hash(x)=77292786)
+ 1130 train 4.454478 (lr=3.8870e-03) (hash(x)=88761375)
+ 1140 train 4.374186 (lr=3.8815e-03) (hash(x)=80956468)
+ 1150 train 4.552345 (lr=3.8760e-03) (hash(x)=84725462)
+ 1160 train 4.535572 (lr=3.8703e-03) (hash(x)=76098113)
+ 1170 train 4.506839 (lr=3.8645e-03) (hash(x)=84387685)
+ 1180 train 4.457309 (lr=3.8585e-03) (hash(x)=76120321)
+ 1190 train 4.414488 (lr=3.8524e-03) (hash(x)=85974065)
+ 1200 val loss 4.4370
+ 1200 val perplexity 84.5184
+ 1200 train 4.322309 (lr=3.8463e-03) (hash(x)=79340644)
+ 1210 train 4.421555 (lr=3.8400e-03) (hash(x)=88345025)
+ 1220 train 4.326811 (lr=3.8335e-03) (hash(x)=86406230)
+ 1230 train 4.311566 (lr=3.8270e-03) (hash(x)=80608975)
+ 1240 train 4.360624 (lr=3.8203e-03) (hash(x)=81635225)
+ 1250 train 4.202559 (lr=3.8135e-03) (hash(x)=82126376)
+ 1260 train 4.381113 (lr=3.8066e-03) (hash(x)=83737972)
+ 1270 train 4.295012 (lr=3.7996e-03) (hash(x)=81132345)
+ 1280 train 4.384928 (lr=3.7924e-03) (hash(x)=85850404)
+ 1290 train 4.393001 (lr=3.7852e-03) (hash(x)=83490640)
+ 1300 val loss 4.3955
+ 1300 val perplexity 81.0878
+ 1300 train 4.368717 (lr=3.7778e-03) (hash(x)=77545187)
+ 1310 train 4.412107 (lr=3.7703e-03) (hash(x)=86412685)
+ 1320 train 4.308740 (lr=3.7627e-03) (hash(x)=86429640)
+ 1330 train 4.445072 (lr=3.7550e-03) (hash(x)=79645281)
+ 1340 train 4.401803 (lr=3.7471e-03) (hash(x)=82668541)
+ 1350 train 4.263761 (lr=3.7392e-03) (hash(x)=73627248)
+ 1360 train 4.363901 (lr=3.7311e-03) (hash(x)=74845133)
+ 1370 train 4.272449 (lr=3.7229e-03) (hash(x)=77850497)
+ 1380 train 4.301913 (lr=3.7146e-03) (hash(x)=76143954)
+ 1390 train 4.310223 (lr=3.7062e-03) (hash(x)=80675544)
+ 1400 val loss 4.3781
+ 1400 val perplexity 79.6856
+ 1400 train 4.284654 (lr=3.6977e-03) (hash(x)=76010938)
+ 1410 train 4.339575 (lr=3.6891e-03) (hash(x)=81260300)
+ 1420 train 4.450355 (lr=3.6804e-03) (hash(x)=83803629)
+ 1430 train 4.278694 (lr=3.6715e-03) (hash(x)=83840110)
+ 1440 train 4.465683 (lr=3.6626e-03) (hash(x)=97711831)
+ 1450 train 4.341757 (lr=3.6535e-03) (hash(x)=81110139)
+ 1460 train 4.266294 (lr=3.6443e-03) (hash(x)=90942839)
+ 1470 train 4.304605 (lr=3.6351e-03) (hash(x)=80721564)
+ 1480 train 4.400560 (lr=3.6257e-03) (hash(x)=77852059)
+ 1490 train 4.292363 (lr=3.6162e-03) (hash(x)=80011365)
+ 1500 val loss 4.3336
+ 1500 val perplexity 76.2197
+ 1500 train 4.322636 (lr=3.6066e-03) (hash(x)=84496142)
+ 1510 train 4.261234 (lr=3.5969e-03) (hash(x)=70186729)
+ 1520 train 4.287668 (lr=3.5871e-03) (hash(x)=74854227)
+ 1530 train 4.193581 (lr=3.5772e-03) (hash(x)=77468161)
+ 1540 train 4.208623 (lr=3.5672e-03) (hash(x)=87853059)
+ 1550 train 4.141577 (lr=3.5571e-03) (hash(x)=71225436)
+ 1560 train 4.241508 (lr=3.5469e-03) (hash(x)=84072783)
+ 1570 train 4.296432 (lr=3.5366e-03) (hash(x)=72723098)
+ 1580 train 4.312276 (lr=3.5262e-03) (hash(x)=90409866)
+ 1590 train 4.382561 (lr=3.5157e-03) (hash(x)=87481378)
+ 1600 val loss 4.3100
+ 1600 val perplexity 74.4386
+ 1600 train 4.370471 (lr=3.5051e-03) (hash(x)=77643862)
+ 1610 train 4.255655 (lr=3.4945e-03) (hash(x)=88862575)
+ 1620 train 4.267348 (lr=3.4837e-03) (hash(x)=84612581)
+ 1630 train 4.304303 (lr=3.4728e-03) (hash(x)=87075989)
+ 1640 train 4.198313 (lr=3.4618e-03) (hash(x)=88277361)
+ 1650 train 4.310437 (lr=3.4508e-03) (hash(x)=78750236)
+ 1660 train 4.236474 (lr=3.4396e-03) (hash(x)=82604581)
+ 1670 train 4.070506 (lr=3.4284e-03) (hash(x)=68482265)
+ 1680 train 4.194980 (lr=3.4170e-03) (hash(x)=75088835)
+ 1690 train 4.139318 (lr=3.4056e-03) (hash(x)=66060989)
+ 1700 val loss 4.3186
+ 1700 val perplexity 75.0848
+ 1700 train 4.309287 (lr=3.3941e-03) (hash(x)=79986754)
+ 1710 train 4.292383 (lr=3.3825e-03) (hash(x)=83657930)
+ 1720 train 4.238027 (lr=3.3708e-03) (hash(x)=81754135)
+ 1730 train 4.321444 (lr=3.3590e-03) (hash(x)=78904427)
+ 1740 train 4.216912 (lr=3.3471e-03) (hash(x)=85920177)
+ 1750 train 4.270732 (lr=3.3352e-03) (hash(x)=86573211)
+ 1760 train 4.228909 (lr=3.3231e-03) (hash(x)=81737128)
+ 1770 train 4.220685 (lr=3.3110e-03) (hash(x)=77400968)
+ 1780 train 4.232032 (lr=3.2988e-03) (hash(x)=73545497)
+ 1790 train 4.481085 (lr=3.2866e-03) (hash(x)=71641943)
+ 1800 val loss 4.2691
+ 1800 val perplexity 71.4543
+ 1800 train 4.332547 (lr=3.2742e-03) (hash(x)=87819781)
+ 1810 train 4.185174 (lr=3.2618e-03) (hash(x)=86870770)
+ 1820 train 4.191788 (lr=3.2492e-03) (hash(x)=82522211)
+ 1830 train 4.278109 (lr=3.2367e-03) (hash(x)=61947437)
+ 1840 train 4.161554 (lr=3.2240e-03) (hash(x)=79865406)
+ 1850 train 4.124757 (lr=3.2112e-03) (hash(x)=79828721)
+ 1860 train 4.273605 (lr=3.1984e-03) (hash(x)=80869571)
+ 1870 train 4.253980 (lr=3.1855e-03) (hash(x)=73780971)
+ 1880 train 4.218853 (lr=3.1726e-03) (hash(x)=79249549)
+ 1890 train 4.274049 (lr=3.1595e-03) (hash(x)=81041904)
+ 1900 val loss 4.2426
+ 1900 val perplexity 69.5879
+ 1900 train 4.340029 (lr=3.1464e-03) (hash(x)=82456430)
+ 1910 train 4.110319 (lr=3.1332e-03) (hash(x)=82222135)
+ 1920 train 4.145608 (lr=3.1200e-03) (hash(x)=70033249)
+ 1930 train 4.130219 (lr=3.1067e-03) (hash(x)=72887360)
+ 1940 train 4.183333 (lr=3.0933e-03) (hash(x)=83251100)
+ 1950 train 4.221889 (lr=3.0799e-03) (hash(x)=79660266)
+ 1960 train 4.099637 (lr=3.0663e-03) (hash(x)=78919068)
+ 1970 train 4.199788 (lr=3.0528e-03) (hash(x)=94690431)
+ 1980 train 4.038556 (lr=3.0391e-03) (hash(x)=88153756)
+ 1990 train 4.091207 (lr=3.0254e-03) (hash(x)=77195688)
+ 2000 val loss 4.2221
+ 2000 val perplexity 68.1795
+ 2000 train 4.187912 (lr=3.0117e-03) (hash(x)=81308591)
+ 2010 train 4.235891 (lr=2.9979e-03) (hash(x)=80205479)
+ 2020 train 4.155325 (lr=2.9840e-03) (hash(x)=81008704)
+ 2030 train 4.221498 (lr=2.9700e-03) (hash(x)=82914358)
+ 2040 train 4.228296 (lr=2.9560e-03) (hash(x)=88064399)
+ 2050 train 4.237135 (lr=2.9420e-03) (hash(x)=82889390)
+ 2060 train 4.305842 (lr=2.9279e-03) (hash(x)=80913554)
+ 2070 train 4.146793 (lr=2.9137e-03) (hash(x)=78121791)
+ 2080 train 4.280697 (lr=2.8995e-03) (hash(x)=75356657)
+ 2090 train 4.106748 (lr=2.8853e-03) (hash(x)=74048412)
+ 2100 val loss 4.1993
+ 2100 val perplexity 66.6396
+ 2100 train 4.024669 (lr=2.8709e-03) (hash(x)=68928225)
+ 2110 train 4.075489 (lr=2.8566e-03) (hash(x)=93734745)
+ 2120 train 4.073512 (lr=2.8422e-03) (hash(x)=72878164)
+ 2130 train 4.205133 (lr=2.8277e-03) (hash(x)=71508226)
+ 2140 train 4.164807 (lr=2.8132e-03) (hash(x)=84168671)
+ 2150 train 4.097293 (lr=2.7987e-03) (hash(x)=74673639)
+ 2160 train 4.242332 (lr=2.7841e-03) (hash(x)=75470031)
+ 2170 train 4.205914 (lr=2.7694e-03) (hash(x)=74307890)
+ 2180 train 4.318648 (lr=2.7548e-03) (hash(x)=77214245)
+ 2190 train 4.100481 (lr=2.7400e-03) (hash(x)=88628359)
+ 2200 val loss 4.1808
+ 2200 val perplexity 65.4159
+ 2200 train 4.124396 (lr=2.7253e-03) (hash(x)=74779126)
+ 2210 train 4.156603 (lr=2.7105e-03) (hash(x)=79817976)
+ 2220 train 4.106953 (lr=2.6957e-03) (hash(x)=75258996)
+ 2230 train 4.084330 (lr=2.6808e-03) (hash(x)=88164047)
+ 2240 train 4.067251 (lr=2.6659e-03) (hash(x)=78580686)
+ 2250 train 4.007695 (lr=2.6509e-03) (hash(x)=76510617)
+ 2260 train 4.080310 (lr=2.6360e-03) (hash(x)=76043966)
+ 2270 train 4.057000 (lr=2.6210e-03) (hash(x)=73062098)
+ 2280 train 4.281871 (lr=2.6059e-03) (hash(x)=81885909)
+ 2290 train 4.220579 (lr=2.5908e-03) (hash(x)=81481741)
+ 2300 val loss 4.1598
+ 2300 val perplexity 64.0600
+ 2300 train 4.079860 (lr=2.5758e-03) (hash(x)=82104275)
+ 2310 train 4.128578 (lr=2.5606e-03) (hash(x)=82137309)
+ 2320 train 4.106791 (lr=2.5455e-03) (hash(x)=86741167)
+ 2330 train 4.120911 (lr=2.5303e-03) (hash(x)=84796263)
+ 2340 train 4.101315 (lr=2.5151e-03) (hash(x)=169966529)
+ 2350 train 3.967781 (lr=2.4999e-03) (hash(x)=80006061)
+ 2360 train 4.024746 (lr=2.4846e-03) (hash(x)=71090999)
+ 2370 train 4.157403 (lr=2.4694e-03) (hash(x)=86641850)
+ 2380 train 4.056834 (lr=2.4541e-03) (hash(x)=84573253)
+ 2390 train 4.085881 (lr=2.4388e-03) (hash(x)=74483764)
+ 2400 val loss 4.1556
+ 2400 val perplexity 63.7886
+ 2400 train 4.059066 (lr=2.4235e-03) (hash(x)=78327659)
+ 2410 train 4.127447 (lr=2.4081e-03) (hash(x)=77273627)
+ 2420 train 3.990860 (lr=2.3928e-03) (hash(x)=76938049)
+ 2430 train 3.991381 (lr=2.3774e-03) (hash(x)=98449442)
+ 2440 train 3.854030 (lr=2.3620e-03) (hash(x)=78157797)
+ 2450 train 3.894112 (lr=2.3466e-03) (hash(x)=80637582)
+ 2460 train 3.887185 (lr=2.3312e-03) (hash(x)=80023854)
+ 2470 train 3.949803 (lr=2.3158e-03) (hash(x)=86678884)
+ 2480 train 4.174103 (lr=2.3004e-03) (hash(x)=84086469)
+ 2490 train 4.192038 (lr=2.2849e-03) (hash(x)=83131141)
+ 2500 val loss 4.1272
+ 2500 val perplexity 62.0034
+ 2500 train 4.107065 (lr=2.2695e-03) (hash(x)=82583497)
+ 2510 train 4.164256 (lr=2.2541e-03) (hash(x)=79164326)
+ 2520 train 4.075881 (lr=2.2386e-03) (hash(x)=76456503)
+ 2530 train 4.135474 (lr=2.2232e-03) (hash(x)=82001228)
+ 2540 train 4.166557 (lr=2.2077e-03) (hash(x)=75615595)
+ 2550 train 4.110672 (lr=2.1923e-03) (hash(x)=78397869)
+ 2560 train 4.117221 (lr=2.1768e-03) (hash(x)=75547032)
+ 2570 train 4.026905 (lr=2.1614e-03) (hash(x)=89201025)
+ 2580 train 4.016556 (lr=2.1459e-03) (hash(x)=89856704)
+ 2590 train 4.024228 (lr=2.1305e-03) (hash(x)=82175682)
+ 2600 val loss 4.1113
+ 2600 val perplexity 61.0234
+ 2600 train 4.029932 (lr=2.1151e-03) (hash(x)=78312826)
+ 2610 train 4.021157 (lr=2.0996e-03) (hash(x)=77066588)
+ 2620 train 3.864421 (lr=2.0842e-03) (hash(x)=78666061)
+ 2630 train 3.862624 (lr=2.0688e-03) (hash(x)=93762143)
+ 2640 train 3.877935 (lr=2.0534e-03) (hash(x)=83191587)
+ 2650 train 3.795954 (lr=2.0380e-03) (hash(x)=87169585)
+ 2660 train 4.023151 (lr=2.0226e-03) (hash(x)=86426388)
+ 2670 train 4.155124 (lr=2.0072e-03) (hash(x)=76692638)
+ 2680 train 4.069413 (lr=1.9919e-03) (hash(x)=77446063)
+ 2690 train 4.069152 (lr=1.9765e-03) (hash(x)=79809050)
+ 2700 val loss 4.1057
+ 2700 val perplexity 60.6860
+ 2700 train 3.996182 (lr=1.9612e-03) (hash(x)=83116823)
+ 2710 train 4.110099 (lr=1.9459e-03) (hash(x)=75622148)
+ 2720 train 4.042389 (lr=1.9306e-03) (hash(x)=80690512)
+ 2730 train 4.107502 (lr=1.9154e-03) (hash(x)=78009984)
+ 2740 train 4.062438 (lr=1.9001e-03) (hash(x)=74205488)
+ 2750 train 4.068693 (lr=1.8849e-03) (hash(x)=91013332)
+ 2760 train 3.989962 (lr=1.8697e-03) (hash(x)=76954961)
+ 2770 train 4.004441 (lr=1.8545e-03) (hash(x)=79390317)
+ 2780 train 3.870897 (lr=1.8394e-03) (hash(x)=70168783)
+ 2790 train 4.030679 (lr=1.8242e-03) (hash(x)=76028417)
+ 2800 val loss 4.0851
+ 2800 val perplexity 59.4475
+ 2800 train 3.852032 (lr=1.8092e-03) (hash(x)=77656050)
+ 2810 train 3.897044 (lr=1.7941e-03) (hash(x)=94248216)
+ 2820 train 3.965987 (lr=1.7790e-03) (hash(x)=78305078)
+ 2830 train 3.879869 (lr=1.7640e-03) (hash(x)=79948848)
+ 2840 train 3.739035 (lr=1.7491e-03) (hash(x)=85341024)
+ 2850 train 4.052204 (lr=1.7341e-03) (hash(x)=78735170)
+ 2860 train 4.305175 (lr=1.7192e-03) (hash(x)=71616419)
+ 2870 train 4.071728 (lr=1.7043e-03) (hash(x)=78656517)
+ 2880 train 4.082227 (lr=1.6895e-03) (hash(x)=80073987)
+ 2890 train 4.014472 (lr=1.6747e-03) (hash(x)=76894809)
+ 2900 val loss 4.0595
+ 2900 val perplexity 57.9470
+ 2900 train 3.995089 (lr=1.6600e-03) (hash(x)=80499838)
+ 2910 train 4.045251 (lr=1.6452e-03) (hash(x)=72673354)
+ 2920 train 4.027093 (lr=1.6306e-03) (hash(x)=84265768)
+ 2930 train 4.054072 (lr=1.6159e-03) (hash(x)=79612060)
+ 2940 train 3.907456 (lr=1.6013e-03) (hash(x)=74970087)
+ 2950 train 4.065562 (lr=1.5868e-03) (hash(x)=84166818)
+ 2960 train 4.052482 (lr=1.5723e-03) (hash(x)=89410221)
+ 2970 train 3.994113 (lr=1.5578e-03) (hash(x)=75672566)
+ 2980 train 3.845670 (lr=1.5434e-03) (hash(x)=81760314)
+ 2990 train 3.818416 (lr=1.5291e-03) (hash(x)=80605200)
+ 3000 val loss 4.0582
+ 3000 val perplexity 57.8704
+ 3000 train 3.814762 (lr=1.5147e-03) (hash(x)=83804735)
+ 3010 train 3.741226 (lr=1.5005e-03) (hash(x)=77015303)
+ 3020 train 3.807526 (lr=1.4863e-03) (hash(x)=81464523)
+ 3030 train 4.005996 (lr=1.4721e-03) (hash(x)=80168230)
+ 3040 train 3.951922 (lr=1.4580e-03) (hash(x)=84817006)
+ 3050 train 4.060794 (lr=1.4440e-03) (hash(x)=71601811)
+ 3060 train 3.980760 (lr=1.4300e-03) (hash(x)=85499733)
+ 3070 train 4.038463 (lr=1.4160e-03) (hash(x)=78670408)
+ 3080 train 3.930373 (lr=1.4021e-03) (hash(x)=77120468)
+ 3090 train 4.035828 (lr=1.3883e-03) (hash(x)=77927426)
+ 3100 val loss 4.0364
+ 3100 val perplexity 56.6216
+ 3100 train 4.051445 (lr=1.3746e-03) (hash(x)=83998606)
+ 3110 train 3.953995 (lr=1.3609e-03) (hash(x)=77990218)
+ 3120 train 3.868207 (lr=1.3472e-03) (hash(x)=81623970)
+ 3130 train 3.837437 (lr=1.3337e-03) (hash(x)=75014781)
+ 3140 train 3.890389 (lr=1.3201e-03) (hash(x)=72591250)
+ 3150 train 3.902428 (lr=1.3067e-03) (hash(x)=81421847)
+ 3160 train 3.956516 (lr=1.2933e-03) (hash(x)=75112631)
+ 3170 train 4.053855 (lr=1.2800e-03) (hash(x)=87518033)
+ 3180 train 3.906461 (lr=1.2668e-03) (hash(x)=78648348)
+ 3190 train 4.015678 (lr=1.2536e-03) (hash(x)=78270029)
+ 3200 val loss 4.0168
+ 3200 val perplexity 55.5204
+ 3200 train 4.087307 (lr=1.2405e-03) (hash(x)=83380714)
+ 3210 train 4.274594 (lr=1.2274e-03) (hash(x)=67804991)
+ 3220 train 4.000900 (lr=1.2145e-03) (hash(x)=76968804)
+ 3230 train 3.937055 (lr=1.2016e-03) (hash(x)=75798670)
+ 3240 train 4.008497 (lr=1.1888e-03) (hash(x)=83192811)
+ 3250 train 3.945548 (lr=1.1760e-03) (hash(x)=81659789)
+ 3260 train 3.991395 (lr=1.1633e-03) (hash(x)=80622502)
+ 3270 train 3.958820 (lr=1.1508e-03) (hash(x)=85436511)
+ 3280 train 3.903336 (lr=1.1382e-03) (hash(x)=80589180)
+ 3290 train 4.040409 (lr=1.1258e-03) (hash(x)=85747193)
+ 3300 val loss 4.0114
+ 3300 val perplexity 55.2241
+ 3300 train 3.908028 (lr=1.1134e-03) (hash(x)=74635692)
+ 3310 train 3.893753 (lr=1.1012e-03) (hash(x)=82045455)
+ 3320 train 3.763941 (lr=1.0890e-03) (hash(x)=73249173)
+ 3330 train 3.724732 (lr=1.0769e-03) (hash(x)=71492338)
+ 3340 train 4.056436 (lr=1.0648e-03) (hash(x)=76374071)
+ 3350 train 4.025647 (lr=1.0529e-03) (hash(x)=80474064)
+ 3360 train 3.936754 (lr=1.0410e-03) (hash(x)=84390892)
+ 3370 train 3.982184 (lr=1.0292e-03) (hash(x)=83399949)
+ 3380 train 4.038645 (lr=1.0175e-03) (hash(x)=77648059)
+ 3390 train 4.006914 (lr=1.0059e-03) (hash(x)=80152701)
+ 3400 val loss 3.9901
+ 3400 val perplexity 54.0584
+ 3400 train 4.054577 (lr=9.9442e-04) (hash(x)=81952545)
+ 3410 train 4.015197 (lr=9.8299e-04) (hash(x)=80908993)
+ 3420 train 4.055999 (lr=9.7165e-04) (hash(x)=79610037)
+ 3430 train 3.913090 (lr=9.6040e-04) (hash(x)=87624382)
+ 3440 train 3.980425 (lr=9.4924e-04) (hash(x)=82336381)
+ 3450 train 3.957674 (lr=9.3818e-04) (hash(x)=87687835)
+ 3460 train 3.958360 (lr=9.2721e-04) (hash(x)=76376135)
+ 3470 train 3.872755 (lr=9.1633e-04) (hash(x)=79331391)
+ 3480 train 3.777411 (lr=9.0555e-04) (hash(x)=107002681)
+ 3490 train 3.858063 (lr=8.9486e-04) (hash(x)=77465514)
+ 3500 val loss 3.9866
+ 3500 val perplexity 53.8725
+ 3500 train 3.820212 (lr=8.8427e-04) (hash(x)=88237229)
+ 3510 train 3.888991 (lr=8.7377e-04) (hash(x)=81011739)
+ 3520 train 3.937476 (lr=8.6337e-04) (hash(x)=64643427)
+ 3530 train 4.219550 (lr=8.5307e-04) (hash(x)=78029539)
+ 3540 train 4.041986 (lr=8.4288e-04) (hash(x)=83188968)
+ 3550 train 4.063608 (lr=8.3278e-04) (hash(x)=86104185)
+ 3560 train 3.989103 (lr=8.2278e-04) (hash(x)=81993629)
+ 3570 train 3.961450 (lr=8.1288e-04) (hash(x)=72719368)
+ 3580 train 3.970108 (lr=8.0308e-04) (hash(x)=90091487)
+ 3590 train 3.857526 (lr=7.9339e-04) (hash(x)=77393152)
+ 3600 val loss 3.9657
+ 3600 val perplexity 52.7559
+ 3600 train 3.964663 (lr=7.8380e-04) (hash(x)=73103504)
+ 3610 train 3.971438 (lr=7.7432e-04) (hash(x)=84429400)
+ 3620 train 3.948587 (lr=7.6494e-04) (hash(x)=77021795)
+ 3630 train 3.897702 (lr=7.5567e-04) (hash(x)=92830605)
+ 3640 train 4.096931 (lr=7.4650e-04) (hash(x)=78313175)
+ 3650 train 3.894235 (lr=7.3744e-04) (hash(x)=85395549)
+ 3660 train 3.689048 (lr=7.2849e-04) (hash(x)=78114459)
+ 3670 train 3.816170 (lr=7.1964e-04) (hash(x)=74968316)
+ 3680 train 3.739744 (lr=7.1091e-04) (hash(x)=73358737)
+ 3690 train 3.848307 (lr=7.0228e-04) (hash(x)=76399442)
+ 3700 val loss 3.9654
+ 3700 val perplexity 52.7406
+ 3700 train 3.850952 (lr=6.9377e-04) (hash(x)=74039273)
+ 3710 train 4.025910 (lr=6.8536e-04) (hash(x)=79299680)
+ 3720 train 3.939267 (lr=6.7707e-04) (hash(x)=94701498)
+ 3730 train 4.016736 (lr=6.6889e-04) (hash(x)=75352071)
+ 3740 train 3.900523 (lr=6.6082e-04) (hash(x)=77909487)
+ 3750 train 4.006707 (lr=6.5287e-04) (hash(x)=73986730)
+ 3760 train 3.958333 (lr=6.4503e-04) (hash(x)=79325763)
+ 3770 train 4.009053 (lr=6.3731e-04) (hash(x)=72457818)
+ 3780 train 4.134195 (lr=6.2970e-04) (hash(x)=71775590)
+ 3790 train 4.023207 (lr=6.2220e-04) (hash(x)=82638943)
+ 3800 val loss 3.9483
+ 3800 val perplexity 51.8492
+ 3800 train 4.006841 (lr=6.1482e-04) (hash(x)=79965893)
+ 3810 train 3.937461 (lr=6.0756e-04) (hash(x)=72598235)
+ 3820 train 3.920625 (lr=6.0042e-04) (hash(x)=83113889)
+ 3830 train 3.981739 (lr=5.9339e-04) (hash(x)=74434590)
+ 3840 train 3.895181 (lr=5.8648e-04) (hash(x)=82860348)
+ 3850 train 3.835139 (lr=5.7970e-04) (hash(x)=78067565)
+ 3860 train 3.898750 (lr=5.7303e-04) (hash(x)=82592498)
+ 3870 train 3.830106 (lr=5.6648e-04) (hash(x)=81820733)
+ 3880 train 3.847902 (lr=5.6005e-04) (hash(x)=87709040)
+ 3890 train 3.889149 (lr=5.5374e-04) (hash(x)=70379093)
+ 3900 val loss 3.9410
+ 3900 val perplexity 51.4700
+ 3900 train 4.016358 (lr=5.4755e-04) (hash(x)=76597431)
+ 3910 train 3.900545 (lr=5.4149e-04) (hash(x)=90490716)
+ 3920 train 4.005455 (lr=5.3554e-04) (hash(x)=81970659)
+ 3930 train 3.986632 (lr=5.2972e-04) (hash(x)=81496334)
+ 3940 train 3.967860 (lr=5.2402e-04) (hash(x)=75717605)
+ 3950 train 3.916333 (lr=5.1845e-04) (hash(x)=82749357)
+ 3960 train 3.942010 (lr=5.1300e-04) (hash(x)=89355157)
+ 3970 train 3.897540 (lr=5.0767e-04) (hash(x)=78980403)
+ 3980 train 3.873543 (lr=5.0247e-04) (hash(x)=76627217)
+ 3990 train 3.872087 (lr=4.9740e-04) (hash(x)=72412879)
+ 4000 val loss 3.9350
+ 4000 val perplexity 51.1637
+ 4000 train 3.880417 (lr=4.9245e-04) (hash(x)=83018142)
+ 4010 train 3.895417 (lr=4.8762e-04) (hash(x)=81272436)
+ 4020 train 3.824379 (lr=4.8292e-04) (hash(x)=85497482)
+ 4030 train 3.896827 (lr=4.7835e-04) (hash(x)=85241734)
+ 4040 train 3.866812 (lr=4.7391e-04) (hash(x)=77925307)
+ 4050 train 3.866089 (lr=4.6959e-04) (hash(x)=84826179)
+ 4060 train 3.919898 (lr=4.6540e-04) (hash(x)=83606764)
+ 4070 train 3.836742 (lr=4.6133e-04) (hash(x)=80567590)
+ 4080 train 3.888546 (lr=4.5740e-04) (hash(x)=76860998)
+ 4090 train 3.925953 (lr=4.5359e-04) (hash(x)=74902328)
+ 4100 val loss 3.9224
+ 4100 val perplexity 50.5201
+ 4100 train 3.967728 (lr=4.4991e-04) (hash(x)=82832041)
+ 4110 train 3.847012 (lr=4.4637e-04) (hash(x)=79143262)
+ 4120 train 3.876610 (lr=4.4295e-04) (hash(x)=77038149)
+ 4130 train 4.059258 (lr=4.3966e-04) (hash(x)=86339074)
+ 4140 train 4.119776 (lr=4.3650e-04) (hash(x)=76686216)
+ 4150 train 3.893715 (lr=4.3347e-04) (hash(x)=70522682)
+ 4160 train 3.858359 (lr=4.3056e-04) (hash(x)=90958555)
+ 4170 train 3.886288 (lr=4.2780e-04) (hash(x)=91463532)
+ 4180 train 3.871219 (lr=4.2516e-04) (hash(x)=81959329)
+ 4190 train 3.829409 (lr=4.2265e-04) (hash(x)=83146752)
+ 4200 val loss 3.9259
+ 4200 val perplexity 50.6963
+ 4200 train 3.815902 (lr=4.2027e-04) (hash(x)=78361715)
+ 4210 train 3.961839 (lr=4.1802e-04) (hash(x)=87364889)
+ 4220 train 3.819268 (lr=4.1591e-04) (hash(x)=70465156)
+ 4230 train 3.969079 (lr=4.1392e-04) (hash(x)=84524081)
+ 4240 train 3.924062 (lr=4.1207e-04) (hash(x)=77824868)
+ 4250 train 3.937927 (lr=4.1035e-04) (hash(x)=81710711)
+ 4260 train 3.920286 (lr=4.0876e-04) (hash(x)=76362728)
+ 4270 train 3.909662 (lr=4.0731e-04) (hash(x)=83115208)
+ 4280 train 3.823408 (lr=4.0598e-04) (hash(x)=87218314)
+ 4290 train 3.982368 (lr=4.0479e-04) (hash(x)=74582673)
+ 4300 val loss 3.9190
+ 4300 val perplexity 50.3512
+ 4300 train 3.858708 (lr=4.0373e-04) (hash(x)=77379615)
+ 4310 train 3.835918 (lr=4.0280e-04) (hash(x)=78669579)
+ 4320 train 3.950632 (lr=4.0201e-04) (hash(x)=83066608)
+ 4330 train 3.913462 (lr=4.0134e-04) (hash(x)=83037340)
+ 4340 train 3.946310 (lr=4.0081e-04) (hash(x)=82849771)
+ 4350 train 3.891886 (lr=4.0041e-04) (hash(x)=76693985)
+ 4360 train 3.976914 (lr=4.0015e-04) (hash(x)=77745394)
+ 4370 train 3.938921 (lr=4.0002e-04) (hash(x)=79954388)
+ 4374 val loss 3.9123
+ 4374 val perplexity 50.0115
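Two regularities in the log are worth noting: each "val perplexity" line is exp of the matching "val loss" (e.g. exp(3.9123) ~= 50.01 at the final step 4374), and the lr trace shows a linear warmup that reaches the configured max_lr of 4.0e-3 around step 720, followed by a decay back to roughly a tenth of that. A minimal parsing sketch follows; the regexes are our reading of the line format above, not tooling from the training script:

import math
import re

# Collect (step -> val loss) and (step -> val perplexity) from the log.
val_loss_re = re.compile(r"^(\d+) val loss ([\d.]+)$")
val_ppl_re = re.compile(r"^(\d+) val perplexity ([\d.]+)$")

losses, ppls = {}, {}
with open("12_head_baseline_lr_40e-4_head_dim_22/log2.txt") as f:
    for raw in f:
        line = raw.strip()
        if m := val_loss_re.match(line):
            losses[int(m.group(1))] = float(m.group(2))
        elif m := val_ppl_re.match(line):
            ppls[int(m.group(1))] = float(m.group(2))

# Perplexity should equal exp(loss) up to the 4-decimal logging precision.
for step, loss in losses.items():
    assert abs(math.exp(loss) - ppls[step]) / ppls[step] < 1e-3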
12_head_baseline_lr_40e-4_head_dim_22/model_04374.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b6b613bae06f9301acbb99a78feceeca95f1324ca9356d42375ab5fd83ac7143
+ size 96858242
12_head_baseline_lr_40e-4_head_dim_22/optimizer_04374.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ce76ba7cfdc342afe6fad6b666a714f99e0dcfbe5bc605527c002ba287fdca5c
+ size 187435910
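Since the commit message says the folder was uploaded with huggingface_hub, the LFS-backed checkpoints can be fetched with the same library. A sketch under stated assumptions: REPO_ID is a placeholder (the repository id is not visible in this diff), and the checkpoint's internal structure is not specified here.

# Download one LFS-backed file and load it on CPU. REPO_ID is a placeholder.
from huggingface_hub import hf_hub_download
import torch

path = hf_hub_download(
    repo_id="REPO_ID",
    filename="12_head_baseline_lr_40e-4_head_dim_22/model_04374.pt",
)
state = torch.load(path, map_location="cpu")  # ~97 MB per the pointer's size field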