andrew-healey committed
Commit 968e9da · verified · 1 parent: 196ecd8

Upload folder using huggingface_hub
12_head_baseline_lr_8e-4_head_dim_22_fixed/args.json ADDED
@@ -0,0 +1 @@
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "logs/repro_selective_pattern_rankings/12_head_baseline_lr_8e-4_head_dim_22_fixed", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 12, "n_embd": 264, "head_dim": 22, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 4375, "warmup_steps": 250, "group": "repro_selective_pattern_rankings", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1339, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 64, "total_batch_size": 131072, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.0008, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "12_head_baseline_lr_8e-4_fixed"}
12_head_baseline_lr_8e-4_head_dim_22_fixed/dataloader_04374.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8ec6682852ed60a9d42cb4047300f65a04f87328c0ad7a4516be84d11b28f216
+ size 964
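
The three .pt entries in this commit are Git LFS pointer files, not the binary payloads: each records the spec version, a sha256 oid, and the byte size of the real blob. A minimal sketch of checking a downloaded blob against its pointer (`parse_lfs_pointer` and `verify_blob` are hypothetical helpers, not huggingface_hub API):

```python
import hashlib
from pathlib import Path

def parse_lfs_pointer(path: str) -> dict:
    """Split the 'key value' lines of a Git LFS pointer file into a dict."""
    fields = dict(line.split(" ", 1)
                  for line in Path(path).read_text().splitlines() if line)
    algo, digest = fields["oid"].split(":", 1)
    return {"version": fields["version"], "algo": algo,
            "digest": digest, "size": int(fields["size"])}

def verify_blob(blob_path: str, pointer: dict) -> bool:
    """True iff the blob matches the pointer's recorded size and sha256."""
    data = Path(blob_path).read_bytes()
    return (len(data) == pointer["size"]
            and hashlib.sha256(data).hexdigest() == pointer["digest"])

# e.g. parse_lfs_pointer(".../dataloader_04374.pt") -> size 964, sha256 8ec66828...
```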
12_head_baseline_lr_8e-4_head_dim_22_fixed/log2.txt ADDED
@@ -0,0 +1,529 @@
+ max_steps: 4375
+ 0 val loss 11.4043
+ 0 val perplexity 89707.0391
+ 0 train 11.388923 (lr=1.1189e-06) (hash(x)=93356070)
+ 10 train 10.462421 (lr=1.2308e-05) (hash(x)=91253010)
+ 20 train 9.955700 (lr=2.3497e-05) (hash(x)=74934453)
+ 30 train 9.740845 (lr=3.4685e-05) (hash(x)=79278034)
+ 40 train 9.385580 (lr=4.5874e-05) (hash(x)=80107892)
+ 50 train 8.995117 (lr=5.7063e-05) (hash(x)=70745428)
+ 60 train 8.666317 (lr=6.8252e-05) (hash(x)=80077589)
+ 70 train 8.131595 (lr=7.9441e-05) (hash(x)=76213766)
+ 80 train 7.883564 (lr=9.0629e-05) (hash(x)=83218328)
+ 90 train 7.670492 (lr=1.0182e-04) (hash(x)=74855845)
+ 100 val loss 7.5677
+ 100 val perplexity 1934.7378
+ 100 train 7.439291 (lr=1.1301e-04) (hash(x)=82814902)
+ 110 train 7.377951 (lr=1.2420e-04) (hash(x)=84907741)
+ 120 train 7.278985 (lr=1.3538e-04) (hash(x)=82613223)
+ 130 train 7.124608 (lr=1.4657e-04) (hash(x)=83540876)
+ 140 train 7.105237 (lr=1.5776e-04) (hash(x)=75095216)
+ 150 train 7.163971 (lr=1.6895e-04) (hash(x)=97190944)
+ 160 train 7.069863 (lr=1.8014e-04) (hash(x)=82117809)
+ 170 train 6.954305 (lr=1.9133e-04) (hash(x)=70514724)
+ 180 train 6.854761 (lr=2.0252e-04) (hash(x)=81029624)
+ 190 train 6.748855 (lr=2.1371e-04) (hash(x)=78996841)
+ 200 val loss 6.6903
+ 200 val perplexity 804.6004
+ 200 train 6.695497 (lr=2.2490e-04) (hash(x)=79845097)
+ 210 train 6.598349 (lr=2.3608e-04) (hash(x)=78997683)
+ 220 train 6.464209 (lr=2.4727e-04) (hash(x)=74895865)
+ 230 train 6.621018 (lr=2.5846e-04) (hash(x)=80933276)
+ 240 train 6.331894 (lr=2.6965e-04) (hash(x)=77664606)
+ 250 train 6.348096 (lr=2.8084e-04) (hash(x)=75903930)
+ 260 train 6.254451 (lr=2.9203e-04) (hash(x)=82996853)
+ 270 train 6.183151 (lr=3.0322e-04) (hash(x)=73269568)
+ 280 train 6.121641 (lr=3.1441e-04) (hash(x)=82906427)
+ 290 train 6.040687 (lr=3.2559e-04) (hash(x)=71797895)
+ 300 val loss 6.1844
+ 300 val perplexity 485.1360
+ 300 train 5.911830 (lr=3.3678e-04) (hash(x)=85232249)
+ 310 train 5.850702 (lr=3.4797e-04) (hash(x)=75252489)
+ 320 train 5.953071 (lr=3.5916e-04) (hash(x)=74394644)
+ 330 train 6.002592 (lr=3.7035e-04) (hash(x)=82772910)
+ 340 train 6.045619 (lr=3.8154e-04) (hash(x)=81627464)
+ 350 train 6.028246 (lr=3.9273e-04) (hash(x)=84678053)
+ 360 train 5.967566 (lr=4.0392e-04) (hash(x)=81884128)
+ 370 train 5.959550 (lr=4.1510e-04) (hash(x)=76379242)
+ 380 train 5.737064 (lr=4.2629e-04) (hash(x)=98182875)
+ 390 train 5.732515 (lr=4.3748e-04) (hash(x)=79710436)
+ 400 val loss 5.7809
+ 400 val perplexity 324.0513
+ 400 train 5.733783 (lr=4.4867e-04) (hash(x)=79841071)
+ 410 train 5.654058 (lr=4.5986e-04) (hash(x)=75844151)
+ 420 train 5.652066 (lr=4.7105e-04) (hash(x)=73125036)
+ 430 train 5.688927 (lr=4.8224e-04) (hash(x)=84214858)
+ 440 train 5.576507 (lr=4.9343e-04) (hash(x)=80456994)
+ 450 train 5.543094 (lr=5.0462e-04) (hash(x)=76735962)
+ 460 train 5.481975 (lr=5.1580e-04) (hash(x)=81845446)
+ 470 train 5.316375 (lr=5.2699e-04) (hash(x)=76094689)
+ 480 train 5.416342 (lr=5.3818e-04) (hash(x)=83806686)
+ 490 train 5.242144 (lr=5.4937e-04) (hash(x)=84690227)
+ 500 val loss 5.5484
+ 500 val perplexity 256.8339
+ 500 train 5.310766 (lr=5.6056e-04) (hash(x)=71851938)
+ 510 train 5.387978 (lr=5.7175e-04) (hash(x)=77159346)
+ 520 train 5.454401 (lr=5.8294e-04) (hash(x)=80755753)
+ 530 train 5.543831 (lr=5.9413e-04) (hash(x)=75379679)
+ 540 train 5.463854 (lr=6.0531e-04) (hash(x)=82458619)
+ 550 train 5.437819 (lr=6.1650e-04) (hash(x)=86366396)
+ 560 train 5.414557 (lr=6.2769e-04) (hash(x)=85095044)
+ 570 train 5.398359 (lr=6.3888e-04) (hash(x)=78385159)
+ 580 train 5.349250 (lr=6.5007e-04) (hash(x)=79342394)
+ 590 train 5.349400 (lr=6.6126e-04) (hash(x)=70782192)
+ 600 val loss 5.3490
+ 600 val perplexity 210.3880
+ 600 train 5.324270 (lr=6.7245e-04) (hash(x)=76130353)
+ 610 train 5.223341 (lr=6.8364e-04) (hash(x)=74778440)
+ 620 train 5.211050 (lr=6.9483e-04) (hash(x)=79129709)
+ 630 train 5.114326 (lr=7.0601e-04) (hash(x)=76469962)
+ 640 train 5.111544 (lr=7.1720e-04) (hash(x)=78288049)
+ 650 train 5.070465 (lr=7.2839e-04) (hash(x)=76641388)
+ 660 train 4.989712 (lr=7.3958e-04) (hash(x)=75937906)
+ 670 train 4.988499 (lr=7.5077e-04) (hash(x)=74807157)
+ 680 train 4.895653 (lr=7.6196e-04) (hash(x)=77490144)
+ 690 train 4.934090 (lr=7.7315e-04) (hash(x)=72900124)
+ 700 val loss 5.1880
+ 700 val perplexity 179.1159
+ 700 train 4.891424 (lr=7.8434e-04) (hash(x)=73424218)
+ 710 train 5.172663 (lr=7.9552e-04) (hash(x)=86052345)
+ 720 train 5.129795 (lr=8.0000e-04) (hash(x)=77462613)
+ 730 train 5.138889 (lr=7.9997e-04) (hash(x)=79027471)
+ 740 train 5.111305 (lr=7.9992e-04) (hash(x)=78149992)
+ 750 train 5.089380 (lr=7.9984e-04) (hash(x)=89147499)
+ 760 train 5.078344 (lr=7.9973e-04) (hash(x)=74931538)
+ 770 train 5.028081 (lr=7.9960e-04) (hash(x)=89648204)
+ 780 train 5.072010 (lr=7.9944e-04) (hash(x)=83708293)
+ 790 train 5.080956 (lr=7.9925e-04) (hash(x)=84336512)
+ 800 val loss 5.0401
+ 800 val perplexity 154.4910
+ 800 train 4.961507 (lr=7.9904e-04) (hash(x)=75025285)
+ 810 train 4.890038 (lr=7.9880e-04) (hash(x)=79436536)
+ 820 train 4.874693 (lr=7.9854e-04) (hash(x)=90467390)
+ 830 train 4.930626 (lr=7.9825e-04) (hash(x)=77292020)
+ 840 train 4.839594 (lr=7.9793e-04) (hash(x)=75568927)
+ 850 train 4.856942 (lr=7.9759e-04) (hash(x)=79671781)
+ 860 train 4.948665 (lr=7.9722e-04) (hash(x)=84280943)
+ 870 train 4.911723 (lr=7.9682e-04) (hash(x)=80901294)
+ 880 train 5.053764 (lr=7.9640e-04) (hash(x)=81437584)
+ 890 train 4.903955 (lr=7.9595e-04) (hash(x)=73893778)
+ 900 val loss 4.9112
+ 900 val perplexity 135.8005
+ 900 train 4.828310 (lr=7.9547e-04) (hash(x)=74987794)
+ 910 train 4.853667 (lr=7.9497e-04) (hash(x)=93721374)
+ 920 train 4.839195 (lr=7.9444e-04) (hash(x)=79149678)
+ 930 train 4.785468 (lr=7.9389e-04) (hash(x)=83179387)
+ 940 train 4.787247 (lr=7.9331e-04) (hash(x)=89009978)
+ 950 train 4.741975 (lr=7.9270e-04) (hash(x)=76483735)
+ 960 train 4.679790 (lr=7.9207e-04) (hash(x)=78638850)
+ 970 train 4.705347 (lr=7.9141e-04) (hash(x)=90415206)
+ 980 train 4.637120 (lr=7.9073e-04) (hash(x)=90819195)
+ 990 train 4.774509 (lr=7.9002e-04) (hash(x)=85938852)
+ 1000 val loss 4.7558
+ 1000 val perplexity 116.2588
+ 1000 train 4.811062 (lr=7.8928e-04) (hash(x)=82436789)
+ 1010 train 4.803025 (lr=7.8852e-04) (hash(x)=63022149)
+ 1020 train 4.752833 (lr=7.8773e-04) (hash(x)=83501199)
+ 1030 train 4.726783 (lr=7.8692e-04) (hash(x)=77093326)
+ 1040 train 4.589956 (lr=7.8608e-04) (hash(x)=73437559)
+ 1050 train 4.687597 (lr=7.8522e-04) (hash(x)=84550388)
+ 1060 train 4.686138 (lr=7.8433e-04) (hash(x)=86184566)
+ 1070 train 4.673692 (lr=7.8342e-04) (hash(x)=85644922)
+ 1080 train 4.606560 (lr=7.8248e-04) (hash(x)=86179801)
+ 1090 train 4.577961 (lr=7.8151e-04) (hash(x)=82868303)
+ 1100 val loss 4.6420
+ 1100 val perplexity 103.7554
+ 1100 train 4.677928 (lr=7.8052e-04) (hash(x)=96780388)
+ 1110 train 4.608566 (lr=7.7951e-04) (hash(x)=87223122)
+ 1120 train 4.575022 (lr=7.7847e-04) (hash(x)=77292786)
+ 1130 train 4.594349 (lr=7.7740e-04) (hash(x)=88761375)
+ 1140 train 4.508932 (lr=7.7631e-04) (hash(x)=80956468)
+ 1150 train 4.687494 (lr=7.7520e-04) (hash(x)=84725462)
+ 1160 train 4.656981 (lr=7.7406e-04) (hash(x)=76098113)
+ 1170 train 4.631224 (lr=7.7289e-04) (hash(x)=84387685)
+ 1180 train 4.569960 (lr=7.7170e-04) (hash(x)=76120321)
+ 1190 train 4.530944 (lr=7.7049e-04) (hash(x)=85974065)
+ 1200 val loss 4.5507
+ 1200 val perplexity 94.6945
+ 1200 train 4.429749 (lr=7.6925e-04) (hash(x)=79340644)
+ 1210 train 4.521499 (lr=7.6799e-04) (hash(x)=88345025)
+ 1220 train 4.438153 (lr=7.6670e-04) (hash(x)=86406230)
+ 1230 train 4.421066 (lr=7.6539e-04) (hash(x)=80608975)
+ 1240 train 4.475980 (lr=7.6406e-04) (hash(x)=81635225)
+ 1250 train 4.315457 (lr=7.6270e-04) (hash(x)=82126376)
+ 1260 train 4.481150 (lr=7.6132e-04) (hash(x)=83737972)
+ 1270 train 4.388452 (lr=7.5992e-04) (hash(x)=81132345)
+ 1280 train 4.482712 (lr=7.5849e-04) (hash(x)=85850404)
+ 1290 train 4.494557 (lr=7.5704e-04) (hash(x)=83490640)
+ 1300 val loss 4.4970
+ 1300 val perplexity 89.7513
+ 1300 train 4.463191 (lr=7.5556e-04) (hash(x)=77545187)
+ 1310 train 4.511926 (lr=7.5406e-04) (hash(x)=86412685)
+ 1320 train 4.402415 (lr=7.5254e-04) (hash(x)=86429640)
+ 1330 train 4.545220 (lr=7.5099e-04) (hash(x)=79645281)
+ 1340 train 4.501055 (lr=7.4943e-04) (hash(x)=82668541)
+ 1350 train 4.353905 (lr=7.4784e-04) (hash(x)=73627248)
+ 1360 train 4.444376 (lr=7.4622e-04) (hash(x)=74845133)
+ 1370 train 4.358597 (lr=7.4459e-04) (hash(x)=77850497)
+ 1380 train 4.382380 (lr=7.4293e-04) (hash(x)=76143954)
+ 1390 train 4.391551 (lr=7.4125e-04) (hash(x)=80675544)
+ 1400 val loss 4.4580
+ 1400 val perplexity 86.3124
+ 1400 train 4.365345 (lr=7.3954e-04) (hash(x)=76010938)
+ 1410 train 4.420878 (lr=7.3782e-04) (hash(x)=81260300)
+ 1420 train 4.521589 (lr=7.3607e-04) (hash(x)=83803629)
+ 1430 train 4.358571 (lr=7.3430e-04) (hash(x)=83840110)
+ 1440 train 4.554413 (lr=7.3251e-04) (hash(x)=97711831)
+ 1450 train 4.424284 (lr=7.3070e-04) (hash(x)=81110139)
+ 1460 train 4.337059 (lr=7.2887e-04) (hash(x)=90942839)
+ 1470 train 4.376791 (lr=7.2701e-04) (hash(x)=80721564)
+ 1480 train 4.474306 (lr=7.2514e-04) (hash(x)=77852059)
+ 1490 train 4.370626 (lr=7.2324e-04) (hash(x)=80011365)
+ 1500 val loss 4.4096
+ 1500 val perplexity 82.2353
+ 1500 train 4.392446 (lr=7.2132e-04) (hash(x)=84496142)
+ 1510 train 4.334968 (lr=7.1938e-04) (hash(x)=70186729)
+ 1520 train 4.360988 (lr=7.1742e-04) (hash(x)=74854227)
+ 1530 train 4.262049 (lr=7.1544e-04) (hash(x)=77468161)
+ 1540 train 4.280844 (lr=7.1344e-04) (hash(x)=87853059)
+ 1550 train 4.203699 (lr=7.1142e-04) (hash(x)=71225436)
+ 1560 train 4.309296 (lr=7.0939e-04) (hash(x)=84072783)
+ 1570 train 4.365957 (lr=7.0733e-04) (hash(x)=72723098)
+ 1580 train 4.384117 (lr=7.0525e-04) (hash(x)=90409866)
+ 1590 train 4.446133 (lr=7.0315e-04) (hash(x)=87481378)
+ 1600 val loss 4.3710
+ 1600 val perplexity 79.1224
+ 1600 train 4.429656 (lr=7.0103e-04) (hash(x)=77643862)
+ 1610 train 4.321773 (lr=6.9889e-04) (hash(x)=88862575)
+ 1620 train 4.329190 (lr=6.9673e-04) (hash(x)=84612581)
+ 1630 train 4.367182 (lr=6.9456e-04) (hash(x)=87075989)
+ 1640 train 4.252344 (lr=6.9236e-04) (hash(x)=88277361)
+ 1650 train 4.364820 (lr=6.9015e-04) (hash(x)=78750236)
+ 1660 train 4.291776 (lr=6.8792e-04) (hash(x)=82604581)
+ 1670 train 4.119716 (lr=6.8567e-04) (hash(x)=68482265)
+ 1680 train 4.250303 (lr=6.8340e-04) (hash(x)=75088835)
+ 1690 train 4.195225 (lr=6.8112e-04) (hash(x)=66060989)
+ 1700 val loss 4.3646
+ 1700 val perplexity 78.6181
+ 1700 train 4.359484 (lr=6.7881e-04) (hash(x)=79986754)
+ 1710 train 4.350958 (lr=6.7649e-04) (hash(x)=83657930)
+ 1720 train 4.292342 (lr=6.7415e-04) (hash(x)=81754135)
+ 1730 train 4.368599 (lr=6.7180e-04) (hash(x)=78904427)
+ 1740 train 4.277336 (lr=6.6943e-04) (hash(x)=85920177)
+ 1750 train 4.320709 (lr=6.6704e-04) (hash(x)=86573211)
+ 1760 train 4.279339 (lr=6.6463e-04) (hash(x)=81737128)
+ 1770 train 4.268407 (lr=6.6221e-04) (hash(x)=77400968)
+ 1780 train 4.284335 (lr=6.5977e-04) (hash(x)=73545497)
+ 1790 train 4.525265 (lr=6.5731e-04) (hash(x)=71641943)
+ 1800 val loss 4.3151
+ 1800 val perplexity 74.8181
+ 1800 train 4.376225 (lr=6.5484e-04) (hash(x)=87819781)
+ 1810 train 4.233324 (lr=6.5235e-04) (hash(x)=86870770)
+ 1820 train 4.244097 (lr=6.4985e-04) (hash(x)=82522211)
+ 1830 train 4.325108 (lr=6.4733e-04) (hash(x)=61947437)
+ 1840 train 4.209496 (lr=6.4480e-04) (hash(x)=79865406)
+ 1850 train 4.174800 (lr=6.4225e-04) (hash(x)=79828721)
+ 1860 train 4.318232 (lr=6.3968e-04) (hash(x)=80869571)
+ 1870 train 4.301679 (lr=6.3711e-04) (hash(x)=73780971)
+ 1880 train 4.265572 (lr=6.3451e-04) (hash(x)=79249549)
+ 1890 train 4.319492 (lr=6.3191e-04) (hash(x)=81041904)
+ 1900 val loss 4.2906
+ 1900 val perplexity 73.0124
+ 1900 train 4.389087 (lr=6.2928e-04) (hash(x)=82456430)
+ 1910 train 4.153023 (lr=6.2665e-04) (hash(x)=82222135)
+ 1920 train 4.193025 (lr=6.2400e-04) (hash(x)=70033249)
+ 1930 train 4.173045 (lr=6.2134e-04) (hash(x)=72887360)
+ 1940 train 4.225553 (lr=6.1866e-04) (hash(x)=83251100)
+ 1950 train 4.266335 (lr=6.1597e-04) (hash(x)=79660266)
+ 1960 train 4.140210 (lr=6.1327e-04) (hash(x)=78919068)
+ 1970 train 4.237479 (lr=6.1055e-04) (hash(x)=94690431)
+ 1980 train 4.085918 (lr=6.0783e-04) (hash(x)=88153756)
+ 1990 train 4.129201 (lr=6.0509e-04) (hash(x)=77195688)
+ 2000 val loss 4.2654
+ 2000 val perplexity 71.1960
+ 2000 train 4.233664 (lr=6.0233e-04) (hash(x)=81308591)
+ 2010 train 4.278122 (lr=5.9957e-04) (hash(x)=80205479)
+ 2020 train 4.193841 (lr=5.9679e-04) (hash(x)=81008704)
+ 2030 train 4.258300 (lr=5.9401e-04) (hash(x)=82914358)
+ 2040 train 4.270494 (lr=5.9121e-04) (hash(x)=88064399)
+ 2050 train 4.273919 (lr=5.8840e-04) (hash(x)=82889390)
+ 2060 train 4.344082 (lr=5.8558e-04) (hash(x)=80913554)
+ 2070 train 4.184629 (lr=5.8275e-04) (hash(x)=78121791)
+ 2080 train 4.319590 (lr=5.7990e-04) (hash(x)=75356657)
+ 2090 train 4.145003 (lr=5.7705e-04) (hash(x)=74048412)
+ 2100 val loss 4.2389
+ 2100 val perplexity 69.3323
+ 2100 train 4.062921 (lr=5.7419e-04) (hash(x)=68928225)
+ 2110 train 4.116064 (lr=5.7132e-04) (hash(x)=93734745)
+ 2120 train 4.113101 (lr=5.6844e-04) (hash(x)=72878164)
+ 2130 train 4.240356 (lr=5.6554e-04) (hash(x)=71508226)
+ 2140 train 4.196486 (lr=5.6264e-04) (hash(x)=84168671)
+ 2150 train 4.136520 (lr=5.5973e-04) (hash(x)=74673639)
+ 2160 train 4.284075 (lr=5.5681e-04) (hash(x)=75470031)
+ 2170 train 4.245594 (lr=5.5389e-04) (hash(x)=74307890)
+ 2180 train 4.359695 (lr=5.5095e-04) (hash(x)=77214245)
+ 2190 train 4.139488 (lr=5.4801e-04) (hash(x)=88628359)
+ 2200 val loss 4.2191
+ 2200 val perplexity 67.9702
+ 2200 train 4.151878 (lr=5.4506e-04) (hash(x)=74779126)
+ 2210 train 4.192382 (lr=5.4210e-04) (hash(x)=79817976)
+ 2220 train 4.142745 (lr=5.3913e-04) (hash(x)=75258996)
+ 2230 train 4.120797 (lr=5.3616e-04) (hash(x)=88164047)
+ 2240 train 4.098520 (lr=5.3317e-04) (hash(x)=78580686)
+ 2250 train 4.041303 (lr=5.3019e-04) (hash(x)=76510617)
+ 2260 train 4.118978 (lr=5.2719e-04) (hash(x)=76043966)
+ 2270 train 4.094014 (lr=5.2419e-04) (hash(x)=73062098)
+ 2280 train 4.321898 (lr=5.2118e-04) (hash(x)=81885909)
+ 2290 train 4.255442 (lr=5.1817e-04) (hash(x)=81481741)
+ 2300 val loss 4.1963
+ 2300 val perplexity 66.4379
+ 2300 train 4.117752 (lr=5.1515e-04) (hash(x)=82104275)
+ 2310 train 4.166573 (lr=5.1213e-04) (hash(x)=82137309)
+ 2320 train 4.142792 (lr=5.0910e-04) (hash(x)=86741167)
+ 2330 train 4.157535 (lr=5.0606e-04) (hash(x)=84796263)
+ 2340 train 4.136901 (lr=5.0302e-04) (hash(x)=169966529)
+ 2350 train 3.997656 (lr=4.9998e-04) (hash(x)=80006061)
+ 2360 train 4.062540 (lr=4.9693e-04) (hash(x)=71090999)
+ 2370 train 4.190221 (lr=4.9387e-04) (hash(x)=86641850)
+ 2380 train 4.092087 (lr=4.9082e-04) (hash(x)=84573253)
+ 2390 train 4.126458 (lr=4.8776e-04) (hash(x)=74483764)
+ 2400 val loss 4.1890
+ 2400 val perplexity 65.9553
+ 2400 train 4.099054 (lr=4.8469e-04) (hash(x)=78327659)
+ 2410 train 4.161429 (lr=4.8162e-04) (hash(x)=77273627)
+ 2420 train 4.022769 (lr=4.7855e-04) (hash(x)=76938049)
+ 2430 train 4.036468 (lr=4.7548e-04) (hash(x)=98449442)
+ 2440 train 3.888872 (lr=4.7240e-04) (hash(x)=78157797)
+ 2450 train 3.931483 (lr=4.6932e-04) (hash(x)=80637582)
+ 2460 train 3.918687 (lr=4.6624e-04) (hash(x)=80023854)
+ 2470 train 3.983843 (lr=4.6316e-04) (hash(x)=86678884)
+ 2480 train 4.202255 (lr=4.6008e-04) (hash(x)=84086469)
+ 2490 train 4.227712 (lr=4.5699e-04) (hash(x)=83131141)
+ 2500 val loss 4.1625
+ 2500 val perplexity 64.2324
+ 2500 train 4.140171 (lr=4.5390e-04) (hash(x)=82583497)
+ 2510 train 4.206031 (lr=4.5081e-04) (hash(x)=79164326)
+ 2520 train 4.106184 (lr=4.4772e-04) (hash(x)=76456503)
+ 2530 train 4.167851 (lr=4.4464e-04) (hash(x)=82001228)
+ 2540 train 4.205464 (lr=4.4155e-04) (hash(x)=75615595)
+ 2550 train 4.150397 (lr=4.3845e-04) (hash(x)=78397869)
+ 2560 train 4.151053 (lr=4.3536e-04) (hash(x)=75547032)
+ 2570 train 4.063576 (lr=4.3228e-04) (hash(x)=89201025)
+ 2580 train 4.049784 (lr=4.2919e-04) (hash(x)=89856704)
+ 2590 train 4.056904 (lr=4.2610e-04) (hash(x)=82175682)
+ 2600 val loss 4.1449
+ 2600 val perplexity 63.1142
+ 2600 train 4.062811 (lr=4.2301e-04) (hash(x)=78312826)
+ 2610 train 4.054043 (lr=4.1992e-04) (hash(x)=77066588)
+ 2620 train 3.898928 (lr=4.1684e-04) (hash(x)=78666061)
+ 2630 train 3.893532 (lr=4.1376e-04) (hash(x)=93762143)
+ 2640 train 3.911001 (lr=4.1068e-04) (hash(x)=83191587)
+ 2650 train 3.840140 (lr=4.0760e-04) (hash(x)=87169585)
+ 2660 train 4.049696 (lr=4.0452e-04) (hash(x)=86426388)
+ 2670 train 4.191960 (lr=4.0145e-04) (hash(x)=76692638)
+ 2680 train 4.100883 (lr=3.9838e-04) (hash(x)=77446063)
+ 2690 train 4.105713 (lr=3.9531e-04) (hash(x)=79809050)
+ 2700 val loss 4.1373
+ 2700 val perplexity 62.6346
+ 2700 train 4.034157 (lr=3.9224e-04) (hash(x)=83116823)
+ 2710 train 4.146717 (lr=3.8918e-04) (hash(x)=75622148)
+ 2720 train 4.071832 (lr=3.8613e-04) (hash(x)=80690512)
+ 2730 train 4.141748 (lr=3.8307e-04) (hash(x)=78009984)
+ 2740 train 4.097802 (lr=3.8002e-04) (hash(x)=74205488)
+ 2750 train 4.099316 (lr=3.7698e-04) (hash(x)=91013332)
+ 2760 train 4.024355 (lr=3.7394e-04) (hash(x)=76954961)
+ 2770 train 4.034565 (lr=3.7090e-04) (hash(x)=79390317)
+ 2780 train 3.900937 (lr=3.6787e-04) (hash(x)=70168783)
+ 2790 train 4.056646 (lr=3.6485e-04) (hash(x)=76028417)
+ 2800 val loss 4.1185
+ 2800 val perplexity 61.4661
+ 2800 train 3.889193 (lr=3.6183e-04) (hash(x)=77656050)
+ 2810 train 3.936734 (lr=3.5882e-04) (hash(x)=94248216)
+ 2820 train 4.001037 (lr=3.5581e-04) (hash(x)=78305078)
+ 2830 train 3.912551 (lr=3.5281e-04) (hash(x)=79948848)
+ 2840 train 3.784479 (lr=3.4981e-04) (hash(x)=85341024)
+ 2850 train 4.086265 (lr=3.4683e-04) (hash(x)=78735170)
+ 2860 train 4.337343 (lr=3.4384e-04) (hash(x)=71616419)
+ 2870 train 4.109067 (lr=3.4087e-04) (hash(x)=78656517)
+ 2880 train 4.122245 (lr=3.3790e-04) (hash(x)=80073987)
+ 2890 train 4.055164 (lr=3.3494e-04) (hash(x)=76894809)
+ 2900 val loss 4.0947
+ 2900 val perplexity 60.0190
+ 2900 train 4.031393 (lr=3.3199e-04) (hash(x)=80499838)
+ 2910 train 4.079880 (lr=3.2905e-04) (hash(x)=72673354)
+ 2920 train 4.062461 (lr=3.2611e-04) (hash(x)=84265768)
+ 2930 train 4.095368 (lr=3.2319e-04) (hash(x)=79612060)
+ 2940 train 3.943176 (lr=3.2027e-04) (hash(x)=74970087)
+ 2950 train 4.101684 (lr=3.1736e-04) (hash(x)=84166818)
+ 2960 train 4.089890 (lr=3.1446e-04) (hash(x)=89410221)
+ 2970 train 4.031025 (lr=3.1156e-04) (hash(x)=75672566)
+ 2980 train 3.884170 (lr=3.0868e-04) (hash(x)=81760314)
+ 2990 train 3.860748 (lr=3.0581e-04) (hash(x)=80605200)
+ 3000 val loss 4.0945
+ 3000 val perplexity 60.0080
+ 3000 train 3.860792 (lr=3.0295e-04) (hash(x)=83804735)
+ 3010 train 3.787617 (lr=3.0010e-04) (hash(x)=77015303)
+ 3020 train 3.858312 (lr=2.9725e-04) (hash(x)=81464523)
+ 3030 train 4.048865 (lr=2.9442e-04) (hash(x)=80168230)
+ 3040 train 3.996626 (lr=2.9160e-04) (hash(x)=84817006)
+ 3050 train 4.100596 (lr=2.8879e-04) (hash(x)=71601811)
+ 3060 train 4.015127 (lr=2.8599e-04) (hash(x)=85499733)
+ 3070 train 4.075488 (lr=2.8321e-04) (hash(x)=78670408)
+ 3080 train 3.963888 (lr=2.8043e-04) (hash(x)=77120468)
+ 3090 train 4.078160 (lr=2.7767e-04) (hash(x)=77927426)
+ 3100 val loss 4.0761
+ 3100 val perplexity 58.9171
+ 3100 train 4.089870 (lr=2.7491e-04) (hash(x)=83998606)
+ 3110 train 3.985148 (lr=2.7217e-04) (hash(x)=77990218)
+ 3120 train 3.901287 (lr=2.6945e-04) (hash(x)=81623970)
+ 3130 train 3.875212 (lr=2.6673e-04) (hash(x)=75014781)
+ 3140 train 3.929268 (lr=2.6403e-04) (hash(x)=72591250)
+ 3150 train 3.946052 (lr=2.6134e-04) (hash(x)=81421847)
+ 3160 train 3.992490 (lr=2.5866e-04) (hash(x)=75112631)
+ 3170 train 4.094281 (lr=2.5600e-04) (hash(x)=87518033)
+ 3180 train 3.943526 (lr=2.5335e-04) (hash(x)=78648348)
+ 3190 train 4.060004 (lr=2.5072e-04) (hash(x)=78270029)
+ 3200 val loss 4.0585
+ 3200 val perplexity 57.8863
+ 3200 train 4.136689 (lr=2.4809e-04) (hash(x)=83380714)
+ 3210 train 4.304782 (lr=2.4549e-04) (hash(x)=67804991)
+ 3220 train 4.043062 (lr=2.4289e-04) (hash(x)=76968804)
+ 3230 train 3.977912 (lr=2.4032e-04) (hash(x)=75798670)
+ 3240 train 4.049535 (lr=2.3775e-04) (hash(x)=83192811)
+ 3250 train 3.988622 (lr=2.3520e-04) (hash(x)=81659789)
+ 3260 train 4.029355 (lr=2.3267e-04) (hash(x)=80622502)
+ 3270 train 4.003949 (lr=2.3015e-04) (hash(x)=85436511)
+ 3280 train 3.941789 (lr=2.2765e-04) (hash(x)=80589180)
+ 3290 train 4.080757 (lr=2.2516e-04) (hash(x)=85747193)
+ 3300 val loss 4.0538
+ 3300 val perplexity 57.6185
+ 3300 train 3.948888 (lr=2.2269e-04) (hash(x)=74635692)
+ 3310 train 3.939957 (lr=2.2023e-04) (hash(x)=82045455)
+ 3320 train 3.806700 (lr=2.1779e-04) (hash(x)=73249173)
+ 3330 train 3.771123 (lr=2.1537e-04) (hash(x)=71492338)
+ 3340 train 4.098222 (lr=2.1296e-04) (hash(x)=76374071)
+ 3350 train 4.070264 (lr=2.1057e-04) (hash(x)=80474064)
+ 3360 train 3.984107 (lr=2.0820e-04) (hash(x)=84390892)
+ 3370 train 4.032338 (lr=2.0585e-04) (hash(x)=83399949)
+ 3380 train 4.079006 (lr=2.0351e-04) (hash(x)=77648059)
+ 3390 train 4.049698 (lr=2.0119e-04) (hash(x)=80152701)
+ 3400 val loss 4.0370
+ 3400 val perplexity 56.6555
+ 3400 train 4.099397 (lr=1.9888e-04) (hash(x)=81952545)
+ 3410 train 4.060771 (lr=1.9660e-04) (hash(x)=80908993)
+ 3420 train 4.100306 (lr=1.9433e-04) (hash(x)=79610037)
+ 3430 train 3.980257 (lr=1.9208e-04) (hash(x)=87624382)
+ 3440 train 4.027635 (lr=1.8985e-04) (hash(x)=82336381)
+ 3450 train 3.998418 (lr=1.8764e-04) (hash(x)=87687835)
+ 3460 train 4.004923 (lr=1.8544e-04) (hash(x)=76376135)
+ 3470 train 3.919445 (lr=1.8327e-04) (hash(x)=79331391)
+ 3480 train 3.831715 (lr=1.8111e-04) (hash(x)=107002681)
+ 3490 train 3.905044 (lr=1.7897e-04) (hash(x)=77465514)
+ 3500 val loss 4.0327
+ 3500 val perplexity 56.4119
+ 3500 train 3.871272 (lr=1.7685e-04) (hash(x)=88237229)
+ 3510 train 3.935917 (lr=1.7475e-04) (hash(x)=81011739)
+ 3520 train 3.979258 (lr=1.7267e-04) (hash(x)=64643427)
+ 3530 train 4.255639 (lr=1.7061e-04) (hash(x)=78029539)
+ 3540 train 4.095768 (lr=1.6858e-04) (hash(x)=83188968)
+ 3550 train 4.113892 (lr=1.6656e-04) (hash(x)=86104185)
+ 3560 train 4.040516 (lr=1.6456e-04) (hash(x)=81993629)
+ 3570 train 4.010998 (lr=1.6258e-04) (hash(x)=72719368)
+ 3580 train 4.022573 (lr=1.6062e-04) (hash(x)=90091487)
+ 3590 train 3.910327 (lr=1.5868e-04) (hash(x)=77393152)
+ 3600 val loss 4.0165
+ 3600 val perplexity 55.5089
+ 3600 train 4.018743 (lr=1.5676e-04) (hash(x)=73103504)
+ 3610 train 4.026032 (lr=1.5486e-04) (hash(x)=84429400)
+ 3620 train 3.998312 (lr=1.5299e-04) (hash(x)=77021795)
+ 3630 train 3.952541 (lr=1.5113e-04) (hash(x)=92830605)
+ 3640 train 4.142409 (lr=1.4930e-04) (hash(x)=78313175)
+ 3650 train 3.944538 (lr=1.4749e-04) (hash(x)=85395549)
+ 3660 train 3.742173 (lr=1.4570e-04) (hash(x)=78114459)
+ 3670 train 3.869202 (lr=1.4393e-04) (hash(x)=74968316)
+ 3680 train 3.796798 (lr=1.4218e-04) (hash(x)=73358737)
+ 3690 train 3.904264 (lr=1.4046e-04) (hash(x)=76399442)
+ 3700 val loss 4.0180
+ 3700 val perplexity 55.5926
+ 3700 train 3.904297 (lr=1.3875e-04) (hash(x)=74039273)
+ 3710 train 4.075643 (lr=1.3707e-04) (hash(x)=79299680)
+ 3720 train 3.994953 (lr=1.3541e-04) (hash(x)=94701498)
+ 3730 train 4.075052 (lr=1.3378e-04) (hash(x)=75352071)
+ 3740 train 3.953482 (lr=1.3216e-04) (hash(x)=77909487)
+ 3750 train 4.054442 (lr=1.3057e-04) (hash(x)=73986730)
+ 3760 train 4.009195 (lr=1.2901e-04) (hash(x)=79325763)
+ 3770 train 4.056023 (lr=1.2746e-04) (hash(x)=72457818)
+ 3780 train 4.198357 (lr=1.2594e-04) (hash(x)=71775590)
+ 3790 train 4.088856 (lr=1.2444e-04) (hash(x)=82638943)
+ 3800 val loss 4.0030
+ 3800 val perplexity 54.7611
+ 3800 train 4.051215 (lr=1.2296e-04) (hash(x)=79965893)
+ 3810 train 3.990251 (lr=1.2151e-04) (hash(x)=72598235)
+ 3820 train 3.971640 (lr=1.2008e-04) (hash(x)=83113889)
+ 3830 train 4.033205 (lr=1.1868e-04) (hash(x)=74434590)
+ 3840 train 3.948935 (lr=1.1730e-04) (hash(x)=82860348)
+ 3850 train 3.885401 (lr=1.1594e-04) (hash(x)=78067565)
+ 3860 train 3.953950 (lr=1.1461e-04) (hash(x)=82592498)
+ 3870 train 3.890995 (lr=1.1330e-04) (hash(x)=81820733)
+ 3880 train 3.904396 (lr=1.1201e-04) (hash(x)=87709040)
+ 3890 train 3.946584 (lr=1.1075e-04) (hash(x)=70379093)
+ 3900 val loss 3.9978
+ 3900 val perplexity 54.4802
+ 3900 train 4.078245 (lr=1.0951e-04) (hash(x)=76597431)
+ 3910 train 3.959372 (lr=1.0830e-04) (hash(x)=90490716)
+ 3920 train 4.055665 (lr=1.0711e-04) (hash(x)=81970659)
+ 3930 train 4.047575 (lr=1.0594e-04) (hash(x)=81496334)
+ 3940 train 4.018819 (lr=1.0480e-04) (hash(x)=75717605)
+ 3950 train 3.970653 (lr=1.0369e-04) (hash(x)=82749357)
+ 3960 train 4.004234 (lr=1.0260e-04) (hash(x)=89355157)
+ 3970 train 3.954911 (lr=1.0153e-04) (hash(x)=78980403)
+ 3980 train 3.928860 (lr=1.0049e-04) (hash(x)=76627217)
+ 3990 train 3.928572 (lr=9.9479e-05) (hash(x)=72412879)
+ 4000 val loss 3.9930
+ 4000 val perplexity 54.2161
+ 4000 train 3.938426 (lr=9.8489e-05) (hash(x)=83018142)
+ 4010 train 3.956633 (lr=9.7524e-05) (hash(x)=81272436)
+ 4020 train 3.887358 (lr=9.6585e-05) (hash(x)=85497482)
+ 4030 train 3.965064 (lr=9.5670e-05) (hash(x)=85241734)
+ 4040 train 3.925635 (lr=9.4781e-05) (hash(x)=77925307)
+ 4050 train 3.921662 (lr=9.3917e-05) (hash(x)=84826179)
+ 4060 train 3.984097 (lr=9.3079e-05) (hash(x)=83606764)
+ 4070 train 3.895870 (lr=9.2267e-05) (hash(x)=80567590)
+ 4080 train 3.945748 (lr=9.1480e-05) (hash(x)=76860998)
+ 4090 train 3.983865 (lr=9.0718e-05) (hash(x)=74902328)
+ 4100 val loss 3.9834
+ 4100 val perplexity 53.7005
+ 4100 train 4.029130 (lr=8.9983e-05) (hash(x)=82832041)
+ 4110 train 3.910758 (lr=8.9273e-05) (hash(x)=79143262)
+ 4120 train 3.930658 (lr=8.8589e-05) (hash(x)=77038149)
+ 4130 train 4.117441 (lr=8.7931e-05) (hash(x)=86339074)
+ 4140 train 4.181167 (lr=8.7299e-05) (hash(x)=76686216)
+ 4150 train 3.959009 (lr=8.6693e-05) (hash(x)=70522682)
+ 4160 train 3.927616 (lr=8.6113e-05) (hash(x)=90958555)
+ 4170 train 3.950793 (lr=8.5559e-05) (hash(x)=91463532)
+ 4180 train 3.939378 (lr=8.5031e-05) (hash(x)=81959329)
+ 4190 train 3.890112 (lr=8.4529e-05) (hash(x)=83146752)
+ 4200 val loss 3.9859
+ 4200 val perplexity 53.8312
+ 4200 train 3.878052 (lr=8.4054e-05) (hash(x)=78361715)
+ 4210 train 4.027769 (lr=8.3605e-05) (hash(x)=87364889)
+ 4220 train 3.875301 (lr=8.3182e-05) (hash(x)=70465156)
+ 4230 train 4.028909 (lr=8.2785e-05) (hash(x)=84524081)
+ 4240 train 3.988286 (lr=8.2414e-05) (hash(x)=77824868)
+ 4250 train 4.000423 (lr=8.2070e-05) (hash(x)=81710711)
+ 4260 train 3.982910 (lr=8.1752e-05) (hash(x)=76362728)
+ 4270 train 3.972584 (lr=8.1461e-05) (hash(x)=83115208)
+ 4280 train 3.884435 (lr=8.1196e-05) (hash(x)=87218314)
+ 4290 train 4.044530 (lr=8.0958e-05) (hash(x)=74582673)
+ 4300 val loss 3.9812
+ 4300 val perplexity 53.5801
+ 4300 train 3.918951 (lr=8.0746e-05) (hash(x)=77379615)
+ 4310 train 3.898298 (lr=8.0560e-05) (hash(x)=78669579)
+ 4320 train 4.018243 (lr=8.0401e-05) (hash(x)=83066608)
+ 4330 train 3.976596 (lr=8.0269e-05) (hash(x)=83037340)
+ 4340 train 4.018907 (lr=8.0162e-05) (hash(x)=82849771)
+ 4350 train 3.950427 (lr=8.0083e-05) (hash(x)=76693985)
+ 4360 train 4.038631 (lr=8.0030e-05) (hash(x)=77745394)
+ 4370 train 4.000293 (lr=8.0003e-05) (hash(x)=79954388)
+ 4374 val loss 3.9749
+ 4374 val perplexity 53.2455
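
Two features of this log are easy to verify. First, each val-perplexity line is just exp of the matching val-loss line (exp(3.9749) ≈ 53.2455). Second, the lr column is consistent with a linear warmup followed by a cosine decay to 0.1 × max_lr; note the logged warmup peaks near step 720 rather than at the configured warmup_steps=250, so the warmup length below is fitted to the log, not taken from args.json. A sketch under those assumptions, not the author's actual scheduler:

```python
import math

MAX_LR    = 8e-4          # max_lr from args.json
MIN_LR    = 0.1 * MAX_LR  # final logged lr is 8.0003e-05 ~= 0.1 * max_lr
MAX_STEPS = 4375
WARMUP    = 715           # fitted to the log (lr peaks near step 720), NOT warmup_steps=250

def lr_at(step: int) -> float:
    """Linear warmup, then cosine decay to MIN_LR (assumed shape)."""
    if step < WARMUP:
        return MAX_LR * (step + 1) / WARMUP
    progress = (step - WARMUP) / (MAX_STEPS - WARMUP)
    return MIN_LR + 0.5 * (MAX_LR - MIN_LR) * (1 + math.cos(math.pi * progress))

# Spot checks against the log above:
assert abs(lr_at(0) - 1.1189e-06) < 1e-9        # "0 train ... (lr=1.1189e-06)"
assert abs(lr_at(10) - 1.2308e-05) < 1e-8       # "10 train ... (lr=1.2308e-05)"
assert abs(math.exp(3.9749) - 53.2455) < 0.05   # "4374 val perplexity 53.2455"
```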
12_head_baseline_lr_8e-4_head_dim_22_fixed/model_04374.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3f42195ae10961caf47df8ac4366ecbda39ae2b4b0dd9a35e6a93e860e8c0d0e
+ size 96858242
12_head_baseline_lr_8e-4_head_dim_22_fixed/optimizer_04374.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:54f76bd93b577e794013bc8b0018ab4884260a80ce5605302f32cca0907b58cf
+ size 187435910
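
args.json exposes resume_checkpoint and resume_optimizer flags, and this folder ships matching model, optimizer, and dataloader snapshots at step 4374, so resuming presumably means pointing those flags at these files once the LFS blobs are fetched. A minimal sketch, assuming each .pt file is a torch-serialized state dict (the actual layout inside is unverified):

```python
import torch

CKPT_DIR = "12_head_baseline_lr_8e-4_head_dim_22_fixed"

# Assumed: plain torch.load-able state dicts (layout unverified).
model_state = torch.load(f"{CKPT_DIR}/model_04374.pt", map_location="cpu")
optim_state = torch.load(f"{CKPT_DIR}/optimizer_04374.pt", map_location="cpu")

# model.load_state_dict(model_state)      # `model`: a hypothetical module built from args.json
# optimizer.load_state_dict(optim_state)  # `optimizer`: a hypothetical optimizer instance
```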