andrew-healey committed
Commit 96a9928 · verified · 1 parent: 866c89c

Upload folder using huggingface_hub

12_head_baseline_lr_35e-4_head_dim_22/args.json ADDED
@@ -0,0 +1 @@
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "logs/repro_selective_pattern_rankings/12_head_baseline_lr_35e-4_head_dim_22", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 12, "n_embd": 264, "head_dim": 22, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 4375, "warmup_steps": 250, "group": "repro_selective_pattern_rankings", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1339, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 64, "total_batch_size": 131072, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.0035, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "12_head_baseline_lr_35e-4"}
12_head_baseline_lr_35e-4_head_dim_22/dataloader_04374.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8ec6682852ed60a9d42cb4047300f65a04f87328c0ad7a4516be84d11b28f216
+ size 964
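The .pt entries in this commit are Git LFS pointer files (version, oid, size) rather than the binary payloads; the oid is the SHA-256 of the actual file. A short sketch of verifying a fetched file against its pointer, using the digest above:

import hashlib

expected = "8ec6682852ed60a9d42cb4047300f65a04f87328c0ad7a4516be84d11b28f216"

h = hashlib.sha256()
with open("12_head_baseline_lr_35e-4_head_dim_22/dataloader_04374.pt", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        h.update(chunk)

assert h.hexdigest() == expected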
12_head_baseline_lr_35e-4_head_dim_22/log2.txt ADDED
@@ -0,0 +1,529 @@
+ max_steps: 4375
+ 0 val loss 10.9076
+ 0 val perplexity 54591.7266
+ 0 train 10.908084 (lr=4.8951e-06) (hash(x)=93356070)
+ 10 train 10.475616 (lr=5.3846e-05) (hash(x)=91253010)
+ 20 train 10.225532 (lr=1.0280e-04) (hash(x)=74934453)
+ 30 train 9.874416 (lr=1.5175e-04) (hash(x)=79278034)
+ 40 train 9.362942 (lr=2.0070e-04) (hash(x)=80107892)
+ 50 train 8.847641 (lr=2.4965e-04) (hash(x)=70745428)
+ 60 train 8.395575 (lr=2.9860e-04) (hash(x)=80077589)
+ 70 train 7.832136 (lr=3.4755e-04) (hash(x)=76213766)
+ 80 train 7.642156 (lr=3.9650e-04) (hash(x)=83218328)
+ 90 train 7.471665 (lr=4.4545e-04) (hash(x)=74855845)
+ 100 val loss 7.3602
+ 100 val perplexity 1572.1808
+ 100 train 7.230016 (lr=4.9441e-04) (hash(x)=82814902)
+ 110 train 7.126770 (lr=5.4336e-04) (hash(x)=84907741)
+ 120 train 6.978075 (lr=5.9231e-04) (hash(x)=82613223)
+ 130 train 6.803350 (lr=6.4126e-04) (hash(x)=83540876)
+ 140 train 6.740945 (lr=6.9021e-04) (hash(x)=75095216)
+ 150 train 6.788873 (lr=7.3916e-04) (hash(x)=97190944)
+ 160 train 6.698927 (lr=7.8811e-04) (hash(x)=82117809)
+ 170 train 6.599126 (lr=8.3706e-04) (hash(x)=70514724)
+ 180 train 6.528831 (lr=8.8601e-04) (hash(x)=81029624)
+ 190 train 6.440722 (lr=9.3497e-04) (hash(x)=78996841)
+ 200 val loss 6.3873
+ 200 val perplexity 594.2403
+ 200 train 6.390795 (lr=9.8392e-04) (hash(x)=79845097)
+ 210 train 6.275928 (lr=1.0329e-03) (hash(x)=78997683)
+ 220 train 6.198835 (lr=1.0818e-03) (hash(x)=74895865)
+ 230 train 6.346650 (lr=1.1308e-03) (hash(x)=80933276)
+ 240 train 6.057018 (lr=1.1797e-03) (hash(x)=77664606)
+ 250 train 6.080961 (lr=1.2287e-03) (hash(x)=75903930)
+ 260 train 5.984088 (lr=1.2776e-03) (hash(x)=82996853)
+ 270 train 5.904939 (lr=1.3266e-03) (hash(x)=73269568)
+ 280 train 5.849591 (lr=1.3755e-03) (hash(x)=82906427)
+ 290 train 5.766364 (lr=1.4245e-03) (hash(x)=71797895)
+ 300 val loss 5.9029
+ 300 val perplexity 366.1036
+ 300 train 5.621789 (lr=1.4734e-03) (hash(x)=85232249)
+ 310 train 5.564695 (lr=1.5224e-03) (hash(x)=75252489)
+ 320 train 5.644699 (lr=1.5713e-03) (hash(x)=74394644)
+ 330 train 5.745005 (lr=1.6203e-03) (hash(x)=82772910)
+ 340 train 5.778711 (lr=1.6692e-03) (hash(x)=81627464)
+ 350 train 5.769141 (lr=1.7182e-03) (hash(x)=84678053)
+ 360 train 5.699967 (lr=1.7671e-03) (hash(x)=81884128)
+ 370 train 5.697562 (lr=1.8161e-03) (hash(x)=76379242)
+ 380 train 5.491585 (lr=1.8650e-03) (hash(x)=98182875)
+ 390 train 5.478452 (lr=1.9140e-03) (hash(x)=79710436)
+ 400 val loss 5.5246
+ 400 val perplexity 250.7787
+ 400 train 5.475484 (lr=1.9629e-03) (hash(x)=79841071)
+ 410 train 5.420447 (lr=2.0119e-03) (hash(x)=75844151)
+ 420 train 5.418609 (lr=2.0608e-03) (hash(x)=73125036)
+ 430 train 5.437809 (lr=2.1098e-03) (hash(x)=84214858)
+ 440 train 5.333423 (lr=2.1587e-03) (hash(x)=80456994)
+ 450 train 5.317306 (lr=2.2077e-03) (hash(x)=76735962)
+ 460 train 5.227948 (lr=2.2566e-03) (hash(x)=81845446)
+ 470 train 5.041347 (lr=2.3056e-03) (hash(x)=76094689)
+ 480 train 5.165458 (lr=2.3545e-03) (hash(x)=83806686)
+ 490 train 4.979968 (lr=2.4035e-03) (hash(x)=84690227)
+ 500 val loss 5.3247
+ 500 val perplexity 205.3512
+ 500 train 5.066565 (lr=2.4524e-03) (hash(x)=71851938)
+ 510 train 5.166353 (lr=2.5014e-03) (hash(x)=77159346)
+ 520 train 5.216027 (lr=2.5503e-03) (hash(x)=80755753)
+ 530 train 5.324448 (lr=2.5993e-03) (hash(x)=75379679)
+ 540 train 5.231430 (lr=2.6483e-03) (hash(x)=82458619)
+ 550 train 5.202992 (lr=2.6972e-03) (hash(x)=86366396)
+ 560 train 5.227627 (lr=2.7462e-03) (hash(x)=85095044)
+ 570 train 5.190287 (lr=2.7951e-03) (hash(x)=78385159)
+ 580 train 5.125402 (lr=2.8441e-03) (hash(x)=79342394)
+ 590 train 5.154836 (lr=2.8930e-03) (hash(x)=70782192)
+ 600 val loss 5.1448
+ 600 val perplexity 171.5383
+ 600 train 5.127186 (lr=2.9420e-03) (hash(x)=76130353)
+ 610 train 5.031870 (lr=2.9909e-03) (hash(x)=74778440)
+ 620 train 5.000023 (lr=3.0399e-03) (hash(x)=79129709)
+ 630 train 4.907879 (lr=3.0888e-03) (hash(x)=76469962)
+ 640 train 4.912803 (lr=3.1378e-03) (hash(x)=78288049)
+ 650 train 4.857151 (lr=3.1867e-03) (hash(x)=76641388)
+ 660 train 4.785213 (lr=3.2357e-03) (hash(x)=75937906)
+ 670 train 4.783622 (lr=3.2846e-03) (hash(x)=74807157)
+ 680 train 4.673441 (lr=3.3336e-03) (hash(x)=77490144)
+ 690 train 4.706055 (lr=3.3825e-03) (hash(x)=72900124)
+ 700 val loss 4.9722
+ 700 val perplexity 144.3498
+ 700 train 4.664327 (lr=3.4315e-03) (hash(x)=73424218)
+ 710 train 4.949912 (lr=3.4804e-03) (hash(x)=86052345)
+ 720 train 4.910357 (lr=3.5000e-03) (hash(x)=77462613)
+ 730 train 4.917890 (lr=3.4999e-03) (hash(x)=79027471)
+ 740 train 4.882577 (lr=3.4996e-03) (hash(x)=78149992)
+ 750 train 4.818355 (lr=3.4993e-03) (hash(x)=89147499)
+ 760 train 4.854363 (lr=3.4988e-03) (hash(x)=74931538)
+ 770 train 4.795114 (lr=3.4982e-03) (hash(x)=89648204)
+ 780 train 4.841908 (lr=3.4975e-03) (hash(x)=83708293)
+ 790 train 4.838573 (lr=3.4967e-03) (hash(x)=84336512)
+ 800 val loss 4.8097
+ 800 val perplexity 122.6984
+ 800 train 4.733207 (lr=3.4958e-03) (hash(x)=75025285)
+ 810 train 4.645676 (lr=3.4948e-03) (hash(x)=79436536)
+ 820 train 4.648520 (lr=3.4936e-03) (hash(x)=90467390)
+ 830 train 4.688494 (lr=3.4923e-03) (hash(x)=77292020)
+ 840 train 4.607029 (lr=3.4909e-03) (hash(x)=75568927)
+ 850 train 4.625541 (lr=3.4894e-03) (hash(x)=79671781)
+ 860 train 4.716887 (lr=3.4878e-03) (hash(x)=84280943)
+ 870 train 4.695498 (lr=3.4861e-03) (hash(x)=80901294)
+ 880 train 4.815775 (lr=3.4842e-03) (hash(x)=81437584)
+ 890 train 4.678123 (lr=3.4823e-03) (hash(x)=73893778)
+ 900 val loss 4.6980
+ 900 val perplexity 109.7319
+ 900 train 4.631812 (lr=3.4802e-03) (hash(x)=74987794)
+ 910 train 4.640407 (lr=3.4780e-03) (hash(x)=93721374)
+ 920 train 4.614246 (lr=3.4757e-03) (hash(x)=79149678)
+ 930 train 4.576686 (lr=3.4733e-03) (hash(x)=83179387)
+ 940 train 4.577224 (lr=3.4707e-03) (hash(x)=89009978)
+ 950 train 4.569585 (lr=3.4681e-03) (hash(x)=76483735)
+ 960 train 4.498858 (lr=3.4653e-03) (hash(x)=78638850)
+ 970 train 4.527314 (lr=3.4624e-03) (hash(x)=90415206)
+ 980 train 4.473612 (lr=3.4594e-03) (hash(x)=90819195)
+ 990 train 4.605165 (lr=3.4563e-03) (hash(x)=85938852)
+ 1000 val loss 4.5837
+ 1000 val perplexity 97.8792
+ 1000 train 4.631423 (lr=3.4531e-03) (hash(x)=82436789)
+ 1010 train 4.687474 (lr=3.4498e-03) (hash(x)=63022149)
+ 1020 train 4.599560 (lr=3.4463e-03) (hash(x)=83501199)
+ 1030 train 4.583072 (lr=3.4428e-03) (hash(x)=77093326)
+ 1040 train 4.455826 (lr=3.4391e-03) (hash(x)=73437559)
+ 1050 train 4.550467 (lr=3.4353e-03) (hash(x)=84550388)
+ 1060 train 4.543569 (lr=3.4314e-03) (hash(x)=86184566)
+ 1070 train 4.536319 (lr=3.4274e-03) (hash(x)=85644922)
+ 1080 train 4.471462 (lr=3.4233e-03) (hash(x)=86179801)
+ 1090 train 4.451015 (lr=3.4191e-03) (hash(x)=82868303)
+ 1100 val loss 4.5207
+ 1100 val perplexity 91.8982
+ 1100 train 4.542423 (lr=3.4148e-03) (hash(x)=96780388)
+ 1110 train 4.480480 (lr=3.4103e-03) (hash(x)=87223122)
+ 1120 train 4.450761 (lr=3.4058e-03) (hash(x)=77292786)
+ 1130 train 4.472401 (lr=3.4011e-03) (hash(x)=88761375)
+ 1140 train 4.383306 (lr=3.3964e-03) (hash(x)=80956468)
+ 1150 train 4.560204 (lr=3.3915e-03) (hash(x)=84725462)
+ 1160 train 4.546232 (lr=3.3865e-03) (hash(x)=76098113)
+ 1170 train 4.514983 (lr=3.3814e-03) (hash(x)=84387685)
+ 1180 train 4.463761 (lr=3.3762e-03) (hash(x)=76120321)
+ 1190 train 4.429448 (lr=3.3709e-03) (hash(x)=85974065)
+ 1200 val loss 4.4449
+ 1200 val perplexity 85.1890
+ 1200 train 4.326890 (lr=3.3655e-03) (hash(x)=79340644)
+ 1210 train 4.428706 (lr=3.3600e-03) (hash(x)=88345025)
+ 1220 train 4.331655 (lr=3.3543e-03) (hash(x)=86406230)
+ 1230 train 4.323251 (lr=3.3486e-03) (hash(x)=80608975)
+ 1240 train 4.374128 (lr=3.3428e-03) (hash(x)=81635225)
+ 1250 train 4.213240 (lr=3.3368e-03) (hash(x)=82126376)
+ 1260 train 4.390880 (lr=3.3308e-03) (hash(x)=83737972)
+ 1270 train 4.300529 (lr=3.3246e-03) (hash(x)=81132345)
+ 1280 train 4.383876 (lr=3.3184e-03) (hash(x)=85850404)
+ 1290 train 4.394030 (lr=3.3120e-03) (hash(x)=83490640)
+ 1300 val loss 4.4036
+ 1300 val perplexity 81.7478
+ 1300 train 4.374684 (lr=3.3056e-03) (hash(x)=77545187)
+ 1310 train 4.415729 (lr=3.2990e-03) (hash(x)=86412685)
+ 1320 train 4.318209 (lr=3.2924e-03) (hash(x)=86429640)
+ 1330 train 4.456046 (lr=3.2856e-03) (hash(x)=79645281)
+ 1340 train 4.412708 (lr=3.2787e-03) (hash(x)=82668541)
+ 1350 train 4.274076 (lr=3.2718e-03) (hash(x)=73627248)
+ 1360 train 4.370265 (lr=3.2647e-03) (hash(x)=74845133)
+ 1370 train 4.278931 (lr=3.2576e-03) (hash(x)=77850497)
+ 1380 train 4.306871 (lr=3.2503e-03) (hash(x)=76143954)
+ 1390 train 4.317526 (lr=3.2430e-03) (hash(x)=80675544)
+ 1400 val loss 4.3850
+ 1400 val perplexity 80.2346
+ 1400 train 4.294517 (lr=3.2355e-03) (hash(x)=76010938)
+ 1410 train 4.343231 (lr=3.2280e-03) (hash(x)=81260300)
+ 1420 train 4.461360 (lr=3.2203e-03) (hash(x)=83803629)
+ 1430 train 4.284713 (lr=3.2126e-03) (hash(x)=83840110)
+ 1440 train 4.475954 (lr=3.2047e-03) (hash(x)=97711831)
+ 1450 train 4.345918 (lr=3.1968e-03) (hash(x)=81110139)
+ 1460 train 4.271299 (lr=3.1888e-03) (hash(x)=90942839)
+ 1470 train 4.311315 (lr=3.1807e-03) (hash(x)=80721564)
+ 1480 train 4.407685 (lr=3.1725e-03) (hash(x)=77852059)
+ 1490 train 4.302835 (lr=3.1642e-03) (hash(x)=80011365)
+ 1500 val loss 4.3433
+ 1500 val perplexity 76.9592
+ 1500 train 4.332174 (lr=3.1558e-03) (hash(x)=84496142)
+ 1510 train 4.268084 (lr=3.1473e-03) (hash(x)=70186729)
+ 1520 train 4.290076 (lr=3.1387e-03) (hash(x)=74854227)
+ 1530 train 4.201273 (lr=3.1301e-03) (hash(x)=77468161)
+ 1540 train 4.214620 (lr=3.1213e-03) (hash(x)=87853059)
+ 1550 train 4.146360 (lr=3.1125e-03) (hash(x)=71225436)
+ 1560 train 4.246445 (lr=3.1036e-03) (hash(x)=84072783)
+ 1570 train 4.302769 (lr=3.0945e-03) (hash(x)=72723098)
+ 1580 train 4.316713 (lr=3.0855e-03) (hash(x)=90409866)
+ 1590 train 4.391655 (lr=3.0763e-03) (hash(x)=87481378)
+ 1600 val loss 4.3159
+ 1600 val perplexity 74.8841
+ 1600 train 4.374770 (lr=3.0670e-03) (hash(x)=77643862)
+ 1610 train 4.258157 (lr=3.0576e-03) (hash(x)=88862575)
+ 1620 train 4.276330 (lr=3.0482e-03) (hash(x)=84612581)
+ 1630 train 4.307610 (lr=3.0387e-03) (hash(x)=87075989)
+ 1640 train 4.197524 (lr=3.0291e-03) (hash(x)=88277361)
+ 1650 train 4.314314 (lr=3.0194e-03) (hash(x)=78750236)
+ 1660 train 4.238356 (lr=3.0096e-03) (hash(x)=82604581)
+ 1670 train 4.081969 (lr=2.9998e-03) (hash(x)=68482265)
+ 1680 train 4.203860 (lr=2.9899e-03) (hash(x)=75088835)
+ 1690 train 4.143486 (lr=2.9799e-03) (hash(x)=66060989)
+ 1700 val loss 4.3158
+ 1700 val perplexity 74.8749
+ 1700 train 4.302199 (lr=2.9698e-03) (hash(x)=79986754)
+ 1710 train 4.301451 (lr=2.9597e-03) (hash(x)=83657930)
+ 1720 train 4.236944 (lr=2.9494e-03) (hash(x)=81754135)
+ 1730 train 4.324185 (lr=2.9391e-03) (hash(x)=78904427)
+ 1740 train 4.225450 (lr=2.9287e-03) (hash(x)=85920177)
+ 1750 train 4.279061 (lr=2.9183e-03) (hash(x)=86573211)
+ 1760 train 4.235025 (lr=2.9078e-03) (hash(x)=81737128)
+ 1770 train 4.223839 (lr=2.8972e-03) (hash(x)=77400968)
+ 1780 train 4.236845 (lr=2.8865e-03) (hash(x)=73545497)
+ 1790 train 4.481628 (lr=2.8757e-03) (hash(x)=71641943)
+ 1800 val loss 4.2693
+ 1800 val perplexity 71.4739
+ 1800 train 4.329270 (lr=2.8649e-03) (hash(x)=87819781)
+ 1810 train 4.185396 (lr=2.8540e-03) (hash(x)=86870770)
+ 1820 train 4.193636 (lr=2.8431e-03) (hash(x)=82522211)
+ 1830 train 4.285587 (lr=2.8321e-03) (hash(x)=61947437)
+ 1840 train 4.168092 (lr=2.8210e-03) (hash(x)=79865406)
+ 1850 train 4.129658 (lr=2.8098e-03) (hash(x)=79828721)
+ 1860 train 4.276411 (lr=2.7986e-03) (hash(x)=80869571)
+ 1870 train 4.257335 (lr=2.7873e-03) (hash(x)=73780971)
+ 1880 train 4.225622 (lr=2.7760e-03) (hash(x)=79249549)
+ 1890 train 4.281643 (lr=2.7646e-03) (hash(x)=81041904)
+ 1900 val loss 4.2482
+ 1900 val perplexity 69.9760
+ 1900 train 4.333055 (lr=2.7531e-03) (hash(x)=82456430)
+ 1910 train 4.114544 (lr=2.7416e-03) (hash(x)=82222135)
+ 1920 train 4.150123 (lr=2.7300e-03) (hash(x)=70033249)
+ 1930 train 4.134658 (lr=2.7183e-03) (hash(x)=72887360)
+ 1940 train 4.188206 (lr=2.7066e-03) (hash(x)=83251100)
+ 1950 train 4.225506 (lr=2.6949e-03) (hash(x)=79660266)
+ 1960 train 4.105270 (lr=2.6831e-03) (hash(x)=78919068)
+ 1970 train 4.207603 (lr=2.6712e-03) (hash(x)=94690431)
+ 1980 train 4.039122 (lr=2.6592e-03) (hash(x)=88153756)
+ 1990 train 4.094770 (lr=2.6473e-03) (hash(x)=77195688)
+ 2000 val loss 4.2263
+ 2000 val perplexity 68.4620
+ 2000 train 4.193507 (lr=2.6352e-03) (hash(x)=81308591)
+ 2010 train 4.242415 (lr=2.6231e-03) (hash(x)=80205479)
+ 2020 train 4.157186 (lr=2.6110e-03) (hash(x)=81008704)
+ 2030 train 4.221213 (lr=2.5988e-03) (hash(x)=82914358)
+ 2040 train 4.234533 (lr=2.5865e-03) (hash(x)=88064399)
+ 2050 train 4.241021 (lr=2.5742e-03) (hash(x)=82889390)
+ 2060 train 4.303810 (lr=2.5619e-03) (hash(x)=80913554)
+ 2070 train 4.144627 (lr=2.5495e-03) (hash(x)=78121791)
+ 2080 train 4.285869 (lr=2.5371e-03) (hash(x)=75356657)
+ 2090 train 4.111552 (lr=2.5246e-03) (hash(x)=74048412)
+ 2100 val loss 4.2029
+ 2100 val perplexity 66.8820
+ 2100 train 4.030091 (lr=2.5121e-03) (hash(x)=68928225)
+ 2110 train 4.081536 (lr=2.4995e-03) (hash(x)=93734745)
+ 2120 train 4.074827 (lr=2.4869e-03) (hash(x)=72878164)
+ 2130 train 4.210315 (lr=2.4743e-03) (hash(x)=71508226)
+ 2140 train 4.160179 (lr=2.4616e-03) (hash(x)=84168671)
+ 2150 train 4.100376 (lr=2.4488e-03) (hash(x)=74673639)
+ 2160 train 4.244843 (lr=2.4361e-03) (hash(x)=75470031)
+ 2170 train 4.208879 (lr=2.4233e-03) (hash(x)=74307890)
+ 2180 train 4.321672 (lr=2.4104e-03) (hash(x)=77214245)
+ 2190 train 4.103363 (lr=2.3975e-03) (hash(x)=88628359)
+ 2200 val loss 4.1833
+ 2200 val perplexity 65.5810
+ 2200 train 4.122381 (lr=2.3846e-03) (hash(x)=74779126)
+ 2210 train 4.154639 (lr=2.3717e-03) (hash(x)=79817976)
+ 2220 train 4.112637 (lr=2.3587e-03) (hash(x)=75258996)
+ 2230 train 4.086831 (lr=2.3457e-03) (hash(x)=88164047)
+ 2240 train 4.070381 (lr=2.3326e-03) (hash(x)=78580686)
+ 2250 train 4.006927 (lr=2.3196e-03) (hash(x)=76510617)
+ 2260 train 4.082644 (lr=2.3065e-03) (hash(x)=76043966)
+ 2270 train 4.061935 (lr=2.2933e-03) (hash(x)=73062098)
+ 2280 train 4.286040 (lr=2.2802e-03) (hash(x)=81885909)
+ 2290 train 4.223708 (lr=2.2670e-03) (hash(x)=81481741)
+ 2300 val loss 4.1614
+ 2300 val perplexity 64.1639
+ 2300 train 4.081662 (lr=2.2538e-03) (hash(x)=82104275)
+ 2310 train 4.126639 (lr=2.2405e-03) (hash(x)=82137309)
+ 2320 train 4.107265 (lr=2.2273e-03) (hash(x)=86741167)
+ 2330 train 4.124637 (lr=2.2140e-03) (hash(x)=84796263)
+ 2340 train 4.086658 (lr=2.2007e-03) (hash(x)=169966529)
+ 2350 train 3.970767 (lr=2.1874e-03) (hash(x)=80006061)
+ 2360 train 4.028484 (lr=2.1741e-03) (hash(x)=71090999)
+ 2370 train 4.158218 (lr=2.1607e-03) (hash(x)=86641850)
+ 2380 train 4.057276 (lr=2.1473e-03) (hash(x)=84573253)
+ 2390 train 4.090212 (lr=2.1339e-03) (hash(x)=74483764)
+ 2400 val loss 4.1577
+ 2400 val perplexity 63.9215
+ 2400 train 4.062928 (lr=2.1205e-03) (hash(x)=78327659)
+ 2410 train 4.126294 (lr=2.1071e-03) (hash(x)=77273627)
+ 2420 train 3.993065 (lr=2.0937e-03) (hash(x)=76938049)
+ 2430 train 3.998426 (lr=2.0802e-03) (hash(x)=98449442)
+ 2440 train 3.855887 (lr=2.0668e-03) (hash(x)=78157797)
+ 2450 train 3.895722 (lr=2.0533e-03) (hash(x)=80637582)
+ 2460 train 3.884335 (lr=2.0398e-03) (hash(x)=80023854)
+ 2470 train 3.953012 (lr=2.0263e-03) (hash(x)=86678884)
+ 2480 train 4.172486 (lr=2.0128e-03) (hash(x)=84086469)
+ 2490 train 4.194680 (lr=1.9993e-03) (hash(x)=83131141)
+ 2500 val loss 4.1305
+ 2500 val perplexity 62.2072
+ 2500 train 4.109979 (lr=1.9858e-03) (hash(x)=82583497)
+ 2510 train 4.166650 (lr=1.9723e-03) (hash(x)=79164326)
+ 2520 train 4.078309 (lr=1.9588e-03) (hash(x)=76456503)
+ 2530 train 4.135511 (lr=1.9453e-03) (hash(x)=82001228)
+ 2540 train 4.164735 (lr=1.9318e-03) (hash(x)=75615595)
+ 2550 train 4.114700 (lr=1.9182e-03) (hash(x)=78397869)
+ 2560 train 4.115577 (lr=1.9047e-03) (hash(x)=75547032)
+ 2570 train 4.029257 (lr=1.8912e-03) (hash(x)=89201025)
+ 2580 train 4.018538 (lr=1.8777e-03) (hash(x)=89856704)
+ 2590 train 4.024755 (lr=1.8642e-03) (hash(x)=82175682)
+ 2600 val loss 4.1136
+ 2600 val perplexity 61.1656
+ 2600 train 4.033382 (lr=1.8507e-03) (hash(x)=78312826)
+ 2610 train 4.020324 (lr=1.8372e-03) (hash(x)=77066588)
+ 2620 train 3.862560 (lr=1.8237e-03) (hash(x)=78666061)
+ 2630 train 3.862125 (lr=1.8102e-03) (hash(x)=93762143)
+ 2640 train 3.879967 (lr=1.7967e-03) (hash(x)=83191587)
+ 2650 train 3.800605 (lr=1.7832e-03) (hash(x)=87169585)
+ 2660 train 4.021132 (lr=1.7698e-03) (hash(x)=86426388)
+ 2670 train 4.153884 (lr=1.7563e-03) (hash(x)=76692638)
+ 2680 train 4.072164 (lr=1.7429e-03) (hash(x)=77446063)
+ 2690 train 4.070703 (lr=1.7295e-03) (hash(x)=79809050)
+ 2700 val loss 4.1071
+ 2700 val perplexity 60.7709
+ 2700 train 4.002093 (lr=1.7161e-03) (hash(x)=83116823)
+ 2710 train 4.113149 (lr=1.7027e-03) (hash(x)=75622148)
+ 2720 train 4.042696 (lr=1.6893e-03) (hash(x)=80690512)
+ 2730 train 4.108356 (lr=1.6759e-03) (hash(x)=78009984)
+ 2740 train 4.065202 (lr=1.6626e-03) (hash(x)=74205488)
+ 2750 train 4.069386 (lr=1.6493e-03) (hash(x)=91013332)
+ 2760 train 3.995842 (lr=1.6360e-03) (hash(x)=76954961)
+ 2770 train 4.000626 (lr=1.6227e-03) (hash(x)=79390317)
+ 2780 train 3.872460 (lr=1.6095e-03) (hash(x)=70168783)
+ 2790 train 4.038021 (lr=1.5962e-03) (hash(x)=76028417)
+ 2800 val loss 4.0888
+ 2800 val perplexity 59.6694
+ 2800 train 3.854482 (lr=1.5830e-03) (hash(x)=77656050)
+ 2810 train 3.903926 (lr=1.5698e-03) (hash(x)=94248216)
+ 2820 train 3.968278 (lr=1.5567e-03) (hash(x)=78305078)
+ 2830 train 3.878869 (lr=1.5435e-03) (hash(x)=79948848)
+ 2840 train 3.743937 (lr=1.5304e-03) (hash(x)=85341024)
+ 2850 train 4.054077 (lr=1.5174e-03) (hash(x)=78735170)
+ 2860 train 4.300416 (lr=1.5043e-03) (hash(x)=71616419)
+ 2870 train 4.074437 (lr=1.4913e-03) (hash(x)=78656517)
+ 2880 train 4.089076 (lr=1.4783e-03) (hash(x)=80073987)
+ 2890 train 4.013352 (lr=1.4654e-03) (hash(x)=76894809)
+ 2900 val loss 4.0622
+ 2900 val perplexity 58.1040
+ 2900 train 3.995355 (lr=1.4525e-03) (hash(x)=80499838)
+ 2910 train 4.047222 (lr=1.4396e-03) (hash(x)=72673354)
+ 2920 train 4.033263 (lr=1.4267e-03) (hash(x)=84265768)
+ 2930 train 4.056005 (lr=1.4139e-03) (hash(x)=79612060)
+ 2940 train 3.910585 (lr=1.4012e-03) (hash(x)=74970087)
+ 2950 train 4.069673 (lr=1.3884e-03) (hash(x)=84166818)
+ 2960 train 4.054111 (lr=1.3757e-03) (hash(x)=89410221)
+ 2970 train 3.994348 (lr=1.3631e-03) (hash(x)=75672566)
+ 2980 train 3.850848 (lr=1.3505e-03) (hash(x)=81760314)
+ 2990 train 3.824719 (lr=1.3379e-03) (hash(x)=80605200)
+ 3000 val loss 4.0600
+ 3000 val perplexity 57.9768
+ 3000 train 3.816441 (lr=1.3254e-03) (hash(x)=83804735)
+ 3010 train 3.746111 (lr=1.3129e-03) (hash(x)=77015303)
+ 3020 train 3.813779 (lr=1.3005e-03) (hash(x)=81464523)
+ 3030 train 4.013256 (lr=1.2881e-03) (hash(x)=80168230)
+ 3040 train 3.954233 (lr=1.2758e-03) (hash(x)=84817006)
+ 3050 train 4.061200 (lr=1.2635e-03) (hash(x)=71601811)
+ 3060 train 3.985192 (lr=1.2512e-03) (hash(x)=85499733)
+ 3070 train 4.040086 (lr=1.2390e-03) (hash(x)=78670408)
+ 3080 train 3.932832 (lr=1.2269e-03) (hash(x)=77120468)
+ 3090 train 4.038423 (lr=1.2148e-03) (hash(x)=77927426)
+ 3100 val loss 4.0402
+ 3100 val perplexity 56.8372
+ 3100 train 4.053142 (lr=1.2027e-03) (hash(x)=83998606)
+ 3110 train 3.954900 (lr=1.1908e-03) (hash(x)=77990218)
+ 3120 train 3.866522 (lr=1.1788e-03) (hash(x)=81623970)
+ 3130 train 3.841614 (lr=1.1669e-03) (hash(x)=75014781)
+ 3140 train 3.893361 (lr=1.1551e-03) (hash(x)=72591250)
+ 3150 train 3.905207 (lr=1.1434e-03) (hash(x)=81421847)
+ 3160 train 3.959923 (lr=1.1317e-03) (hash(x)=75112631)
+ 3170 train 4.051120 (lr=1.1200e-03) (hash(x)=87518033)
+ 3180 train 3.903703 (lr=1.1084e-03) (hash(x)=78648348)
+ 3190 train 4.018905 (lr=1.0969e-03) (hash(x)=78270029)
+ 3200 val loss 4.0193
+ 3200 val perplexity 55.6637
+ 3200 train 4.092290 (lr=1.0854e-03) (hash(x)=83380714)
+ 3210 train 4.276913 (lr=1.0740e-03) (hash(x)=67804991)
+ 3220 train 4.002490 (lr=1.0627e-03) (hash(x)=76968804)
+ 3230 train 3.937056 (lr=1.0514e-03) (hash(x)=75798670)
+ 3240 train 4.013489 (lr=1.0402e-03) (hash(x)=83192811)
+ 3250 train 3.950767 (lr=1.0290e-03) (hash(x)=81659789)
+ 3260 train 3.991711 (lr=1.0179e-03) (hash(x)=80622502)
+ 3270 train 3.960459 (lr=1.0069e-03) (hash(x)=85436511)
+ 3280 train 3.898784 (lr=9.9596e-04) (hash(x)=80589180)
+ 3290 train 4.045554 (lr=9.8508e-04) (hash(x)=85747193)
+ 3300 val loss 4.0138
+ 3300 val perplexity 55.3577
+ 3300 train 3.909225 (lr=9.7426e-04) (hash(x)=74635692)
+ 3310 train 3.896818 (lr=9.6352e-04) (hash(x)=82045455)
+ 3320 train 3.765933 (lr=9.5285e-04) (hash(x)=73249173)
+ 3330 train 3.724060 (lr=9.4225e-04) (hash(x)=71492338)
+ 3340 train 4.059547 (lr=9.3172e-04) (hash(x)=76374071)
+ 3350 train 4.029555 (lr=9.2127e-04) (hash(x)=80474064)
+ 3360 train 3.937260 (lr=9.1088e-04) (hash(x)=84390892)
+ 3370 train 3.985572 (lr=9.0058e-04) (hash(x)=83399949)
+ 3380 train 4.043242 (lr=8.9035e-04) (hash(x)=77648059)
+ 3390 train 4.009696 (lr=8.8019e-04) (hash(x)=80152701)
+ 3400 val loss 3.9929
+ 3400 val perplexity 54.2101
+ 3400 train 4.056261 (lr=8.7012e-04) (hash(x)=81952545)
+ 3410 train 4.016254 (lr=8.6012e-04) (hash(x)=80908993)
+ 3420 train 4.059134 (lr=8.5019e-04) (hash(x)=79610037)
+ 3430 train 3.921913 (lr=8.4035e-04) (hash(x)=87624382)
+ 3440 train 3.980027 (lr=8.3059e-04) (hash(x)=82336381)
+ 3450 train 3.958662 (lr=8.2091e-04) (hash(x)=87687835)
+ 3460 train 3.963939 (lr=8.1131e-04) (hash(x)=76376135)
+ 3470 train 3.874727 (lr=8.0179e-04) (hash(x)=79331391)
+ 3480 train 3.780951 (lr=7.9235e-04) (hash(x)=107002681)
+ 3490 train 3.860982 (lr=7.8300e-04) (hash(x)=77465514)
+ 3500 val loss 3.9883
+ 3500 val perplexity 53.9644
+ 3500 train 3.825645 (lr=7.7373e-04) (hash(x)=88237229)
+ 3510 train 3.890729 (lr=7.6455e-04) (hash(x)=81011739)
+ 3520 train 3.943923 (lr=7.5545e-04) (hash(x)=64643427)
+ 3530 train 4.220985 (lr=7.4644e-04) (hash(x)=78029539)
+ 3540 train 4.044697 (lr=7.3752e-04) (hash(x)=83188968)
+ 3550 train 4.068678 (lr=7.2868e-04) (hash(x)=86104185)
+ 3560 train 3.993771 (lr=7.1993e-04) (hash(x)=81993629)
+ 3570 train 3.964978 (lr=7.1127e-04) (hash(x)=72719368)
+ 3580 train 3.971447 (lr=7.0270e-04) (hash(x)=90091487)
+ 3590 train 3.859858 (lr=6.9422e-04) (hash(x)=77393152)
+ 3600 val loss 3.9695
+ 3600 val perplexity 52.9570
+ 3600 train 3.970802 (lr=6.8583e-04) (hash(x)=73103504)
+ 3610 train 3.977986 (lr=6.7753e-04) (hash(x)=84429400)
+ 3620 train 3.955100 (lr=6.6932e-04) (hash(x)=77021795)
+ 3630 train 3.902692 (lr=6.6121e-04) (hash(x)=92830605)
+ 3640 train 4.097820 (lr=6.5319e-04) (hash(x)=78313175)
+ 3650 train 3.900215 (lr=6.4526e-04) (hash(x)=85395549)
+ 3660 train 3.693795 (lr=6.3743e-04) (hash(x)=78114459)
+ 3670 train 3.822485 (lr=6.2969e-04) (hash(x)=74968316)
+ 3680 train 3.745454 (lr=6.2204e-04) (hash(x)=73358737)
+ 3690 train 3.848137 (lr=6.1450e-04) (hash(x)=76399442)
+ 3700 val loss 3.9708
+ 3700 val perplexity 53.0296
+ 3700 train 3.855102 (lr=6.0705e-04) (hash(x)=74039273)
+ 3710 train 4.028332 (lr=5.9969e-04) (hash(x)=79299680)
+ 3720 train 3.942715 (lr=5.9244e-04) (hash(x)=94701498)
+ 3730 train 4.018158 (lr=5.8528e-04) (hash(x)=75352071)
+ 3740 train 3.902794 (lr=5.7822e-04) (hash(x)=77909487)
+ 3750 train 4.012174 (lr=5.7126e-04) (hash(x)=73986730)
+ 3760 train 3.966691 (lr=5.6440e-04) (hash(x)=79325763)
+ 3770 train 4.014657 (lr=5.5764e-04) (hash(x)=72457818)
+ 3780 train 4.141690 (lr=5.5098e-04) (hash(x)=71775590)
+ 3790 train 4.029135 (lr=5.4443e-04) (hash(x)=82638943)
+ 3800 val loss 3.9530
+ 3800 val perplexity 52.0928
+ 3800 train 4.006685 (lr=5.3797e-04) (hash(x)=79965893)
+ 3810 train 3.942750 (lr=5.3162e-04) (hash(x)=72598235)
+ 3820 train 3.920198 (lr=5.2537e-04) (hash(x)=83113889)
+ 3830 train 3.985005 (lr=5.1922e-04) (hash(x)=74434590)
+ 3840 train 3.904609 (lr=5.1317e-04) (hash(x)=82860348)
+ 3850 train 3.839092 (lr=5.0723e-04) (hash(x)=78067565)
+ 3860 train 3.902898 (lr=5.0140e-04) (hash(x)=82592498)
+ 3870 train 3.835158 (lr=4.9567e-04) (hash(x)=81820733)
+ 3880 train 3.850827 (lr=4.9004e-04) (hash(x)=87709040)
+ 3890 train 3.895581 (lr=4.8452e-04) (hash(x)=70379093)
+ 3900 val loss 3.9468
+ 3900 val perplexity 51.7679
+ 3900 train 4.019745 (lr=4.7911e-04) (hash(x)=76597431)
+ 3910 train 3.904516 (lr=4.7380e-04) (hash(x)=90490716)
+ 3920 train 4.008976 (lr=4.6860e-04) (hash(x)=81970659)
+ 3930 train 3.990327 (lr=4.6351e-04) (hash(x)=81496334)
+ 3940 train 3.974111 (lr=4.5852e-04) (hash(x)=75717605)
+ 3950 train 3.916921 (lr=4.5364e-04) (hash(x)=82749357)
+ 3960 train 3.944228 (lr=4.4888e-04) (hash(x)=89355157)
+ 3970 train 3.902046 (lr=4.4422e-04) (hash(x)=78980403)
+ 3980 train 3.873741 (lr=4.3966e-04) (hash(x)=76627217)
+ 3990 train 3.874669 (lr=4.3522e-04) (hash(x)=72412879)
+ 4000 val loss 3.9405
+ 4000 val perplexity 51.4441
+ 4000 train 3.889693 (lr=4.3089e-04) (hash(x)=83018142)
+ 4010 train 3.901834 (lr=4.2667e-04) (hash(x)=81272436)
+ 4020 train 3.826846 (lr=4.2256e-04) (hash(x)=85497482)
+ 4030 train 3.899328 (lr=4.1856e-04) (hash(x)=85241734)
+ 4040 train 3.873369 (lr=4.1467e-04) (hash(x)=77925307)
+ 4050 train 3.871701 (lr=4.1089e-04) (hash(x)=84826179)
+ 4060 train 3.924413 (lr=4.0722e-04) (hash(x)=83606764)
+ 4070 train 3.842561 (lr=4.0367e-04) (hash(x)=80567590)
+ 4080 train 3.890948 (lr=4.0022e-04) (hash(x)=76860998)
+ 4090 train 3.929630 (lr=3.9689e-04) (hash(x)=74902328)
+ 4100 val loss 3.9289
+ 4100 val perplexity 50.8514
+ 4100 train 3.970068 (lr=3.9368e-04) (hash(x)=82832041)
+ 4110 train 3.851775 (lr=3.9057e-04) (hash(x)=79143262)
+ 4120 train 3.879892 (lr=3.8758e-04) (hash(x)=77038149)
+ 4130 train 4.062265 (lr=3.8470e-04) (hash(x)=86339074)
+ 4140 train 4.132008 (lr=3.8193e-04) (hash(x)=76686216)
+ 4150 train 3.902624 (lr=3.7928e-04) (hash(x)=70522682)
+ 4160 train 3.870040 (lr=3.7674e-04) (hash(x)=90958555)
+ 4170 train 3.891679 (lr=3.7432e-04) (hash(x)=91463532)
+ 4180 train 3.877617 (lr=3.7201e-04) (hash(x)=81959329)
+ 4190 train 3.834756 (lr=3.6982e-04) (hash(x)=83146752)
+ 4200 val loss 3.9335
+ 4200 val perplexity 51.0855
+ 4200 train 3.823717 (lr=3.6774e-04) (hash(x)=78361715)
+ 4210 train 3.969989 (lr=3.6577e-04) (hash(x)=87364889)
+ 4220 train 3.825620 (lr=3.6392e-04) (hash(x)=70465156)
+ 4230 train 3.977006 (lr=3.6218e-04) (hash(x)=84524081)
+ 4240 train 3.934300 (lr=3.6056e-04) (hash(x)=77824868)
+ 4250 train 3.942354 (lr=3.5906e-04) (hash(x)=81710711)
+ 4260 train 3.926461 (lr=3.5767e-04) (hash(x)=76362728)
+ 4270 train 3.916588 (lr=3.5639e-04) (hash(x)=83115208)
+ 4280 train 3.830691 (lr=3.5523e-04) (hash(x)=87218314)
+ 4290 train 3.987258 (lr=3.5419e-04) (hash(x)=74582673)
+ 4300 val loss 3.9255
+ 4300 val perplexity 50.6798
+ 4300 train 3.865469 (lr=3.5326e-04) (hash(x)=77379615)
+ 4310 train 3.839643 (lr=3.5245e-04) (hash(x)=78669579)
+ 4320 train 3.955772 (lr=3.5175e-04) (hash(x)=83066608)
+ 4330 train 3.922378 (lr=3.5117e-04) (hash(x)=83037340)
+ 4340 train 3.958697 (lr=3.5071e-04) (hash(x)=82849771)
+ 4350 train 3.896540 (lr=3.5036e-04) (hash(x)=76693985)
+ 4360 train 3.984878 (lr=3.5013e-04) (hash(x)=77745394)
+ 4370 train 3.945752 (lr=3.5001e-04) (hash(x)=79954388)
+ 4374 val loss 3.9192
+ 4374 val perplexity 50.3600
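Two regularities in the log are worth noting. First, the logged val perplexity is simply exp(val loss). Second, the lr column traces a warmup to the configured max_lr of 3.5e-3 (reached near step 720) followed by what looks like a cosine decay to about 0.1 × max_lr by step 4374; the exact schedule is an inference from the logged values, not documented in this commit. A quick check of the perplexity relation against the first and last eval rows:

import math

# exp of the 4-decimal logged losses reproduces the logged perplexities
# up to rounding of the printed loss.
print(math.exp(3.9192))   # ~50.36, logged as 50.3600
print(math.exp(10.9076))  # ~54590.5, logged as 54591.7266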
12_head_baseline_lr_35e-4_head_dim_22/model_04374.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:16348390b7bc412c457a0dc2d4fbc164b05a09eea406f2e581d3061cf59c553d
+ size 96858242
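Once fetched through LFS, model_04374.pt (~97 MB) and optimizer_04374.pt are ordinary PyTorch checkpoint blobs. Their internal layout (raw state_dict vs. a wrapper dict) is not documented in this commit, so a cautious loading sketch would inspect the object first:

import torch

# Hypothetical inspection sketch: map to CPU and look at the top-level
# structure before assuming a particular checkpoint format. On recent
# torch versions, weights_only=False may be needed if the checkpoint
# pickles non-tensor objects.
ckpt = torch.load(
    "12_head_baseline_lr_35e-4_head_dim_22/model_04374.pt",
    map_location="cpu",
)
print(list(ckpt.keys()) if isinstance(ckpt, dict) else type(ckpt))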
12_head_baseline_lr_35e-4_head_dim_22/optimizer_04374.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9650a8e2331ece8361d7df703073ad8e03d2ba2d6d6a44a734ed43a9baa09e3b
+ size 187435910