andrew-healey committed
Commit 34a829d · verified · 1 Parent(s): c2696cf

Upload folder using huggingface_hub

4x_smaller_bs_half_lr_half_seq_len/args.json ADDED
@@ -0,0 +1 @@
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "4x_smaller_bs_half_lr_half_seq_len", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 12, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": null, "warmup_steps": null, "group": "shrinking_big_runs_2", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1337, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 512, "batch_size": 16, "total_batch_size": 131072, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": false, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 1.5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": null, "n_embd": 768}
4x_smaller_bs_half_lr_half_seq_len/dataloader_02499.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dc64603b8ea6a36a9ccabc0a5d7c7479b649579c80e31622d3273d19aa5ac2a8
+ size 964
4x_smaller_bs_half_lr_half_seq_len/log2.txt ADDED
@@ -0,0 +1,303 @@
+ max_steps: 2500
+ 0 val loss 10.9883
+ 0 val perplexity 59177.6016
+ 0 train 10.987196 (lr=2.0979e-08) (hash(x)=44290605)
+ 10 train 10.983902 (lr=2.3077e-07) (hash(x)=38564062)
+ 20 train 10.970700 (lr=4.4056e-07) (hash(x)=42646055)
+ 30 train 10.951057 (lr=6.5035e-07) (hash(x)=38523121)
+ 40 train 10.915579 (lr=8.6014e-07) (hash(x)=34789261)
+ 50 train 10.879971 (lr=1.0699e-06) (hash(x)=35228141)
+ 60 train 10.833032 (lr=1.2797e-06) (hash(x)=39587191)
+ 70 train 10.774123 (lr=1.4895e-06) (hash(x)=39506396)
+ 80 train 10.725358 (lr=1.6993e-06) (hash(x)=44465412)
+ 90 train 10.638872 (lr=1.9091e-06) (hash(x)=46934196)
+ 100 val loss 10.5456
+ 100 val perplexity 38009.1719
+ 100 train 10.548512 (lr=2.1189e-06) (hash(x)=37401443)
+ 110 train 10.453033 (lr=2.3287e-06) (hash(x)=36880637)
+ 120 train 10.333361 (lr=2.5385e-06) (hash(x)=40760978)
+ 130 train 10.240342 (lr=2.7483e-06) (hash(x)=34332830)
+ 140 train 10.125100 (lr=2.9580e-06) (hash(x)=41010950)
+ 150 train 10.016959 (lr=3.1678e-06) (hash(x)=41413911)
+ 160 train 9.938765 (lr=3.3776e-06) (hash(x)=43969526)
+ 170 train 9.858195 (lr=3.5874e-06) (hash(x)=36475394)
+ 180 train 9.822458 (lr=3.7972e-06) (hash(x)=34893035)
+ 190 train 9.778758 (lr=4.0070e-06) (hash(x)=39125577)
+ 200 val loss 9.7399
+ 200 val perplexity 16981.8379
+ 200 train 9.783566 (lr=4.2168e-06) (hash(x)=41789901)
+ 210 train 9.775907 (lr=4.4266e-06) (hash(x)=38495745)
+ 220 train 9.710948 (lr=4.6364e-06) (hash(x)=44203053)
+ 230 train 9.643511 (lr=4.8462e-06) (hash(x)=30353179)
+ 240 train 9.652902 (lr=5.0559e-06) (hash(x)=36564311)
+ 250 train 9.703348 (lr=5.2657e-06) (hash(x)=38917268)
+ 260 train 9.561985 (lr=5.4755e-06) (hash(x)=37263509)
+ 270 train 9.576735 (lr=5.6853e-06) (hash(x)=36081848)
+ 280 train 9.536775 (lr=5.8951e-06) (hash(x)=38157778)
+ 290 train 9.459773 (lr=6.1049e-06) (hash(x)=39695381)
+ 300 val loss 9.4520
+ 300 val perplexity 12733.2510
+ 300 train 9.454054 (lr=6.3147e-06) (hash(x)=38914105)
+ 310 train 9.422870 (lr=6.5245e-06) (hash(x)=40676791)
+ 320 train 9.391773 (lr=6.7343e-06) (hash(x)=42043458)
+ 330 train 9.351944 (lr=6.9441e-06) (hash(x)=43861760)
+ 340 train 9.303730 (lr=7.1538e-06) (hash(x)=42434651)
+ 350 train 9.334338 (lr=7.3636e-06) (hash(x)=42878691)
+ 360 train 9.132668 (lr=7.5734e-06) (hash(x)=37008216)
+ 370 train 9.063375 (lr=7.7832e-06) (hash(x)=44318637)
+ 380 train 9.078611 (lr=7.9930e-06) (hash(x)=39642311)
+ 390 train 8.941845 (lr=8.2028e-06) (hash(x)=38871537)
+ 400 val loss 8.9100
+ 400 val perplexity 7405.5044
+ 400 train 8.915948 (lr=8.4126e-06) (hash(x)=38494942)
+ 410 train 8.821939 (lr=8.6224e-06) (hash(x)=36423663)
+ 420 train 8.746850 (lr=8.8322e-06) (hash(x)=37628657)
+ 430 train 8.667839 (lr=9.0420e-06) (hash(x)=39593287)
+ 440 train 8.605666 (lr=9.2517e-06) (hash(x)=38939280)
+ 450 train 8.528643 (lr=9.4615e-06) (hash(x)=41400408)
+ 460 train 8.522301 (lr=9.6713e-06) (hash(x)=39718278)
+ 470 train 8.481720 (lr=9.8811e-06) (hash(x)=39685768)
+ 480 train 8.475543 (lr=1.0091e-05) (hash(x)=37108161)
+ 490 train 8.344969 (lr=1.0301e-05) (hash(x)=39677447)
+ 500 val loss 8.2716
+ 500 val perplexity 3911.3613
+ 500 train 8.293922 (lr=1.0510e-05) (hash(x)=38805353)
+ 510 train 8.172845 (lr=1.0720e-05) (hash(x)=37151493)
+ 520 train 8.160428 (lr=1.0930e-05) (hash(x)=42467708)
+ 530 train 8.005136 (lr=1.1140e-05) (hash(x)=39283765)
+ 540 train 8.018361 (lr=1.1350e-05) (hash(x)=36360047)
+ 550 train 8.012372 (lr=1.1559e-05) (hash(x)=36312866)
+ 560 train 7.872485 (lr=1.1769e-05) (hash(x)=41515622)
+ 570 train 7.856777 (lr=1.1979e-05) (hash(x)=36732231)
+ 580 train 7.792838 (lr=1.2189e-05) (hash(x)=41329361)
+ 590 train 7.696174 (lr=1.2399e-05) (hash(x)=38604113)
+ 600 val loss 7.7623
+ 600 val perplexity 2350.3792
+ 600 train 7.707370 (lr=1.2608e-05) (hash(x)=48521551)
+ 610 train 7.790296 (lr=1.2818e-05) (hash(x)=46162817)
+ 620 train 7.815807 (lr=1.3028e-05) (hash(x)=40949420)
+ 630 train 7.744004 (lr=1.3238e-05) (hash(x)=41577136)
+ 640 train 7.595581 (lr=1.3448e-05) (hash(x)=40803922)
+ 650 train 7.482319 (lr=1.3657e-05) (hash(x)=37788936)
+ 660 train 7.474987 (lr=1.3867e-05) (hash(x)=39766064)
+ 670 train 7.486190 (lr=1.4077e-05) (hash(x)=37945924)
+ 680 train 7.465253 (lr=1.4287e-05) (hash(x)=37529238)
+ 690 train 7.483014 (lr=1.4497e-05) (hash(x)=44662178)
+ 700 val loss 7.4751
+ 700 val perplexity 1763.6294
+ 700 train 7.442530 (lr=1.4706e-05) (hash(x)=40331394)
+ 710 train 7.356412 (lr=1.4916e-05) (hash(x)=37685289)
+ 720 train 7.286057 (lr=1.5000e-05) (hash(x)=36102366)
+ 730 train 7.324800 (lr=1.4998e-05) (hash(x)=40147452)
+ 740 train 7.267545 (lr=1.4993e-05) (hash(x)=43905866)
+ 750 train 7.399191 (lr=1.4987e-05) (hash(x)=42843608)
+ 760 train 7.409278 (lr=1.4979e-05) (hash(x)=37296224)
+ 770 train 7.313937 (lr=1.4968e-05) (hash(x)=41230008)
+ 780 train 7.221914 (lr=1.4956e-05) (hash(x)=36407773)
+ 790 train 7.376118 (lr=1.4941e-05) (hash(x)=42561722)
+ 800 val loss 7.3352
+ 800 val perplexity 1533.2998
+ 800 train 7.219405 (lr=1.4925e-05) (hash(x)=48340441)
+ 810 train 7.200429 (lr=1.4906e-05) (hash(x)=36261826)
+ 820 train 7.126314 (lr=1.4885e-05) (hash(x)=37918881)
+ 830 train 7.233766 (lr=1.4862e-05) (hash(x)=38608334)
+ 840 train 7.124430 (lr=1.4837e-05) (hash(x)=39403505)
+ 850 train 7.176360 (lr=1.4810e-05) (hash(x)=43741441)
+ 860 train 7.174889 (lr=1.4781e-05) (hash(x)=38030739)
+ 870 train 7.256120 (lr=1.4750e-05) (hash(x)=36064710)
+ 880 train 7.313010 (lr=1.4717e-05) (hash(x)=40349968)
+ 890 train 7.249527 (lr=1.4682e-05) (hash(x)=38715806)
+ 900 val loss 7.2489
+ 900 val perplexity 1406.4869
+ 900 train 7.263391 (lr=1.4645e-05) (hash(x)=35310433)
+ 910 train 7.183582 (lr=1.4606e-05) (hash(x)=37426246)
+ 920 train 7.150632 (lr=1.4565e-05) (hash(x)=39966506)
+ 930 train 7.126366 (lr=1.4522e-05) (hash(x)=37747046)
+ 940 train 7.163310 (lr=1.4478e-05) (hash(x)=42710584)
+ 950 train 7.182676 (lr=1.4431e-05) (hash(x)=41617662)
+ 960 train 7.174756 (lr=1.4382e-05) (hash(x)=40145473)
+ 970 train 7.098168 (lr=1.4332e-05) (hash(x)=38513059)
+ 980 train 7.043861 (lr=1.4279e-05) (hash(x)=38574237)
+ 990 train 7.148112 (lr=1.4225e-05) (hash(x)=37633181)
+ 1000 val loss 7.1861
+ 1000 val perplexity 1320.9963
+ 1000 train 7.011847 (lr=1.4169e-05) (hash(x)=42781027)
+ 1010 train 7.141905 (lr=1.4110e-05) (hash(x)=48329642)
+ 1020 train 6.997019 (lr=1.4051e-05) (hash(x)=36577969)
+ 1030 train 7.045058 (lr=1.3989e-05) (hash(x)=39789757)
+ 1040 train 6.968916 (lr=1.3926e-05) (hash(x)=43293764)
+ 1050 train 7.247198 (lr=1.3860e-05) (hash(x)=40077223)
+ 1060 train 7.111925 (lr=1.3793e-05) (hash(x)=41035822)
+ 1070 train 7.198096 (lr=1.3725e-05) (hash(x)=42069615)
+ 1080 train 7.062248 (lr=1.3654e-05) (hash(x)=42668159)
+ 1090 train 7.042990 (lr=1.3582e-05) (hash(x)=36477372)
+ 1100 val loss 7.1245
+ 1100 val perplexity 1242.0374
+ 1100 train 7.101448 (lr=1.3509e-05) (hash(x)=41357005)
+ 1110 train 7.115828 (lr=1.3434e-05) (hash(x)=42852875)
+ 1120 train 7.008416 (lr=1.3357e-05) (hash(x)=35310013)
+ 1130 train 7.075465 (lr=1.3278e-05) (hash(x)=51231839)
+ 1140 train 6.985238 (lr=1.3198e-05) (hash(x)=41431568)
+ 1150 train 7.004578 (lr=1.3117e-05) (hash(x)=41848748)
+ 1160 train 6.896893 (lr=1.3033e-05) (hash(x)=36702617)
+ 1170 train 7.079871 (lr=1.2949e-05) (hash(x)=40801937)
+ 1180 train 7.095430 (lr=1.2863e-05) (hash(x)=41936951)
+ 1190 train 6.837819 (lr=1.2775e-05) (hash(x)=41913508)
+ 1200 val loss 7.0866
+ 1200 val perplexity 1195.8895
+ 1200 train 6.969180 (lr=1.2687e-05) (hash(x)=36842847)
+ 1210 train 6.975598 (lr=1.2596e-05) (hash(x)=30499991)
+ 1220 train 6.957082 (lr=1.2505e-05) (hash(x)=41929560)
+ 1230 train 7.102791 (lr=1.2412e-05) (hash(x)=47738317)
+ 1240 train 7.120340 (lr=1.2318e-05) (hash(x)=38256802)
+ 1250 train 7.078293 (lr=1.2222e-05) (hash(x)=28755106)
+ 1260 train 7.030218 (lr=1.2126e-05) (hash(x)=38336891)
+ 1270 train 7.085950 (lr=1.2028e-05) (hash(x)=36084046)
+ 1280 train 7.075439 (lr=1.1929e-05) (hash(x)=37921865)
+ 1290 train 6.831306 (lr=1.1829e-05) (hash(x)=38656933)
+ 1300 val loss 7.0392
+ 1300 val perplexity 1140.5215
+ 1300 train 6.900213 (lr=1.1727e-05) (hash(x)=39331872)
+ 1310 train 6.927700 (lr=1.1625e-05) (hash(x)=35713543)
+ 1320 train 6.914740 (lr=1.1522e-05) (hash(x)=42221951)
+ 1330 train 7.036623 (lr=1.1417e-05) (hash(x)=49371177)
+ 1340 train 6.992999 (lr=1.1312e-05) (hash(x)=39825492)
+ 1350 train 6.863598 (lr=1.1205e-05) (hash(x)=38122709)
+ 1360 train 6.947845 (lr=1.1098e-05) (hash(x)=46034654)
+ 1370 train 6.871446 (lr=1.0990e-05) (hash(x)=36717950)
+ 1380 train 6.875348 (lr=1.0881e-05) (hash(x)=40410179)
+ 1390 train 6.896519 (lr=1.0771e-05) (hash(x)=46535981)
+ 1400 val loss 7.0105
+ 1400 val perplexity 1108.2163
+ 1400 train 6.854383 (lr=1.0661e-05) (hash(x)=35556187)
+ 1410 train 6.907253 (lr=1.0549e-05) (hash(x)=47156509)
+ 1420 train 7.004330 (lr=1.0437e-05) (hash(x)=44281694)
+ 1430 train 7.111400 (lr=1.0325e-05) (hash(x)=38833525)
+ 1440 train 7.146225 (lr=1.0211e-05) (hash(x)=42715085)
+ 1450 train 7.009366 (lr=1.0097e-05) (hash(x)=42032181)
+ 1460 train 7.039539 (lr=9.9827e-06) (hash(x)=42012177)
+ 1470 train 6.960066 (lr=9.8676e-06) (hash(x)=41738021)
+ 1480 train 6.869416 (lr=9.7520e-06) (hash(x)=48868810)
+ 1490 train 7.356836 (lr=9.6360e-06) (hash(x)=50198350)
+ 1500 val loss 6.9743
+ 1500 val perplexity 1068.7821
+ 1500 train 6.961478 (lr=9.5195e-06) (hash(x)=44018572)
+ 1510 train 6.918033 (lr=9.4026e-06) (hash(x)=40114593)
+ 1520 train 6.839454 (lr=9.2854e-06) (hash(x)=41475808)
+ 1530 train 6.941401 (lr=9.1678e-06) (hash(x)=42156287)
+ 1540 train 6.972492 (lr=9.0500e-06) (hash(x)=41903473)
+ 1550 train 6.897877 (lr=8.9319e-06) (hash(x)=45895413)
+ 1560 train 6.969629 (lr=8.8136e-06) (hash(x)=56812849)
+ 1570 train 6.823680 (lr=8.6952e-06) (hash(x)=38509270)
+ 1580 train 6.828520 (lr=8.5766e-06) (hash(x)=39318554)
+ 1590 train 6.943930 (lr=8.4579e-06) (hash(x)=43078392)
+ 1600 val loss 6.9467
+ 1600 val perplexity 1039.7418
+ 1600 train 7.069693 (lr=8.3391e-06) (hash(x)=41122655)
+ 1610 train 6.998374 (lr=8.2203e-06) (hash(x)=36120625)
+ 1620 train 6.855536 (lr=8.1015e-06) (hash(x)=35765792)
+ 1630 train 7.095515 (lr=7.9828e-06) (hash(x)=47089042)
+ 1640 train 6.852379 (lr=7.8641e-06) (hash(x)=50711559)
+ 1650 train 6.869404 (lr=7.7456e-06) (hash(x)=45815369)
+ 1660 train 6.730164 (lr=7.6272e-06) (hash(x)=32816402)
+ 1670 train 6.833857 (lr=7.5090e-06) (hash(x)=37278001)
+ 1680 train 6.896299 (lr=7.3910e-06) (hash(x)=40915022)
+ 1690 train 6.734944 (lr=7.2733e-06) (hash(x)=39134730)
+ 1700 val loss 6.9317
+ 1700 val perplexity 1024.1831
+ 1700 train 6.818570 (lr=7.1560e-06) (hash(x)=43322472)
+ 1710 train 6.880149 (lr=7.0389e-06) (hash(x)=44220064)
+ 1720 train 6.938684 (lr=6.9222e-06) (hash(x)=38892807)
+ 1730 train 7.044489 (lr=6.8060e-06) (hash(x)=43115553)
+ 1740 train 6.879650 (lr=6.6901e-06) (hash(x)=39288672)
+ 1750 train 6.975536 (lr=6.5748e-06) (hash(x)=41124961)
+ 1760 train 6.849125 (lr=6.4600e-06) (hash(x)=36048907)
+ 1770 train 6.849308 (lr=6.3457e-06) (hash(x)=38027887)
+ 1780 train 6.819469 (lr=6.2320e-06) (hash(x)=38145978)
+ 1790 train 6.910053 (lr=6.1190e-06) (hash(x)=44351536)
+ 1800 val loss 6.9073
+ 1800 val perplexity 999.4962
+ 1800 train 6.904034 (lr=6.0066e-06) (hash(x)=39518627)
+ 1810 train 6.755105 (lr=5.8949e-06) (hash(x)=41542967)
+ 1820 train 6.800942 (lr=5.7839e-06) (hash(x)=38500785)
+ 1830 train 6.844421 (lr=5.6737e-06) (hash(x)=37888270)
+ 1840 train 6.719558 (lr=5.5643e-06) (hash(x)=30199535)
+ 1850 train 6.728424 (lr=5.4558e-06) (hash(x)=40458277)
+ 1860 train 6.704224 (lr=5.3481e-06) (hash(x)=42984408)
+ 1870 train 6.698699 (lr=5.2413e-06) (hash(x)=39240321)
+ 1880 train 6.707630 (lr=5.1354e-06) (hash(x)=37169036)
+ 1890 train 6.782664 (lr=5.0305e-06) (hash(x)=40156837)
+ 1900 val loss 6.8976
+ 1900 val perplexity 989.8618
+ 1900 train 6.889485 (lr=4.9266e-06) (hash(x)=36032188)
+ 1910 train 6.965537 (lr=4.8237e-06) (hash(x)=42795519)
+ 1920 train 6.892735 (lr=4.7219e-06) (hash(x)=35518925)
+ 1930 train 6.951318 (lr=4.6211e-06) (hash(x)=40438046)
+ 1940 train 6.762111 (lr=4.5215e-06) (hash(x)=38291653)
+ 1950 train 6.887587 (lr=4.4231e-06) (hash(x)=42821483)
+ 1960 train 6.850200 (lr=4.3258e-06) (hash(x)=37781932)
+ 1970 train 6.939731 (lr=4.2298e-06) (hash(x)=42543275)
+ 1980 train 6.837874 (lr=4.1350e-06) (hash(x)=37836462)
+ 1990 train 6.870891 (lr=4.0414e-06) (hash(x)=39708342)
+ 2000 val loss 6.8812
+ 2000 val perplexity 973.8149
+ 2000 train 6.770031 (lr=3.9492e-06) (hash(x)=38050986)
+ 2010 train 6.963208 (lr=3.8583e-06) (hash(x)=43963198)
+ 2020 train 6.689958 (lr=3.7688e-06) (hash(x)=35638171)
+ 2030 train 6.697921 (lr=3.6806e-06) (hash(x)=33906793)
+ 2040 train 6.709877 (lr=3.5939e-06) (hash(x)=35880906)
+ 2050 train 6.786647 (lr=3.5086e-06) (hash(x)=34453353)
+ 2060 train 6.726500 (lr=3.4248e-06) (hash(x)=42567743)
+ 2070 train 6.740697 (lr=3.3425e-06) (hash(x)=40686891)
+ 2080 train 6.589478 (lr=3.2617e-06) (hash(x)=33852944)
+ 2090 train 6.879174 (lr=3.1824e-06) (hash(x)=37270130)
+ 2100 val loss 6.8731
+ 2100 val perplexity 965.9142
+ 2100 train 6.931888 (lr=3.1047e-06) (hash(x)=41991671)
+ 2110 train 6.881395 (lr=3.0286e-06) (hash(x)=38825122)
+ 2120 train 6.913153 (lr=2.9542e-06) (hash(x)=44277711)
+ 2130 train 6.899714 (lr=2.8813e-06) (hash(x)=53809569)
+ 2140 train 6.776664 (lr=2.8102e-06) (hash(x)=36559771)
+ 2150 train 6.823159 (lr=2.7407e-06) (hash(x)=40161967)
+ 2160 train 6.801567 (lr=2.6729e-06) (hash(x)=33539846)
+ 2170 train 6.866282 (lr=2.6068e-06) (hash(x)=39682712)
+ 2180 train 6.914186 (lr=2.5425e-06) (hash(x)=35780230)
+ 2190 train 6.804833 (lr=2.4800e-06) (hash(x)=43980566)
+ 2200 val loss 6.8645
+ 2200 val perplexity 957.6674
+ 2200 train 6.776078 (lr=2.4192e-06) (hash(x)=34935313)
+ 2210 train 6.824680 (lr=2.3603e-06) (hash(x)=39724733)
+ 2220 train 7.287607 (lr=2.3032e-06) (hash(x)=43703966)
+ 2230 train 6.779405 (lr=2.2479e-06) (hash(x)=38399765)
+ 2240 train 6.694452 (lr=2.1945e-06) (hash(x)=40842546)
+ 2250 train 6.624052 (lr=2.1429e-06) (hash(x)=32055949)
+ 2260 train 6.625170 (lr=2.0933e-06) (hash(x)=35806944)
+ 2270 train 6.864573 (lr=2.0455e-06) (hash(x)=43553351)
+ 2280 train 6.897438 (lr=1.9997e-06) (hash(x)=33250224)
+ 2290 train 6.814785 (lr=1.9558e-06) (hash(x)=37657105)
+ 2300 val loss 6.8601
+ 2300 val perplexity 953.4459
+ 2300 train 6.655499 (lr=1.9139e-06) (hash(x)=40443248)
+ 2310 train 6.826280 (lr=1.8739e-06) (hash(x)=35689864)
+ 2320 train 7.021714 (lr=1.8359e-06) (hash(x)=44241023)
+ 2330 train 6.872594 (lr=1.7999e-06) (hash(x)=43375051)
+ 2340 train 6.915829 (lr=1.7659e-06) (hash(x)=41936227)
+ 2350 train 6.948425 (lr=1.7339e-06) (hash(x)=39892044)
+ 2360 train 6.752195 (lr=1.7039e-06) (hash(x)=42188350)
+ 2370 train 6.759420 (lr=1.6759e-06) (hash(x)=39758002)
+ 2380 train 6.729066 (lr=1.6500e-06) (hash(x)=34237373)
+ 2390 train 6.715007 (lr=1.6261e-06) (hash(x)=36323919)
+ 2400 val loss 6.8532
+ 2400 val perplexity 946.8890
+ 2400 train 6.707891 (lr=1.6043e-06) (hash(x)=34074032)
+ 2410 train 6.629775 (lr=1.5845e-06) (hash(x)=34145856)
+ 2420 train 6.720111 (lr=1.5668e-06) (hash(x)=37905974)
+ 2430 train 6.743615 (lr=1.5512e-06) (hash(x)=46570873)
+ 2440 train 6.764393 (lr=1.5376e-06) (hash(x)=37393321)
+ 2450 train 6.661545 (lr=1.5261e-06) (hash(x)=36905360)
+ 2460 train 6.765852 (lr=1.5167e-06) (hash(x)=40719368)
+ 2470 train 6.696073 (lr=1.5094e-06) (hash(x)=40499528)
+ 2480 train 6.810740 (lr=1.5042e-06) (hash(x)=41998640)
+ 2490 train 6.761917 (lr=1.5010e-06) (hash(x)=41290438)
+ 2499 val loss 6.8515
+ 2499 val perplexity 945.3420
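(A small, hypothetical helper for reading this log back; it assumes only the line formats visible above, "<step> val loss <x>" and "<step> train <loss> (lr=...) (hash(x)=...)". The printed values are taken from the log itself.)

    import re

    val_loss, train_loss = {}, {}
    with open("4x_smaller_bs_half_lr_half_seq_len/log2.txt") as f:
        for line in f:
            if m := re.match(r"(\d+) val loss ([\d.]+)", line):
                val_loss[int(m[1])] = float(m[2])
            elif m := re.match(r"(\d+) train ([\d.]+) \(lr=([\d.e-]+)\)", line):
                train_loss[int(m[1])] = float(m[2])

    # Validation loss goes from 10.9883 at step 0 to 6.8515 at step 2499.
    print(val_loss[0], val_loss[2499])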
4x_smaller_bs_half_lr_half_seq_len/model_02499.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3eb06466b0ff3a942193bc5258fda0a8e4dce50a9835ef93150cf826b42b2ed0
+ size 508831106
4x_smaller_bs_half_lr_half_seq_len/optimizer_02499.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5550341cd898e3dfd0a75907622f1270519bea0386c5f7efdab4f5a7b282dccf
+ size 992507142
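(The .pt files above are stored as Git LFS pointers; below is a sketch of fetching and loading the final model checkpoint with huggingface_hub. The repo_id is a placeholder, and the contents of the saved state dict are not documented in this commit, so treat the loaded object as opaque until inspected.)

    import torch
    from huggingface_hub import hf_hub_download

    # repo_id is a placeholder -- substitute the repo this commit belongs to.
    ckpt_path = hf_hub_download(
        repo_id="andrew-healey/<repo-name>",
        filename="4x_smaller_bs_half_lr_half_seq_len/model_02499.pt",
    )
    # Size per the LFS pointer above: 508831106 bytes.
    state = torch.load(ckpt_path, map_location="cpu")
    print(type(state))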