andrew-healey committed on
Commit 7abd65e · verified · 1 Parent(s): 0d61a2a

Upload folder using huggingface_hub
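For reference, an upload like this is typically produced with huggingface_hub's upload_folder; a minimal sketch under assumptions (the repo_id below is a placeholder since the target repository is not named in this commit, and the token is taken from a prior huggingface-cli login):

    # Sketch only: mirrors the commit message above; repo_id is a placeholder.
    from huggingface_hub import HfApi

    api = HfApi()
    api.upload_folder(
        folder_path="logs/fix_compile_bug/disable_dynamo",
        path_in_repo="logs/fix_compile_bug/disable_dynamo",
        repo_id="<user>/<repo>",  # placeholder, not shown in this commit
        commit_message="Upload folder using huggingface_hub",
    )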
logs/fix_compile_bug/disable_dynamo/args.json ADDED
@@ -0,0 +1 @@
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "logs/fix_compile_bug/disable_dynamo", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 12, "n_embd": 264, "head_dim": 22, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 4375, "warmup_steps": 250, "group": "fix_compile_bug", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1337, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "n_latent_masks", "selection_head_linear_combo_scale": 1.0, "disable_selection_head_linear_combo_bias": false, "assert_latent_matches_no_head": false, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 32, "total_batch_size": 131072, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": 1, "init_latent_masks_to_identity": true, "latent_mask_scale": null, "latent_mask_sigmoid": false, "S_layernorm": false, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.003, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "disable_dynamo"}
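The args.json above is a single JSON object. A minimal sketch of reading it back and deriving the implied gradient-accumulation factor, assuming (nanoGPT-style, not stated in the file) that total_batch_size counts tokens rather than sequences:

    import json

    with open("logs/fix_compile_bug/disable_dynamo/args.json") as f:
        args = json.load(f)

    # 32 * 256 = 8192 tokens per micro-batch; 131072 // 8192 = 16 accumulation steps,
    # if total_batch_size is indeed measured in tokens (an assumption).
    tokens_per_micro_batch = args["batch_size"] * args["seq_len"]
    grad_accum_steps = args["total_batch_size"] // tokens_per_micro_batch
    print(args["max_lr"], args["max_steps"], grad_accum_steps)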
logs/fix_compile_bug/disable_dynamo/dataloader_04374.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6031fd3e2855a036f7a5531cc24555aabd1115f9dd6618b8b2ca6f55279ef0b2
+ size 964
logs/fix_compile_bug/disable_dynamo/log2.txt ADDED
@@ -0,0 +1,529 @@
1
+ max_steps: 4375
2
+ 0 val loss 11.2679
3
+ 0 val perplexity 78270.6641
4
+ 0 train 11.273981 (lr=4.1958e-06) (hash(x)=42405186)
5
+ 10 train 9.975639 (lr=4.6154e-05) (hash(x)=38970388)
6
+ 20 train 9.416135 (lr=8.8112e-05) (hash(x)=38844124)
7
+ 30 train 8.637611 (lr=1.3007e-04) (hash(x)=37597230)
8
+ 40 train 7.895756 (lr=1.7203e-04) (hash(x)=40798817)
9
+ 50 train 7.700012 (lr=2.1399e-04) (hash(x)=36405572)
10
+ 60 train 7.650867 (lr=2.5594e-04) (hash(x)=36066574)
11
+ 70 train 7.536690 (lr=2.9790e-04) (hash(x)=43811010)
12
+ 80 train 7.531866 (lr=3.3986e-04) (hash(x)=40628178)
13
+ 90 train 7.173455 (lr=3.8182e-04) (hash(x)=43681905)
14
+ 100 val loss 7.0869
15
+ 100 val perplexity 1196.2357
16
+ 100 train 6.970158 (lr=4.2378e-04) (hash(x)=42344017)
17
+ 110 train 7.016336 (lr=4.6573e-04) (hash(x)=39853582)
18
+ 120 train 6.827969 (lr=5.0769e-04) (hash(x)=39880304)
19
+ 130 train 6.595816 (lr=5.4965e-04) (hash(x)=36124250)
20
+ 140 train 6.529469 (lr=5.9161e-04) (hash(x)=45012802)
21
+ 150 train 6.381016 (lr=6.3357e-04) (hash(x)=36325989)
22
+ 160 train 6.290360 (lr=6.7552e-04) (hash(x)=40241802)
23
+ 170 train 6.293976 (lr=7.1748e-04) (hash(x)=42401819)
24
+ 180 train 6.203776 (lr=7.5944e-04) (hash(x)=40459165)
25
+ 190 train 6.184151 (lr=8.0140e-04) (hash(x)=41967324)
26
+ 200 val loss 6.1795
27
+ 200 val perplexity 482.7632
28
+ 200 train 6.227951 (lr=8.4336e-04) (hash(x)=45796175)
29
+ 210 train 6.128983 (lr=8.8531e-04) (hash(x)=35252224)
30
+ 220 train 6.038887 (lr=9.2727e-04) (hash(x)=40863261)
31
+ 230 train 5.878501 (lr=9.6923e-04) (hash(x)=40738876)
32
+ 240 train 5.875349 (lr=1.0112e-03) (hash(x)=46763138)
33
+ 250 train 5.924514 (lr=1.0531e-03) (hash(x)=46920901)
34
+ 260 train 5.706999 (lr=1.0951e-03) (hash(x)=43475159)
35
+ 270 train 5.721182 (lr=1.1371e-03) (hash(x)=47424242)
36
+ 280 train 5.694764 (lr=1.1790e-03) (hash(x)=40412635)
37
+ 290 train 5.621622 (lr=1.2210e-03) (hash(x)=31782544)
38
+ 300 val loss 5.6890
39
+ 300 val perplexity 295.5923
40
+ 300 train 5.579260 (lr=1.2629e-03) (hash(x)=34812634)
41
+ 310 train 5.493578 (lr=1.3049e-03) (hash(x)=44178651)
42
+ 320 train 5.656520 (lr=1.3469e-03) (hash(x)=43205331)
43
+ 330 train 5.600074 (lr=1.3888e-03) (hash(x)=41565398)
44
+ 340 train 5.618014 (lr=1.4308e-03) (hash(x)=41719738)
45
+ 350 train 5.468098 (lr=1.4727e-03) (hash(x)=46108058)
46
+ 360 train 5.431486 (lr=1.5147e-03) (hash(x)=37445628)
47
+ 370 train 5.313436 (lr=1.5566e-03) (hash(x)=36085988)
48
+ 380 train 5.382994 (lr=1.5986e-03) (hash(x)=37645228)
49
+ 390 train 5.382205 (lr=1.6406e-03) (hash(x)=35483844)
50
+ 400 val loss 5.3499
51
+ 400 val perplexity 210.5906
52
+ 400 train 5.355009 (lr=1.6825e-03) (hash(x)=41699751)
53
+ 410 train 5.197707 (lr=1.7245e-03) (hash(x)=32755762)
54
+ 420 train 5.192819 (lr=1.7664e-03) (hash(x)=35953145)
55
+ 430 train 5.181862 (lr=1.8084e-03) (hash(x)=35587716)
56
+ 440 train 5.127245 (lr=1.8503e-03) (hash(x)=39335888)
57
+ 450 train 5.163924 (lr=1.8923e-03) (hash(x)=44314559)
58
+ 460 train 5.238398 (lr=1.9343e-03) (hash(x)=42950490)
59
+ 470 train 5.305055 (lr=1.9762e-03) (hash(x)=41461061)
60
+ 480 train 5.228039 (lr=2.0182e-03) (hash(x)=41993931)
61
+ 490 train 5.143535 (lr=2.0601e-03) (hash(x)=37544536)
62
+ 500 val loss 5.1118
63
+ 500 val perplexity 165.9736
64
+ 500 train 5.105995 (lr=2.1021e-03) (hash(x)=40203267)
65
+ 510 train 5.111572 (lr=2.1441e-03) (hash(x)=41388334)
66
+ 520 train 5.063201 (lr=2.1860e-03) (hash(x)=34662125)
67
+ 530 train 5.142380 (lr=2.2280e-03) (hash(x)=37501995)
68
+ 540 train 5.057854 (lr=2.2699e-03) (hash(x)=41263260)
69
+ 550 train 4.970294 (lr=2.3119e-03) (hash(x)=49637102)
70
+ 560 train 4.888000 (lr=2.3538e-03) (hash(x)=47434359)
71
+ 570 train 4.858415 (lr=2.3958e-03) (hash(x)=42912781)
72
+ 580 train 4.932686 (lr=2.4378e-03) (hash(x)=48538862)
73
+ 590 train 4.779510 (lr=2.4797e-03) (hash(x)=38595222)
74
+ 600 val loss 4.9338
75
+ 600 val perplexity 138.9072
76
+ 600 train 4.868814 (lr=2.5217e-03) (hash(x)=37146098)
77
+ 610 train 4.907523 (lr=2.5636e-03) (hash(x)=50116590)
78
+ 620 train 4.870512 (lr=2.6056e-03) (hash(x)=41832213)
79
+ 630 train 4.979991 (lr=2.6476e-03) (hash(x)=39030069)
80
+ 640 train 4.832273 (lr=2.6895e-03) (hash(x)=46237222)
81
+ 650 train 4.845035 (lr=2.7315e-03) (hash(x)=42787835)
82
+ 660 train 4.755742 (lr=2.7734e-03) (hash(x)=39109857)
83
+ 670 train 4.849321 (lr=2.8154e-03) (hash(x)=39462485)
84
+ 680 train 4.739604 (lr=2.8573e-03) (hash(x)=41744849)
85
+ 690 train 4.757978 (lr=2.8993e-03) (hash(x)=38408104)
86
+ 700 val loss 4.7751
87
+ 700 val perplexity 118.5169
88
+ 700 train 4.661257 (lr=2.9413e-03) (hash(x)=41890521)
89
+ 710 train 4.725952 (lr=2.9832e-03) (hash(x)=41416330)
90
+ 720 train 4.627008 (lr=3.0000e-03) (hash(x)=38760874)
91
+ 730 train 4.717939 (lr=2.9999e-03) (hash(x)=37451366)
92
+ 740 train 4.613894 (lr=2.9997e-03) (hash(x)=36120996)
93
+ 750 train 4.714878 (lr=2.9994e-03) (hash(x)=34667979)
94
+ 760 train 4.708947 (lr=2.9990e-03) (hash(x)=50862178)
95
+ 770 train 4.653719 (lr=2.9985e-03) (hash(x)=41230008)
96
+ 780 train 4.555442 (lr=2.9979e-03) (hash(x)=36407773)
97
+ 790 train 4.564205 (lr=2.9972e-03) (hash(x)=42561722)
98
+ 800 val loss 4.6624
99
+ 800 val perplexity 105.8869
100
+ 800 train 4.539184 (lr=2.9964e-03) (hash(x)=48340441)
101
+ 810 train 4.608728 (lr=2.9955e-03) (hash(x)=36261826)
102
+ 820 train 4.426336 (lr=2.9945e-03) (hash(x)=37918881)
103
+ 830 train 4.543477 (lr=2.9934e-03) (hash(x)=38608334)
104
+ 840 train 4.282201 (lr=2.9922e-03) (hash(x)=39403505)
105
+ 850 train 4.398445 (lr=2.9909e-03) (hash(x)=43741441)
106
+ 860 train 4.351716 (lr=2.9896e-03) (hash(x)=38030739)
107
+ 870 train 4.652875 (lr=2.9881e-03) (hash(x)=36064710)
108
+ 880 train 4.812729 (lr=2.9865e-03) (hash(x)=40349968)
109
+ 890 train 4.567600 (lr=2.9848e-03) (hash(x)=38715806)
110
+ 900 val loss 4.5712
111
+ 900 val perplexity 96.6582
112
+ 900 train 4.633204 (lr=2.9830e-03) (hash(x)=35310433)
113
+ 910 train 4.521075 (lr=2.9811e-03) (hash(x)=37426246)
114
+ 920 train 4.507960 (lr=2.9792e-03) (hash(x)=39966506)
115
+ 930 train 4.523317 (lr=2.9771e-03) (hash(x)=37747046)
116
+ 940 train 4.478729 (lr=2.9749e-03) (hash(x)=42710584)
117
+ 950 train 4.501898 (lr=2.9726e-03) (hash(x)=41617662)
118
+ 960 train 4.497109 (lr=2.9703e-03) (hash(x)=40145473)
119
+ 970 train 4.485673 (lr=2.9678e-03) (hash(x)=38513059)
120
+ 980 train 4.486294 (lr=2.9652e-03) (hash(x)=38574237)
121
+ 990 train 4.401441 (lr=2.9626e-03) (hash(x)=37633181)
122
+ 1000 val loss 4.5003
123
+ 1000 val perplexity 90.0464
124
+ 1000 train 4.278323 (lr=2.9598e-03) (hash(x)=42781027)
125
+ 1010 train 4.420980 (lr=2.9570e-03) (hash(x)=48329642)
126
+ 1020 train 4.287330 (lr=2.9540e-03) (hash(x)=36577969)
127
+ 1030 train 4.257991 (lr=2.9510e-03) (hash(x)=39789757)
128
+ 1040 train 4.196259 (lr=2.9478e-03) (hash(x)=43293764)
129
+ 1050 train 4.534105 (lr=2.9446e-03) (hash(x)=40077223)
130
+ 1060 train 4.544160 (lr=2.9412e-03) (hash(x)=41035822)
131
+ 1070 train 4.456591 (lr=2.9378e-03) (hash(x)=42069615)
132
+ 1080 train 4.455237 (lr=2.9343e-03) (hash(x)=42668159)
133
+ 1090 train 4.429324 (lr=2.9307e-03) (hash(x)=36477372)
134
+ 1100 val loss 4.4191
135
+ 1100 val perplexity 83.0176
136
+ 1100 train 4.482910 (lr=2.9270e-03) (hash(x)=41357005)
137
+ 1110 train 4.439171 (lr=2.9231e-03) (hash(x)=42852875)
138
+ 1120 train 4.483402 (lr=2.9192e-03) (hash(x)=35310013)
139
+ 1130 train 4.478104 (lr=2.9152e-03) (hash(x)=51231839)
140
+ 1140 train 4.419302 (lr=2.9112e-03) (hash(x)=41431568)
141
+ 1150 train 4.400342 (lr=2.9070e-03) (hash(x)=41848748)
142
+ 1160 train 4.334244 (lr=2.9027e-03) (hash(x)=36702617)
143
+ 1170 train 4.339780 (lr=2.8983e-03) (hash(x)=40801937)
144
+ 1180 train 4.298719 (lr=2.8939e-03) (hash(x)=41936951)
145
+ 1190 train 4.188620 (lr=2.8893e-03) (hash(x)=41913508)
146
+ 1200 val loss 4.4159
147
+ 1200 val perplexity 82.7526
148
+ 1200 train 4.202028 (lr=2.8847e-03) (hash(x)=36842847)
149
+ 1210 train 4.298784 (lr=2.8800e-03) (hash(x)=30499991)
150
+ 1220 train 4.340014 (lr=2.8751e-03) (hash(x)=41929560)
151
+ 1230 train 4.485812 (lr=2.8702e-03) (hash(x)=47738317)
152
+ 1240 train 4.494402 (lr=2.8652e-03) (hash(x)=38256802)
153
+ 1250 train 4.481100 (lr=2.8601e-03) (hash(x)=28755106)
154
+ 1260 train 4.425727 (lr=2.8550e-03) (hash(x)=38336891)
155
+ 1270 train 4.378193 (lr=2.8497e-03) (hash(x)=36084046)
156
+ 1280 train 4.334064 (lr=2.8443e-03) (hash(x)=37921865)
157
+ 1290 train 4.417131 (lr=2.8389e-03) (hash(x)=38656933)
158
+ 1300 val loss 4.3660
159
+ 1300 val perplexity 78.7303
160
+ 1300 train 4.394099 (lr=2.8333e-03) (hash(x)=39331872)
161
+ 1310 train 4.353565 (lr=2.8277e-03) (hash(x)=35713543)
162
+ 1320 train 4.327202 (lr=2.8220e-03) (hash(x)=42221951)
163
+ 1330 train 4.306753 (lr=2.8162e-03) (hash(x)=49371177)
164
+ 1340 train 4.414113 (lr=2.8103e-03) (hash(x)=39825492)
165
+ 1350 train 4.340030 (lr=2.8044e-03) (hash(x)=38122709)
166
+ 1360 train 4.348192 (lr=2.7983e-03) (hash(x)=46034654)
167
+ 1370 train 4.082481 (lr=2.7922e-03) (hash(x)=36717950)
168
+ 1380 train 4.120973 (lr=2.7860e-03) (hash(x)=40410179)
169
+ 1390 train 4.107861 (lr=2.7797e-03) (hash(x)=46535981)
170
+ 1400 val loss 4.3467
171
+ 1400 val perplexity 77.2207
172
+ 1400 train 4.160233 (lr=2.7733e-03) (hash(x)=35556187)
173
+ 1410 train 4.265544 (lr=2.7668e-03) (hash(x)=47156509)
174
+ 1420 train 4.412834 (lr=2.7603e-03) (hash(x)=44281694)
175
+ 1430 train 4.396552 (lr=2.7536e-03) (hash(x)=38833525)
176
+ 1440 train 4.453507 (lr=2.7469e-03) (hash(x)=42715085)
177
+ 1450 train 4.492407 (lr=2.7401e-03) (hash(x)=42032181)
178
+ 1460 train 4.307829 (lr=2.7333e-03) (hash(x)=42012177)
179
+ 1470 train 4.321888 (lr=2.7263e-03) (hash(x)=41738021)
180
+ 1480 train 4.383871 (lr=2.7193e-03) (hash(x)=48868810)
181
+ 1490 train 4.347469 (lr=2.7121e-03) (hash(x)=50198350)
182
+ 1500 val loss 4.2934
183
+ 1500 val perplexity 73.2185
184
+ 1500 train 4.362706 (lr=2.7050e-03) (hash(x)=44018572)
185
+ 1510 train 4.266201 (lr=2.6977e-03) (hash(x)=40114593)
186
+ 1520 train 4.216835 (lr=2.6903e-03) (hash(x)=41475808)
187
+ 1530 train 4.290930 (lr=2.6829e-03) (hash(x)=44249710)
188
+ 1540 train 4.374002 (lr=2.6754e-03) (hash(x)=38440727)
189
+ 1550 train 4.366827 (lr=2.6678e-03) (hash(x)=38021994)
190
+ 1560 train 4.319133 (lr=2.6602e-03) (hash(x)=39234436)
191
+ 1570 train 4.217455 (lr=2.6525e-03) (hash(x)=45044408)
192
+ 1580 train 4.266973 (lr=2.6447e-03) (hash(x)=38890254)
193
+ 1590 train 4.232738 (lr=2.6368e-03) (hash(x)=39091171)
194
+ 1600 val loss 4.2598
195
+ 1600 val perplexity 70.7929
196
+ 1600 train 4.336029 (lr=2.6289e-03) (hash(x)=36528068)
197
+ 1610 train 4.221199 (lr=2.6208e-03) (hash(x)=41836863)
198
+ 1620 train 4.288796 (lr=2.6128e-03) (hash(x)=41871021)
199
+ 1630 train 4.370456 (lr=2.6046e-03) (hash(x)=41234817)
200
+ 1640 train 4.267187 (lr=2.5964e-03) (hash(x)=37912972)
201
+ 1650 train 4.193266 (lr=2.5881e-03) (hash(x)=43040972)
202
+ 1660 train 4.095904 (lr=2.5797e-03) (hash(x)=26922739)
203
+ 1670 train 4.166198 (lr=2.5713e-03) (hash(x)=39907717)
204
+ 1680 train 4.118997 (lr=2.5628e-03) (hash(x)=45004538)
205
+ 1690 train 4.105637 (lr=2.5542e-03) (hash(x)=39394756)
206
+ 1700 val loss 4.2452
207
+ 1700 val perplexity 69.7687
208
+ 1700 train 4.141019 (lr=2.5455e-03) (hash(x)=38805510)
209
+ 1710 train 4.054042 (lr=2.5368e-03) (hash(x)=34950642)
210
+ 1720 train 4.389088 (lr=2.5281e-03) (hash(x)=35167195)
211
+ 1730 train 4.385214 (lr=2.5192e-03) (hash(x)=46592240)
212
+ 1740 train 4.172699 (lr=2.5103e-03) (hash(x)=43879336)
213
+ 1750 train 4.285302 (lr=2.5014e-03) (hash(x)=42298161)
214
+ 1760 train 4.289519 (lr=2.4924e-03) (hash(x)=37358957)
215
+ 1770 train 4.234423 (lr=2.4833e-03) (hash(x)=41641994)
216
+ 1780 train 4.223020 (lr=2.4741e-03) (hash(x)=44388117)
217
+ 1790 train 4.242696 (lr=2.4649e-03) (hash(x)=45220046)
218
+ 1800 val loss 4.2041
219
+ 1800 val perplexity 66.9613
220
+ 1800 train 4.268264 (lr=2.4556e-03) (hash(x)=43748263)
221
+ 1810 train 4.077793 (lr=2.4463e-03) (hash(x)=26733445)
222
+ 1820 train 4.214490 (lr=2.4369e-03) (hash(x)=38491163)
223
+ 1830 train 4.215728 (lr=2.4275e-03) (hash(x)=35359413)
224
+ 1840 train 4.122804 (lr=2.4180e-03) (hash(x)=40482942)
225
+ 1850 train 4.013588 (lr=2.4084e-03) (hash(x)=35252918)
226
+ 1860 train 4.095462 (lr=2.3988e-03) (hash(x)=38221117)
227
+ 1870 train 4.142855 (lr=2.3891e-03) (hash(x)=41438728)
228
+ 1880 train 3.967093 (lr=2.3794e-03) (hash(x)=39801057)
229
+ 1890 train 4.166050 (lr=2.3696e-03) (hash(x)=35517965)
230
+ 1900 val loss 4.2127
231
+ 1900 val perplexity 67.5387
232
+ 1900 train 4.246020 (lr=2.3598e-03) (hash(x)=43795495)
233
+ 1910 train 4.236924 (lr=2.3499e-03) (hash(x)=47235154)
234
+ 1920 train 4.269849 (lr=2.3400e-03) (hash(x)=42139138)
235
+ 1930 train 4.215496 (lr=2.3300e-03) (hash(x)=38910301)
236
+ 1940 train 4.086425 (lr=2.3200e-03) (hash(x)=41819422)
237
+ 1950 train 4.211326 (lr=2.3099e-03) (hash(x)=44434112)
238
+ 1960 train 4.230421 (lr=2.2998e-03) (hash(x)=47468800)
239
+ 1970 train 4.282046 (lr=2.2896e-03) (hash(x)=40489494)
240
+ 1980 train 4.267266 (lr=2.2793e-03) (hash(x)=41660256)
241
+ 1990 train 4.077564 (lr=2.2691e-03) (hash(x)=36821154)
242
+ 2000 val loss 4.1867
243
+ 2000 val perplexity 65.8030
244
+ 2000 train 4.165920 (lr=2.2588e-03) (hash(x)=46230173)
245
+ 2010 train 4.140057 (lr=2.2484e-03) (hash(x)=38867399)
246
+ 2020 train 4.147109 (lr=2.2380e-03) (hash(x)=39221022)
247
+ 2030 train 4.118986 (lr=2.2275e-03) (hash(x)=40395859)
248
+ 2040 train 4.099389 (lr=2.2170e-03) (hash(x)=41260539)
249
+ 2050 train 4.094749 (lr=2.2065e-03) (hash(x)=40471948)
250
+ 2060 train 4.043067 (lr=2.1959e-03) (hash(x)=34614958)
251
+ 2070 train 4.071798 (lr=2.1853e-03) (hash(x)=40426097)
252
+ 2080 train 3.898314 (lr=2.1746e-03) (hash(x)=37618014)
253
+ 2090 train 4.204595 (lr=2.1639e-03) (hash(x)=43655438)
254
+ 2100 val loss 4.1433
255
+ 2100 val perplexity 63.0112
256
+ 2100 train 4.187691 (lr=2.1532e-03) (hash(x)=39728341)
257
+ 2110 train 4.159398 (lr=2.1424e-03) (hash(x)=40996707)
258
+ 2120 train 4.298484 (lr=2.1316e-03) (hash(x)=37072048)
259
+ 2130 train 4.114953 (lr=2.1208e-03) (hash(x)=37668129)
260
+ 2140 train 4.156023 (lr=2.1099e-03) (hash(x)=42862485)
261
+ 2150 train 4.322575 (lr=2.0990e-03) (hash(x)=40933735)
262
+ 2160 train 4.225547 (lr=2.0881e-03) (hash(x)=42952511)
263
+ 2170 train 4.098534 (lr=2.0771e-03) (hash(x)=38227732)
264
+ 2180 train 4.145338 (lr=2.0661e-03) (hash(x)=42919169)
265
+ 2190 train 4.131529 (lr=2.0550e-03) (hash(x)=38635637)
266
+ 2200 val loss 4.1393
267
+ 2200 val perplexity 62.7589
268
+ 2200 train 4.108107 (lr=2.0440e-03) (hash(x)=43974656)
269
+ 2210 train 4.095776 (lr=2.0329e-03) (hash(x)=35923510)
270
+ 2220 train 4.249701 (lr=2.0217e-03) (hash(x)=38698936)
271
+ 2230 train 3.989796 (lr=2.0106e-03) (hash(x)=41822854)
272
+ 2240 train 3.969564 (lr=1.9994e-03) (hash(x)=39069402)
273
+ 2250 train 4.022895 (lr=1.9882e-03) (hash(x)=38428746)
274
+ 2260 train 3.890714 (lr=1.9770e-03) (hash(x)=41190125)
275
+ 2270 train 4.249081 (lr=1.9657e-03) (hash(x)=39197540)
276
+ 2280 train 4.193214 (lr=1.9544e-03) (hash(x)=41964710)
277
+ 2290 train 4.057076 (lr=1.9431e-03) (hash(x)=35566499)
278
+ 2300 val loss 4.1265
279
+ 2300 val perplexity 61.9609
280
+ 2300 train 4.060265 (lr=1.9318e-03) (hash(x)=40710061)
281
+ 2310 train 4.024505 (lr=1.9205e-03) (hash(x)=34084675)
282
+ 2320 train 4.308280 (lr=1.9091e-03) (hash(x)=43192252)
283
+ 2330 train 4.207595 (lr=1.8977e-03) (hash(x)=39365674)
284
+ 2340 train 4.109126 (lr=1.8863e-03) (hash(x)=41693707)
285
+ 2350 train 4.162141 (lr=1.8749e-03) (hash(x)=41246790)
286
+ 2360 train 4.092557 (lr=1.8635e-03) (hash(x)=44379115)
287
+ 2370 train 4.051098 (lr=1.8520e-03) (hash(x)=37114071)
288
+ 2380 train 4.053510 (lr=1.8406e-03) (hash(x)=40531187)
289
+ 2390 train 4.159568 (lr=1.8291e-03) (hash(x)=35190591)
290
+ 2400 val loss 4.0920
291
+ 2400 val perplexity 59.8585
292
+ 2400 train 4.174000 (lr=1.8176e-03) (hash(x)=46483290)
293
+ 2410 train 4.015300 (lr=1.8061e-03) (hash(x)=36994364)
294
+ 2420 train 4.069708 (lr=1.7946e-03) (hash(x)=43774123)
295
+ 2430 train 4.122134 (lr=1.7830e-03) (hash(x)=39016150)
296
+ 2440 train 4.082635 (lr=1.7715e-03) (hash(x)=41424235)
297
+ 2450 train 3.906054 (lr=1.7600e-03) (hash(x)=40781424)
298
+ 2460 train 3.953524 (lr=1.7484e-03) (hash(x)=36871418)
299
+ 2470 train 3.926154 (lr=1.7368e-03) (hash(x)=40398678)
300
+ 2480 train 3.977890 (lr=1.7253e-03) (hash(x)=40019705)
301
+ 2490 train 4.025047 (lr=1.7137e-03) (hash(x)=43210658)
302
+ 2500 val loss 4.0838
303
+ 2500 val perplexity 59.3734
304
+ 2500 train 4.114532 (lr=1.7021e-03) (hash(x)=40992954)
305
+ 2510 train 4.154217 (lr=1.6906e-03) (hash(x)=40420845)
306
+ 2520 train 4.049014 (lr=1.6790e-03) (hash(x)=39278025)
307
+ 2530 train 4.033954 (lr=1.6674e-03) (hash(x)=40118798)
308
+ 2540 train 4.256076 (lr=1.6558e-03) (hash(x)=36453463)
309
+ 2550 train 4.053461 (lr=1.6442e-03) (hash(x)=38336991)
310
+ 2560 train 4.155300 (lr=1.6326e-03) (hash(x)=40307518)
311
+ 2570 train 3.963542 (lr=1.6210e-03) (hash(x)=31378688)
312
+ 2580 train 4.042953 (lr=1.6094e-03) (hash(x)=38888092)
313
+ 2590 train 4.095569 (lr=1.5979e-03) (hash(x)=43391661)
314
+ 2600 val loss 4.0566
315
+ 2600 val perplexity 57.7797
316
+ 2600 train 3.990071 (lr=1.5863e-03) (hash(x)=43784447)
317
+ 2610 train 4.043756 (lr=1.5747e-03) (hash(x)=44952165)
318
+ 2620 train 3.943068 (lr=1.5632e-03) (hash(x)=37357711)
319
+ 2630 train 4.015451 (lr=1.5516e-03) (hash(x)=41317604)
320
+ 2640 train 3.811038 (lr=1.5400e-03) (hash(x)=43473476)
321
+ 2650 train 3.964092 (lr=1.5285e-03) (hash(x)=36968500)
322
+ 2660 train 3.978471 (lr=1.5170e-03) (hash(x)=37853410)
323
+ 2670 train 3.840962 (lr=1.5054e-03) (hash(x)=55116905)
324
+ 2680 train 4.079848 (lr=1.4939e-03) (hash(x)=43789962)
325
+ 2690 train 3.959749 (lr=1.4824e-03) (hash(x)=47653410)
326
+ 2700 val loss 4.0468
327
+ 2700 val perplexity 57.2122
328
+ 2700 train 4.103662 (lr=1.4709e-03) (hash(x)=42034666)
329
+ 2710 train 4.224947 (lr=1.4594e-03) (hash(x)=34261377)
330
+ 2720 train 4.061224 (lr=1.4480e-03) (hash(x)=43776091)
331
+ 2730 train 4.094453 (lr=1.4365e-03) (hash(x)=38652143)
332
+ 2740 train 4.144126 (lr=1.4251e-03) (hash(x)=37399153)
333
+ 2750 train 4.080681 (lr=1.4137e-03) (hash(x)=41956544)
334
+ 2760 train 4.349882 (lr=1.4023e-03) (hash(x)=37661382)
335
+ 2770 train 3.994900 (lr=1.3909e-03) (hash(x)=45791682)
336
+ 2780 train 3.963238 (lr=1.3795e-03) (hash(x)=40218424)
337
+ 2790 train 3.982559 (lr=1.3682e-03) (hash(x)=38121125)
338
+ 2800 val loss 4.0294
339
+ 2800 val perplexity 56.2294
340
+ 2800 train 4.045098 (lr=1.3569e-03) (hash(x)=38892664)
341
+ 2810 train 4.023943 (lr=1.3456e-03) (hash(x)=41180129)
342
+ 2820 train 3.790727 (lr=1.3343e-03) (hash(x)=41017023)
343
+ 2830 train 3.893616 (lr=1.3230e-03) (hash(x)=42197711)
344
+ 2840 train 3.922186 (lr=1.3118e-03) (hash(x)=37993621)
345
+ 2850 train 3.830526 (lr=1.3006e-03) (hash(x)=34469615)
346
+ 2860 train 4.014174 (lr=1.2894e-03) (hash(x)=42013555)
347
+ 2870 train 4.069732 (lr=1.2783e-03) (hash(x)=37463962)
348
+ 2880 train 4.032180 (lr=1.2671e-03) (hash(x)=36874351)
349
+ 2890 train 3.996437 (lr=1.2560e-03) (hash(x)=43649626)
350
+ 2900 val loss 4.0124
351
+ 2900 val perplexity 55.2768
352
+ 2900 train 4.124608 (lr=1.2450e-03) (hash(x)=42022821)
353
+ 2910 train 3.986201 (lr=1.2339e-03) (hash(x)=36933935)
354
+ 2920 train 3.983370 (lr=1.2229e-03) (hash(x)=41313698)
355
+ 2930 train 4.023007 (lr=1.2119e-03) (hash(x)=36341574)
356
+ 2940 train 4.002404 (lr=1.2010e-03) (hash(x)=35505945)
357
+ 2950 train 4.040689 (lr=1.1901e-03) (hash(x)=47407907)
358
+ 2960 train 3.961760 (lr=1.1792e-03) (hash(x)=39217310)
359
+ 2970 train 3.973819 (lr=1.1684e-03) (hash(x)=35391019)
360
+ 2980 train 3.892808 (lr=1.1576e-03) (hash(x)=37929528)
361
+ 2990 train 4.500646 (lr=1.1468e-03) (hash(x)=42604487)
362
+ 3000 val loss 4.0054
363
+ 3000 val perplexity 54.8927
364
+ 3000 train 3.934869 (lr=1.1361e-03) (hash(x)=36439520)
365
+ 3010 train 3.787461 (lr=1.1254e-03) (hash(x)=36110173)
366
+ 3020 train 3.802625 (lr=1.1147e-03) (hash(x)=41712329)
367
+ 3030 train 3.856707 (lr=1.1041e-03) (hash(x)=39541717)
368
+ 3040 train 3.890872 (lr=1.0935e-03) (hash(x)=38203029)
369
+ 3050 train 4.218014 (lr=1.0830e-03) (hash(x)=45668569)
370
+ 3060 train 3.906355 (lr=1.0725e-03) (hash(x)=42053235)
371
+ 3070 train 3.894423 (lr=1.0620e-03) (hash(x)=38772023)
372
+ 3080 train 3.918390 (lr=1.0516e-03) (hash(x)=40842122)
373
+ 3090 train 3.866436 (lr=1.0412e-03) (hash(x)=43911820)
374
+ 3100 val loss 3.9956
375
+ 3100 val perplexity 54.3582
376
+ 3100 train 3.910733 (lr=1.0309e-03) (hash(x)=42366363)
377
+ 3110 train 3.900075 (lr=1.0207e-03) (hash(x)=42499370)
378
+ 3120 train 3.908826 (lr=1.0104e-03) (hash(x)=37964948)
379
+ 3130 train 4.023925 (lr=1.0002e-03) (hash(x)=37801102)
380
+ 3140 train 4.027692 (lr=9.9011e-04) (hash(x)=39187569)
381
+ 3150 train 4.049682 (lr=9.8002e-04) (hash(x)=37104415)
382
+ 3160 train 4.073526 (lr=9.6999e-04) (hash(x)=40414652)
383
+ 3170 train 3.764113 (lr=9.6000e-04) (hash(x)=39971875)
384
+ 3180 train 3.896595 (lr=9.5007e-04) (hash(x)=36823751)
385
+ 3190 train 3.935954 (lr=9.4019e-04) (hash(x)=41232481)
386
+ 3200 val loss 3.9770
387
+ 3200 val perplexity 53.3565
388
+ 3200 train 3.906213 (lr=9.3036e-04) (hash(x)=42263706)
389
+ 3210 train 3.932978 (lr=9.2058e-04) (hash(x)=42682932)
390
+ 3220 train 3.902016 (lr=9.1085e-04) (hash(x)=38173495)
391
+ 3230 train 3.855196 (lr=9.0118e-04) (hash(x)=42665215)
392
+ 3240 train 3.819067 (lr=8.9157e-04) (hash(x)=37430175)
393
+ 3250 train 3.878448 (lr=8.8201e-04) (hash(x)=39501818)
394
+ 3260 train 3.954237 (lr=8.7251e-04) (hash(x)=39316351)
395
+ 3270 train 4.091113 (lr=8.6307e-04) (hash(x)=33934131)
396
+ 3280 train 3.956439 (lr=8.5368e-04) (hash(x)=33684008)
397
+ 3290 train 3.958663 (lr=8.4435e-04) (hash(x)=41195688)
398
+ 3300 val loss 3.9790
399
+ 3300 val perplexity 53.4625
400
+ 3300 train 3.918606 (lr=8.3508e-04) (hash(x)=38146885)
401
+ 3310 train 3.865875 (lr=8.2588e-04) (hash(x)=41531159)
402
+ 3320 train 3.973217 (lr=8.1673e-04) (hash(x)=37465393)
403
+ 3330 train 4.001208 (lr=8.0764e-04) (hash(x)=41276314)
404
+ 3340 train 3.950613 (lr=7.9862e-04) (hash(x)=40035419)
405
+ 3350 train 3.964578 (lr=7.8966e-04) (hash(x)=38589416)
406
+ 3360 train 3.819809 (lr=7.8076e-04) (hash(x)=44616716)
407
+ 3370 train 3.939864 (lr=7.7192e-04) (hash(x)=40753545)
408
+ 3380 train 3.773901 (lr=7.6315e-04) (hash(x)=36064706)
409
+ 3390 train 3.741223 (lr=7.5445e-04) (hash(x)=35628005)
410
+ 3400 val loss 3.9593
411
+ 3400 val perplexity 52.4231
412
+ 3400 train 3.861558 (lr=7.4581e-04) (hash(x)=37242512)
413
+ 3410 train 3.916421 (lr=7.3724e-04) (hash(x)=49988219)
414
+ 3420 train 3.903453 (lr=7.2874e-04) (hash(x)=39309246)
415
+ 3430 train 4.015235 (lr=7.2030e-04) (hash(x)=41337256)
416
+ 3440 train 4.082232 (lr=7.1193e-04) (hash(x)=38422583)
417
+ 3450 train 3.869714 (lr=7.0363e-04) (hash(x)=42782162)
418
+ 3460 train 4.226733 (lr=6.9541e-04) (hash(x)=36355160)
419
+ 3470 train 3.936795 (lr=6.8725e-04) (hash(x)=43452584)
420
+ 3480 train 4.010543 (lr=6.7916e-04) (hash(x)=36009375)
421
+ 3490 train 3.879845 (lr=6.7114e-04) (hash(x)=44201141)
422
+ 3500 val loss 3.9419
423
+ 3500 val perplexity 51.5168
424
+ 3500 train 3.850629 (lr=6.6320e-04) (hash(x)=32974340)
425
+ 3510 train 3.838544 (lr=6.5533e-04) (hash(x)=38454893)
426
+ 3520 train 3.811043 (lr=6.4753e-04) (hash(x)=39406776)
427
+ 3530 train 3.861398 (lr=6.3981e-04) (hash(x)=40863480)
428
+ 3540 train 3.857097 (lr=6.3216e-04) (hash(x)=39891139)
429
+ 3550 train 4.000243 (lr=6.2458e-04) (hash(x)=44049608)
430
+ 3560 train 3.900741 (lr=6.1708e-04) (hash(x)=38049508)
431
+ 3570 train 4.006496 (lr=6.0966e-04) (hash(x)=39546422)
432
+ 3580 train 3.902916 (lr=6.0231e-04) (hash(x)=48257091)
433
+ 3590 train 3.835331 (lr=5.9504e-04) (hash(x)=36249953)
434
+ 3600 val loss 3.9306
435
+ 3600 val perplexity 50.9374
436
+ 3600 train 3.898257 (lr=5.8785e-04) (hash(x)=39324021)
437
+ 3610 train 3.876760 (lr=5.8074e-04) (hash(x)=35944408)
438
+ 3620 train 3.866680 (lr=5.7370e-04) (hash(x)=37992132)
439
+ 3630 train 3.894145 (lr=5.6675e-04) (hash(x)=37723468)
440
+ 3640 train 3.905966 (lr=5.5987e-04) (hash(x)=38300750)
441
+ 3650 train 3.926928 (lr=5.5308e-04) (hash(x)=41511149)
442
+ 3660 train 3.817177 (lr=5.4636e-04) (hash(x)=46384740)
443
+ 3670 train 3.900837 (lr=5.3973e-04) (hash(x)=40100004)
444
+ 3680 train 3.790198 (lr=5.3318e-04) (hash(x)=37889842)
445
+ 3690 train 3.908143 (lr=5.2671e-04) (hash(x)=40976637)
446
+ 3700 val loss 3.9256
447
+ 3700 val perplexity 50.6859
448
+ 3700 train 3.958560 (lr=5.2033e-04) (hash(x)=40138225)
449
+ 3710 train 3.944105 (lr=5.1402e-04) (hash(x)=38068714)
450
+ 3720 train 3.924678 (lr=5.0780e-04) (hash(x)=40819257)
451
+ 3730 train 3.876761 (lr=5.0167e-04) (hash(x)=29564095)
452
+ 3740 train 3.857092 (lr=4.9562e-04) (hash(x)=36509087)
453
+ 3750 train 4.013295 (lr=4.8965e-04) (hash(x)=41226118)
454
+ 3760 train 3.927763 (lr=4.8377e-04) (hash(x)=38317131)
455
+ 3770 train 3.924551 (lr=4.7798e-04) (hash(x)=41244909)
456
+ 3780 train 3.788678 (lr=4.7227e-04) (hash(x)=41412670)
457
+ 3790 train 3.755711 (lr=4.6665e-04) (hash(x)=34906259)
458
+ 3800 val loss 3.9183
459
+ 3800 val perplexity 50.3169
460
+ 3800 train 4.057327 (lr=4.6112e-04) (hash(x)=40968992)
461
+ 3810 train 3.773186 (lr=4.5567e-04) (hash(x)=35241519)
462
+ 3820 train 3.899414 (lr=4.5031e-04) (hash(x)=42755713)
463
+ 3830 train 3.854153 (lr=4.4504e-04) (hash(x)=47166492)
464
+ 3840 train 4.020204 (lr=4.3986e-04) (hash(x)=57200827)
465
+ 3850 train 3.883969 (lr=4.3477e-04) (hash(x)=44734592)
466
+ 3860 train 3.788623 (lr=4.2977e-04) (hash(x)=32738162)
467
+ 3870 train 3.967677 (lr=4.2486e-04) (hash(x)=39064476)
468
+ 3880 train 3.809404 (lr=4.2004e-04) (hash(x)=39921346)
469
+ 3890 train 3.933308 (lr=4.1530e-04) (hash(x)=35960386)
470
+ 3900 val loss 3.9108
471
+ 3900 val perplexity 49.9394
472
+ 3900 train 3.965215 (lr=4.1066e-04) (hash(x)=40314553)
473
+ 3910 train 3.913331 (lr=4.0611e-04) (hash(x)=43961115)
474
+ 3920 train 3.981851 (lr=4.0166e-04) (hash(x)=43423054)
475
+ 3930 train 4.169628 (lr=3.9729e-04) (hash(x)=34054777)
476
+ 3940 train 3.934535 (lr=3.9302e-04) (hash(x)=39706545)
477
+ 3950 train 3.908283 (lr=3.8884e-04) (hash(x)=37107026)
478
+ 3960 train 3.855681 (lr=3.8475e-04) (hash(x)=37669670)
479
+ 3970 train 3.931319 (lr=3.8076e-04) (hash(x)=32520030)
480
+ 3980 train 3.787637 (lr=3.7685e-04) (hash(x)=43036200)
481
+ 3990 train 3.833330 (lr=3.7305e-04) (hash(x)=39177193)
482
+ 4000 val loss 3.9016
483
+ 4000 val perplexity 49.4821
484
+ 4000 train 3.819140 (lr=3.6933e-04) (hash(x)=42698385)
485
+ 4010 train 3.772872 (lr=3.6572e-04) (hash(x)=36812123)
486
+ 4020 train 3.852390 (lr=3.6219e-04) (hash(x)=40477406)
487
+ 4030 train 3.887582 (lr=3.5876e-04) (hash(x)=43794844)
488
+ 4040 train 3.963847 (lr=3.5543e-04) (hash(x)=39871633)
489
+ 4050 train 3.905866 (lr=3.5219e-04) (hash(x)=44942955)
490
+ 4060 train 3.898297 (lr=3.4905e-04) (hash(x)=37704728)
491
+ 4070 train 4.381034 (lr=3.4600e-04) (hash(x)=44586874)
492
+ 4080 train 3.934383 (lr=3.4305e-04) (hash(x)=40698169)
493
+ 4090 train 4.026707 (lr=3.4019e-04) (hash(x)=56230743)
494
+ 4100 val loss 3.8920
495
+ 4100 val perplexity 49.0090
496
+ 4100 train 3.952113 (lr=3.3744e-04) (hash(x)=39745125)
497
+ 4110 train 3.894502 (lr=3.3477e-04) (hash(x)=38038631)
498
+ 4120 train 3.915252 (lr=3.3221e-04) (hash(x)=41225060)
499
+ 4130 train 3.943556 (lr=3.2974e-04) (hash(x)=43984411)
500
+ 4140 train 3.808182 (lr=3.2737e-04) (hash(x)=35070209)
501
+ 4150 train 3.822503 (lr=3.2510e-04) (hash(x)=44830085)
502
+ 4160 train 3.766563 (lr=3.2292e-04) (hash(x)=34909340)
503
+ 4170 train 3.967158 (lr=3.2085e-04) (hash(x)=39799332)
504
+ 4180 train 3.938090 (lr=3.1887e-04) (hash(x)=34673396)
505
+ 4190 train 3.966611 (lr=3.1699e-04) (hash(x)=43013076)
506
+ 4200 val loss 3.8850
507
+ 4200 val perplexity 48.6648
508
+ 4200 train 3.989354 (lr=3.1520e-04) (hash(x)=44540286)
509
+ 4210 train 3.944010 (lr=3.1352e-04) (hash(x)=39338462)
510
+ 4220 train 3.841686 (lr=3.1193e-04) (hash(x)=40697563)
511
+ 4230 train 3.953051 (lr=3.1044e-04) (hash(x)=36447380)
512
+ 4240 train 3.916899 (lr=3.0905e-04) (hash(x)=31542722)
513
+ 4250 train 3.818587 (lr=3.0776e-04) (hash(x)=38193385)
514
+ 4260 train 3.826108 (lr=3.0657e-04) (hash(x)=34604433)
515
+ 4270 train 3.813822 (lr=3.0548e-04) (hash(x)=34316175)
516
+ 4280 train 3.869526 (lr=3.0449e-04) (hash(x)=41834221)
517
+ 4290 train 3.872258 (lr=3.0359e-04) (hash(x)=40456686)
518
+ 4300 val loss 3.8863
519
+ 4300 val perplexity 48.7279
520
+ 4300 train 3.781909 (lr=3.0280e-04) (hash(x)=38688927)
521
+ 4310 train 3.961439 (lr=3.0210e-04) (hash(x)=41920949)
522
+ 4320 train 3.837213 (lr=3.0150e-04) (hash(x)=39588763)
523
+ 4330 train 3.877870 (lr=3.0101e-04) (hash(x)=39696346)
524
+ 4340 train 3.979786 (lr=3.0061e-04) (hash(x)=36406591)
525
+ 4350 train 3.949103 (lr=3.0031e-04) (hash(x)=34248339)
526
+ 4360 train 3.869088 (lr=3.0011e-04) (hash(x)=39031938)
527
+ 4370 train 3.898676 (lr=3.0001e-04) (hash(x)=38410485)
528
+ 4374 val loss 3.8786
529
+ 4374 val perplexity 48.3574
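A quick consistency check on the log above: the reported val perplexity is exp(val loss), and the lr column runs from the warmup up to max_lr = 3e-3 and then decays to roughly 3e-4 (about max_lr / 10) by step 4374. A minimal check of the perplexity relation:

    import math

    # exp(val loss) reproduces the logged val perplexity up to rounding.
    print(math.exp(11.2679))  # ≈ 7.83e4, vs. the logged 78270.6641 at step 0
    print(math.exp(3.8786))   # ≈ 48.36, vs. the logged 48.3574 at step 4374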
logs/fix_compile_bug/disable_dynamo/model_04374.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5657bb3bb1b0e27e509e346fc962f2bb03b66978ffd11d334c52869b2cbe3b4c
+ size 97706546
logs/fix_compile_bug/disable_dynamo/optimizer_04374.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8330e34f6cd87b094155ca00feb9b593ab1bf898bee64f7b1701f39bf40184f2
+ size 189135414
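The three .pt entries in this commit are Git LFS pointer files (version / oid / size), so the diff records only the object hash and size, not the tensors themselves. A hedged sketch of pulling one checkpoint back down and inspecting it (the repo_id is a placeholder and the checkpoint layout is an assumption, so check the top-level structure before relying on it):

    import torch
    from huggingface_hub import hf_hub_download

    path = hf_hub_download(
        repo_id="<user>/<repo>",  # placeholder, not named in this commit
        filename="logs/fix_compile_bug/disable_dynamo/model_04374.pt",
    )
    state = torch.load(path, map_location="cpu")
    # Layout unknown from the diff alone: could be a raw state_dict or a wrapper dict.
    print(type(state))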