Commit 58b271c by andrew-healey · verified · 1 parent: bce8d4e

Upload folder using huggingface_hub

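The commit message says the files were pushed with huggingface_hub's folder-upload API. Below is a minimal sketch of how such a commit is typically produced; the repo id, authentication, and exact arguments are assumptions for illustration, not details taken from this page.

from huggingface_hub import HfApi

api = HfApi()  # assumes you are already authenticated (HF_TOKEN or `huggingface-cli login`)
api.upload_folder(
    folder_path="logs/fix_compile_bug/relu_graph_break",   # local run directory
    path_in_repo="logs/fix_compile_bug/relu_graph_break",  # mirror the same layout in the repo
    repo_id="andrew-healey/REPO_NAME",                     # placeholder; the target repo is not shown on this page
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)

Large .pt files uploaded this way end up as Git LFS objects, which is why the diffs below show LFS pointer files rather than raw weights.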
logs/fix_compile_bug/relu_graph_break/args.json ADDED
@@ -0,0 +1 @@
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "logs/fix_compile_bug/relu_graph_break", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 12, "n_embd": 264, "head_dim": 22, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 4375, "warmup_steps": 250, "group": "fix_compile_bug", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1337, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "n_latent_masks", "selection_head_linear_combo_scale": 1.0, "disable_selection_head_linear_combo_bias": false, "assert_latent_matches_no_head": false, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 32, "total_batch_size": 131072, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": 1, "init_latent_masks_to_identity": true, "latent_mask_scale": null, "latent_mask_sigmoid": false, "S_layernorm": false, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.003, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "relu_graph_break"}
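Since args.json is plain JSON, the recorded hyperparameters can be reloaded directly. A small sketch, assuming the repo layout shown in this commit:

import json

# Reload the run configuration saved next to the checkpoints.
with open("logs/fix_compile_bug/relu_graph_break/args.json") as f:
    args = json.load(f)

print(args["max_steps"], args["max_lr"], args["attention_kind"])
# 4375 0.003 selective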
logs/fix_compile_bug/relu_graph_break/dataloader_04374.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6031fd3e2855a036f7a5531cc24555aabd1115f9dd6618b8b2ca6f55279ef0b2
+ size 964
logs/fix_compile_bug/relu_graph_break/log2.txt ADDED
@@ -0,0 +1,529 @@
1
+ max_steps: 4375
2
+ 0 val loss 11.2679
3
+ 0 val perplexity 78271.0312
4
+ 0 train 11.273937 (lr=4.1958e-06) (hash(x)=42405186)
5
+ 10 train 9.975639 (lr=4.6154e-05) (hash(x)=38970388)
6
+ 20 train 9.416134 (lr=8.8112e-05) (hash(x)=38844124)
7
+ 30 train 8.637617 (lr=1.3007e-04) (hash(x)=37597230)
8
+ 40 train 7.895778 (lr=1.7203e-04) (hash(x)=40798817)
9
+ 50 train 7.699969 (lr=2.1399e-04) (hash(x)=36405572)
10
+ 60 train 7.650737 (lr=2.5594e-04) (hash(x)=36066574)
11
+ 70 train 7.536426 (lr=2.9790e-04) (hash(x)=43811010)
12
+ 80 train 7.532898 (lr=3.3986e-04) (hash(x)=40628178)
13
+ 90 train 7.165854 (lr=3.8182e-04) (hash(x)=43681905)
14
+ 100 val loss 7.0802
15
+ 100 val perplexity 1188.2007
16
+ 100 train 6.959349 (lr=4.2378e-04) (hash(x)=42344017)
17
+ 110 train 7.009254 (lr=4.6573e-04) (hash(x)=39853582)
18
+ 120 train 6.834777 (lr=5.0769e-04) (hash(x)=39880304)
19
+ 130 train 6.569974 (lr=5.4965e-04) (hash(x)=36124250)
20
+ 140 train 6.516436 (lr=5.9161e-04) (hash(x)=45012802)
21
+ 150 train 6.365746 (lr=6.3357e-04) (hash(x)=36325989)
22
+ 160 train 6.285093 (lr=6.7552e-04) (hash(x)=40241802)
23
+ 170 train 6.274589 (lr=7.1748e-04) (hash(x)=42401819)
24
+ 180 train 6.196488 (lr=7.5944e-04) (hash(x)=40459165)
25
+ 190 train 6.175265 (lr=8.0140e-04) (hash(x)=41967324)
26
+ 200 val loss 6.1659
27
+ 200 val perplexity 476.2204
28
+ 200 train 6.215501 (lr=8.4336e-04) (hash(x)=45796175)
29
+ 210 train 6.130268 (lr=8.8531e-04) (hash(x)=35252224)
30
+ 220 train 6.040919 (lr=9.2727e-04) (hash(x)=40863261)
31
+ 230 train 5.862932 (lr=9.6923e-04) (hash(x)=40738876)
32
+ 240 train 5.878960 (lr=1.0112e-03) (hash(x)=46763138)
33
+ 250 train 5.920741 (lr=1.0531e-03) (hash(x)=46920901)
34
+ 260 train 5.695687 (lr=1.0951e-03) (hash(x)=43475159)
35
+ 270 train 5.711456 (lr=1.1371e-03) (hash(x)=47424242)
36
+ 280 train 5.715854 (lr=1.1790e-03) (hash(x)=40412635)
37
+ 290 train 5.623221 (lr=1.2210e-03) (hash(x)=31782544)
38
+ 300 val loss 5.6950
39
+ 300 val perplexity 297.3723
40
+ 300 train 5.582193 (lr=1.2629e-03) (hash(x)=34812634)
41
+ 310 train 5.495749 (lr=1.3049e-03) (hash(x)=44178651)
42
+ 320 train 5.662175 (lr=1.3469e-03) (hash(x)=43205331)
43
+ 330 train 5.599583 (lr=1.3888e-03) (hash(x)=41565398)
44
+ 340 train 5.620196 (lr=1.4308e-03) (hash(x)=41719738)
45
+ 350 train 5.464106 (lr=1.4727e-03) (hash(x)=46108058)
46
+ 360 train 5.433536 (lr=1.5147e-03) (hash(x)=37445628)
47
+ 370 train 5.311428 (lr=1.5566e-03) (hash(x)=36085988)
48
+ 380 train 5.372780 (lr=1.5986e-03) (hash(x)=37645228)
49
+ 390 train 5.379781 (lr=1.6406e-03) (hash(x)=35483844)
50
+ 400 val loss 5.3488
51
+ 400 val perplexity 210.3615
52
+ 400 train 5.355000 (lr=1.6825e-03) (hash(x)=41699751)
53
+ 410 train 5.208506 (lr=1.7245e-03) (hash(x)=32755762)
54
+ 420 train 5.202258 (lr=1.7664e-03) (hash(x)=35953145)
55
+ 430 train 5.180703 (lr=1.8084e-03) (hash(x)=35587716)
56
+ 440 train 5.128015 (lr=1.8503e-03) (hash(x)=39335888)
57
+ 450 train 5.182366 (lr=1.8923e-03) (hash(x)=44314559)
58
+ 460 train 5.227795 (lr=1.9343e-03) (hash(x)=42950490)
59
+ 470 train 5.291437 (lr=1.9762e-03) (hash(x)=41461061)
60
+ 480 train 5.256894 (lr=2.0182e-03) (hash(x)=41993931)
61
+ 490 train 5.149775 (lr=2.0601e-03) (hash(x)=37544536)
62
+ 500 val loss 5.1231
63
+ 500 val perplexity 167.8584
64
+ 500 train 5.117267 (lr=2.1021e-03) (hash(x)=40203267)
65
+ 510 train 5.154401 (lr=2.1441e-03) (hash(x)=41388334)
66
+ 520 train 5.145373 (lr=2.1860e-03) (hash(x)=34662125)
67
+ 530 train 5.160545 (lr=2.2280e-03) (hash(x)=37501995)
68
+ 540 train 5.082015 (lr=2.2699e-03) (hash(x)=41263260)
69
+ 550 train 4.975661 (lr=2.3119e-03) (hash(x)=49637102)
70
+ 560 train 4.886620 (lr=2.3538e-03) (hash(x)=47434359)
71
+ 570 train 4.867888 (lr=2.3958e-03) (hash(x)=42912781)
72
+ 580 train 4.955208 (lr=2.4378e-03) (hash(x)=48538862)
73
+ 590 train 4.797276 (lr=2.4797e-03) (hash(x)=38595222)
74
+ 600 val loss 4.9460
75
+ 600 val perplexity 140.6151
76
+ 600 train 4.876371 (lr=2.5217e-03) (hash(x)=37146098)
77
+ 610 train 4.920711 (lr=2.5636e-03) (hash(x)=50116590)
78
+ 620 train 4.875394 (lr=2.6056e-03) (hash(x)=41832213)
79
+ 630 train 4.981278 (lr=2.6476e-03) (hash(x)=39030069)
80
+ 640 train 4.837834 (lr=2.6895e-03) (hash(x)=46237222)
81
+ 650 train 4.839774 (lr=2.7315e-03) (hash(x)=42787835)
82
+ 660 train 4.761314 (lr=2.7734e-03) (hash(x)=39109857)
83
+ 670 train 4.853862 (lr=2.8154e-03) (hash(x)=39462485)
84
+ 680 train 4.749719 (lr=2.8573e-03) (hash(x)=41744849)
85
+ 690 train 4.765985 (lr=2.8993e-03) (hash(x)=38408104)
86
+ 700 val loss 4.7870
87
+ 700 val perplexity 119.9387
88
+ 700 train 4.680850 (lr=2.9413e-03) (hash(x)=41890521)
89
+ 710 train 4.731206 (lr=2.9832e-03) (hash(x)=41416330)
90
+ 720 train 4.626122 (lr=3.0000e-03) (hash(x)=38760874)
91
+ 730 train 4.714638 (lr=2.9999e-03) (hash(x)=37451366)
92
+ 740 train 4.619176 (lr=2.9997e-03) (hash(x)=36120996)
93
+ 750 train 4.718946 (lr=2.9994e-03) (hash(x)=34667979)
94
+ 760 train 4.706579 (lr=2.9990e-03) (hash(x)=50862178)
95
+ 770 train 4.655852 (lr=2.9985e-03) (hash(x)=41230008)
96
+ 780 train 4.562075 (lr=2.9979e-03) (hash(x)=36407773)
97
+ 790 train 4.573715 (lr=2.9972e-03) (hash(x)=42561722)
98
+ 800 val loss 4.6692
99
+ 800 val perplexity 106.6148
100
+ 800 train 4.546550 (lr=2.9964e-03) (hash(x)=48340441)
101
+ 810 train 4.612732 (lr=2.9955e-03) (hash(x)=36261826)
102
+ 820 train 4.428444 (lr=2.9945e-03) (hash(x)=37918881)
103
+ 830 train 4.546818 (lr=2.9934e-03) (hash(x)=38608334)
104
+ 840 train 4.287247 (lr=2.9922e-03) (hash(x)=39403505)
105
+ 850 train 4.404721 (lr=2.9909e-03) (hash(x)=43741441)
106
+ 860 train 4.351874 (lr=2.9896e-03) (hash(x)=38030739)
107
+ 870 train 4.657729 (lr=2.9881e-03) (hash(x)=36064710)
108
+ 880 train 4.819062 (lr=2.9865e-03) (hash(x)=40349968)
109
+ 890 train 4.553944 (lr=2.9848e-03) (hash(x)=38715806)
110
+ 900 val loss 4.5689
111
+ 900 val perplexity 96.4343
112
+ 900 train 4.628375 (lr=2.9830e-03) (hash(x)=35310433)
113
+ 910 train 4.523985 (lr=2.9811e-03) (hash(x)=37426246)
114
+ 920 train 4.508109 (lr=2.9792e-03) (hash(x)=39966506)
115
+ 930 train 4.525064 (lr=2.9771e-03) (hash(x)=37747046)
116
+ 940 train 4.482506 (lr=2.9749e-03) (hash(x)=42710584)
117
+ 950 train 4.509185 (lr=2.9726e-03) (hash(x)=41617662)
118
+ 960 train 4.499293 (lr=2.9703e-03) (hash(x)=40145473)
119
+ 970 train 4.487870 (lr=2.9678e-03) (hash(x)=38513059)
120
+ 980 train 4.491925 (lr=2.9652e-03) (hash(x)=38574237)
121
+ 990 train 4.403806 (lr=2.9626e-03) (hash(x)=37633181)
122
+ 1000 val loss 4.4956
123
+ 1000 val perplexity 89.6211
124
+ 1000 train 4.272623 (lr=2.9598e-03) (hash(x)=42781027)
125
+ 1010 train 4.425265 (lr=2.9570e-03) (hash(x)=48329642)
126
+ 1020 train 4.291304 (lr=2.9540e-03) (hash(x)=36577969)
127
+ 1030 train 4.262452 (lr=2.9510e-03) (hash(x)=39789757)
128
+ 1040 train 4.197104 (lr=2.9478e-03) (hash(x)=43293764)
129
+ 1050 train 4.537367 (lr=2.9446e-03) (hash(x)=40077223)
130
+ 1060 train 4.544796 (lr=2.9412e-03) (hash(x)=41035822)
131
+ 1070 train 4.465870 (lr=2.9378e-03) (hash(x)=42069615)
132
+ 1080 train 4.450941 (lr=2.9343e-03) (hash(x)=42668159)
133
+ 1090 train 4.433876 (lr=2.9307e-03) (hash(x)=36477372)
134
+ 1100 val loss 4.4175
135
+ 1100 val perplexity 82.8853
136
+ 1100 train 4.481778 (lr=2.9270e-03) (hash(x)=41357005)
137
+ 1110 train 4.441025 (lr=2.9231e-03) (hash(x)=42852875)
138
+ 1120 train 4.482358 (lr=2.9192e-03) (hash(x)=35310013)
139
+ 1130 train 4.485731 (lr=2.9152e-03) (hash(x)=51231839)
140
+ 1140 train 4.421887 (lr=2.9112e-03) (hash(x)=41431568)
141
+ 1150 train 4.404210 (lr=2.9070e-03) (hash(x)=41848748)
142
+ 1160 train 4.332568 (lr=2.9027e-03) (hash(x)=36702617)
143
+ 1170 train 4.339007 (lr=2.8983e-03) (hash(x)=40801937)
144
+ 1180 train 4.291142 (lr=2.8939e-03) (hash(x)=41936951)
145
+ 1190 train 4.190557 (lr=2.8893e-03) (hash(x)=41913508)
146
+ 1200 val loss 4.4159
147
+ 1200 val perplexity 82.7567
148
+ 1200 train 4.202671 (lr=2.8847e-03) (hash(x)=36842847)
149
+ 1210 train 4.300759 (lr=2.8800e-03) (hash(x)=30499991)
150
+ 1220 train 4.341202 (lr=2.8751e-03) (hash(x)=41929560)
151
+ 1230 train 4.485182 (lr=2.8702e-03) (hash(x)=47738317)
152
+ 1240 train 4.501803 (lr=2.8652e-03) (hash(x)=38256802)
153
+ 1250 train 4.484981 (lr=2.8601e-03) (hash(x)=28755106)
154
+ 1260 train 4.426728 (lr=2.8550e-03) (hash(x)=38336891)
155
+ 1270 train 4.380210 (lr=2.8497e-03) (hash(x)=36084046)
156
+ 1280 train 4.321799 (lr=2.8443e-03) (hash(x)=37921865)
157
+ 1290 train 4.416545 (lr=2.8389e-03) (hash(x)=38656933)
158
+ 1300 val loss 4.3666
159
+ 1300 val perplexity 78.7789
160
+ 1300 train 4.398830 (lr=2.8333e-03) (hash(x)=39331872)
161
+ 1310 train 4.350943 (lr=2.8277e-03) (hash(x)=35713543)
162
+ 1320 train 4.333488 (lr=2.8220e-03) (hash(x)=42221951)
163
+ 1330 train 4.311072 (lr=2.8162e-03) (hash(x)=49371177)
164
+ 1340 train 4.420414 (lr=2.8103e-03) (hash(x)=39825492)
165
+ 1350 train 4.342120 (lr=2.8044e-03) (hash(x)=38122709)
166
+ 1360 train 4.346949 (lr=2.7983e-03) (hash(x)=46034654)
167
+ 1370 train 4.084059 (lr=2.7922e-03) (hash(x)=36717950)
168
+ 1380 train 4.124674 (lr=2.7860e-03) (hash(x)=40410179)
169
+ 1390 train 4.110501 (lr=2.7797e-03) (hash(x)=46535981)
170
+ 1400 val loss 4.3485
171
+ 1400 val perplexity 77.3654
172
+ 1400 train 4.162092 (lr=2.7733e-03) (hash(x)=35556187)
173
+ 1410 train 4.258179 (lr=2.7668e-03) (hash(x)=47156509)
174
+ 1420 train 4.416224 (lr=2.7603e-03) (hash(x)=44281694)
175
+ 1430 train 4.397044 (lr=2.7536e-03) (hash(x)=38833525)
176
+ 1440 train 4.463760 (lr=2.7469e-03) (hash(x)=42715085)
177
+ 1450 train 4.498690 (lr=2.7401e-03) (hash(x)=42032181)
178
+ 1460 train 4.302469 (lr=2.7333e-03) (hash(x)=42012177)
179
+ 1470 train 4.323844 (lr=2.7263e-03) (hash(x)=41738021)
180
+ 1480 train 4.386064 (lr=2.7193e-03) (hash(x)=48868810)
181
+ 1490 train 4.346935 (lr=2.7121e-03) (hash(x)=50198350)
182
+ 1500 val loss 4.2944
183
+ 1500 val perplexity 73.2889
184
+ 1500 train 4.364525 (lr=2.7050e-03) (hash(x)=44018572)
185
+ 1510 train 4.265894 (lr=2.6977e-03) (hash(x)=40114593)
186
+ 1520 train 4.222320 (lr=2.6903e-03) (hash(x)=41475808)
187
+ 1530 train 4.292872 (lr=2.6829e-03) (hash(x)=44249710)
188
+ 1540 train 4.375224 (lr=2.6754e-03) (hash(x)=38440727)
189
+ 1550 train 4.363383 (lr=2.6678e-03) (hash(x)=38021994)
190
+ 1560 train 4.327667 (lr=2.6602e-03) (hash(x)=39234436)
191
+ 1570 train 4.221068 (lr=2.6525e-03) (hash(x)=45044408)
192
+ 1580 train 4.272184 (lr=2.6447e-03) (hash(x)=38890254)
193
+ 1590 train 4.229146 (lr=2.6368e-03) (hash(x)=39091171)
194
+ 1600 val loss 4.2574
195
+ 1600 val perplexity 70.6260
196
+ 1600 train 4.335165 (lr=2.6289e-03) (hash(x)=36528068)
197
+ 1610 train 4.221858 (lr=2.6208e-03) (hash(x)=41836863)
198
+ 1620 train 4.288374 (lr=2.6128e-03) (hash(x)=41871021)
199
+ 1630 train 4.377075 (lr=2.6046e-03) (hash(x)=41234817)
200
+ 1640 train 4.271412 (lr=2.5964e-03) (hash(x)=37912972)
201
+ 1650 train 4.199685 (lr=2.5881e-03) (hash(x)=43040972)
202
+ 1660 train 4.094000 (lr=2.5797e-03) (hash(x)=26922739)
203
+ 1670 train 4.169180 (lr=2.5713e-03) (hash(x)=39907717)
204
+ 1680 train 4.117653 (lr=2.5628e-03) (hash(x)=45004538)
205
+ 1690 train 4.104366 (lr=2.5542e-03) (hash(x)=39394756)
206
+ 1700 val loss 4.2443
207
+ 1700 val perplexity 69.7040
208
+ 1700 train 4.142566 (lr=2.5455e-03) (hash(x)=38805510)
209
+ 1710 train 4.060070 (lr=2.5368e-03) (hash(x)=34950642)
210
+ 1720 train 4.392437 (lr=2.5281e-03) (hash(x)=35167195)
211
+ 1730 train 4.384059 (lr=2.5192e-03) (hash(x)=46592240)
212
+ 1740 train 4.169741 (lr=2.5103e-03) (hash(x)=43879336)
213
+ 1750 train 4.283555 (lr=2.5014e-03) (hash(x)=42298161)
214
+ 1760 train 4.292733 (lr=2.4924e-03) (hash(x)=37358957)
215
+ 1770 train 4.230499 (lr=2.4833e-03) (hash(x)=41641994)
216
+ 1780 train 4.224138 (lr=2.4741e-03) (hash(x)=44388117)
217
+ 1790 train 4.241033 (lr=2.4649e-03) (hash(x)=45220046)
218
+ 1800 val loss 4.2031
219
+ 1800 val perplexity 66.8958
220
+ 1800 train 4.267235 (lr=2.4556e-03) (hash(x)=43748263)
221
+ 1810 train 4.078368 (lr=2.4463e-03) (hash(x)=26733445)
222
+ 1820 train 4.216148 (lr=2.4369e-03) (hash(x)=38491163)
223
+ 1830 train 4.210117 (lr=2.4275e-03) (hash(x)=35359413)
224
+ 1840 train 4.125063 (lr=2.4180e-03) (hash(x)=40482942)
225
+ 1850 train 4.012699 (lr=2.4084e-03) (hash(x)=35252918)
226
+ 1860 train 4.090386 (lr=2.3988e-03) (hash(x)=38221117)
227
+ 1870 train 4.138335 (lr=2.3891e-03) (hash(x)=41438728)
228
+ 1880 train 3.969471 (lr=2.3794e-03) (hash(x)=39801057)
229
+ 1890 train 4.163966 (lr=2.3696e-03) (hash(x)=35517965)
230
+ 1900 val loss 4.2154
231
+ 1900 val perplexity 67.7222
232
+ 1900 train 4.245860 (lr=2.3598e-03) (hash(x)=43795495)
233
+ 1910 train 4.239686 (lr=2.3499e-03) (hash(x)=47235154)
234
+ 1920 train 4.273353 (lr=2.3400e-03) (hash(x)=42139138)
235
+ 1930 train 4.213113 (lr=2.3300e-03) (hash(x)=38910301)
236
+ 1940 train 4.088310 (lr=2.3200e-03) (hash(x)=41819422)
237
+ 1950 train 4.210087 (lr=2.3099e-03) (hash(x)=44434112)
238
+ 1960 train 4.229791 (lr=2.2998e-03) (hash(x)=47468800)
239
+ 1970 train 4.281806 (lr=2.2896e-03) (hash(x)=40489494)
240
+ 1980 train 4.263423 (lr=2.2793e-03) (hash(x)=41660256)
241
+ 1990 train 4.086990 (lr=2.2691e-03) (hash(x)=36821154)
242
+ 2000 val loss 4.1907
243
+ 2000 val perplexity 66.0720
244
+ 2000 train 4.170735 (lr=2.2588e-03) (hash(x)=46230173)
245
+ 2010 train 4.140513 (lr=2.2484e-03) (hash(x)=38867399)
246
+ 2020 train 4.143517 (lr=2.2380e-03) (hash(x)=39221022)
247
+ 2030 train 4.119156 (lr=2.2275e-03) (hash(x)=40395859)
248
+ 2040 train 4.099733 (lr=2.2170e-03) (hash(x)=41260539)
249
+ 2050 train 4.097055 (lr=2.2065e-03) (hash(x)=40471948)
250
+ 2060 train 4.046801 (lr=2.1959e-03) (hash(x)=34614958)
251
+ 2070 train 4.069624 (lr=2.1853e-03) (hash(x)=40426097)
252
+ 2080 train 3.897176 (lr=2.1746e-03) (hash(x)=37618014)
253
+ 2090 train 4.202407 (lr=2.1639e-03) (hash(x)=43655438)
254
+ 2100 val loss 4.1440
255
+ 2100 val perplexity 63.0538
256
+ 2100 train 4.186776 (lr=2.1532e-03) (hash(x)=39728341)
257
+ 2110 train 4.160409 (lr=2.1424e-03) (hash(x)=40996707)
258
+ 2120 train 4.302773 (lr=2.1316e-03) (hash(x)=37072048)
259
+ 2130 train 4.115414 (lr=2.1208e-03) (hash(x)=37668129)
260
+ 2140 train 4.159514 (lr=2.1099e-03) (hash(x)=42862485)
261
+ 2150 train 4.319764 (lr=2.0990e-03) (hash(x)=40933735)
262
+ 2160 train 4.222715 (lr=2.0881e-03) (hash(x)=42952511)
263
+ 2170 train 4.090984 (lr=2.0771e-03) (hash(x)=38227732)
264
+ 2180 train 4.144982 (lr=2.0661e-03) (hash(x)=42919169)
265
+ 2190 train 4.131997 (lr=2.0550e-03) (hash(x)=38635637)
266
+ 2200 val loss 4.1405
267
+ 2200 val perplexity 62.8366
268
+ 2200 train 4.102549 (lr=2.0440e-03) (hash(x)=43974656)
269
+ 2210 train 4.096525 (lr=2.0329e-03) (hash(x)=35923510)
270
+ 2220 train 4.245987 (lr=2.0217e-03) (hash(x)=38698936)
271
+ 2230 train 3.992659 (lr=2.0106e-03) (hash(x)=41822854)
272
+ 2240 train 3.968671 (lr=1.9994e-03) (hash(x)=39069402)
273
+ 2250 train 4.019383 (lr=1.9882e-03) (hash(x)=38428746)
274
+ 2260 train 3.892169 (lr=1.9770e-03) (hash(x)=41190125)
275
+ 2270 train 4.247071 (lr=1.9657e-03) (hash(x)=39197540)
276
+ 2280 train 4.190046 (lr=1.9544e-03) (hash(x)=41964710)
277
+ 2290 train 4.058774 (lr=1.9431e-03) (hash(x)=35566499)
278
+ 2300 val loss 4.1276
279
+ 2300 val perplexity 62.0260
280
+ 2300 train 4.057745 (lr=1.9318e-03) (hash(x)=40710061)
281
+ 2310 train 4.021409 (lr=1.9205e-03) (hash(x)=34084675)
282
+ 2320 train 4.308216 (lr=1.9091e-03) (hash(x)=43192252)
283
+ 2330 train 4.209381 (lr=1.8977e-03) (hash(x)=39365674)
284
+ 2340 train 4.114611 (lr=1.8863e-03) (hash(x)=41693707)
285
+ 2350 train 4.162870 (lr=1.8749e-03) (hash(x)=41246790)
286
+ 2360 train 4.094388 (lr=1.8635e-03) (hash(x)=44379115)
287
+ 2370 train 4.050648 (lr=1.8520e-03) (hash(x)=37114071)
288
+ 2380 train 4.050638 (lr=1.8406e-03) (hash(x)=40531187)
289
+ 2390 train 4.155513 (lr=1.8291e-03) (hash(x)=35190591)
290
+ 2400 val loss 4.0912
291
+ 2400 val perplexity 59.8144
292
+ 2400 train 4.171890 (lr=1.8176e-03) (hash(x)=46483290)
293
+ 2410 train 4.015283 (lr=1.8061e-03) (hash(x)=36994364)
294
+ 2420 train 4.066062 (lr=1.7946e-03) (hash(x)=43774123)
295
+ 2430 train 4.121202 (lr=1.7830e-03) (hash(x)=39016150)
296
+ 2440 train 4.082554 (lr=1.7715e-03) (hash(x)=41424235)
297
+ 2450 train 3.905015 (lr=1.7600e-03) (hash(x)=40781424)
298
+ 2460 train 3.950707 (lr=1.7484e-03) (hash(x)=36871418)
299
+ 2470 train 3.926774 (lr=1.7368e-03) (hash(x)=40398678)
300
+ 2480 train 3.973672 (lr=1.7253e-03) (hash(x)=40019705)
301
+ 2490 train 4.021952 (lr=1.7137e-03) (hash(x)=43210658)
302
+ 2500 val loss 4.0837
303
+ 2500 val perplexity 59.3669
304
+ 2500 train 4.107913 (lr=1.7021e-03) (hash(x)=40992954)
305
+ 2510 train 4.161224 (lr=1.6906e-03) (hash(x)=40420845)
306
+ 2520 train 4.045031 (lr=1.6790e-03) (hash(x)=39278025)
307
+ 2530 train 4.030315 (lr=1.6674e-03) (hash(x)=40118798)
308
+ 2540 train 4.254988 (lr=1.6558e-03) (hash(x)=36453463)
309
+ 2550 train 4.056834 (lr=1.6442e-03) (hash(x)=38336991)
310
+ 2560 train 4.155058 (lr=1.6326e-03) (hash(x)=40307518)
311
+ 2570 train 3.966041 (lr=1.6210e-03) (hash(x)=31378688)
312
+ 2580 train 4.040754 (lr=1.6094e-03) (hash(x)=38888092)
313
+ 2590 train 4.095564 (lr=1.5979e-03) (hash(x)=43391661)
314
+ 2600 val loss 4.0542
315
+ 2600 val perplexity 57.6395
316
+ 2600 train 3.991055 (lr=1.5863e-03) (hash(x)=43784447)
317
+ 2610 train 4.041187 (lr=1.5747e-03) (hash(x)=44952165)
318
+ 2620 train 3.939271 (lr=1.5632e-03) (hash(x)=37357711)
319
+ 2630 train 4.014759 (lr=1.5516e-03) (hash(x)=41317604)
320
+ 2640 train 3.807003 (lr=1.5400e-03) (hash(x)=43473476)
321
+ 2650 train 3.962617 (lr=1.5285e-03) (hash(x)=36968500)
322
+ 2660 train 3.973705 (lr=1.5170e-03) (hash(x)=37853410)
323
+ 2670 train 3.843600 (lr=1.5054e-03) (hash(x)=55116905)
324
+ 2680 train 4.080128 (lr=1.4939e-03) (hash(x)=43789962)
325
+ 2690 train 3.961427 (lr=1.4824e-03) (hash(x)=47653410)
326
+ 2700 val loss 4.0431
327
+ 2700 val perplexity 57.0000
328
+ 2700 train 4.098878 (lr=1.4709e-03) (hash(x)=42034666)
329
+ 2710 train 4.216893 (lr=1.4594e-03) (hash(x)=34261377)
330
+ 2720 train 4.066793 (lr=1.4480e-03) (hash(x)=43776091)
331
+ 2730 train 4.093709 (lr=1.4365e-03) (hash(x)=38652143)
332
+ 2740 train 4.141800 (lr=1.4251e-03) (hash(x)=37399153)
333
+ 2750 train 4.077436 (lr=1.4137e-03) (hash(x)=41956544)
334
+ 2760 train 4.350531 (lr=1.4023e-03) (hash(x)=37661382)
335
+ 2770 train 3.990008 (lr=1.3909e-03) (hash(x)=45791682)
336
+ 2780 train 3.965220 (lr=1.3795e-03) (hash(x)=40218424)
337
+ 2790 train 3.984706 (lr=1.3682e-03) (hash(x)=38121125)
338
+ 2800 val loss 4.0295
339
+ 2800 val perplexity 56.2332
340
+ 2800 train 4.042178 (lr=1.3569e-03) (hash(x)=38892664)
341
+ 2810 train 4.024083 (lr=1.3456e-03) (hash(x)=41180129)
342
+ 2820 train 3.791167 (lr=1.3343e-03) (hash(x)=41017023)
343
+ 2830 train 3.895333 (lr=1.3230e-03) (hash(x)=42197711)
344
+ 2840 train 3.923236 (lr=1.3118e-03) (hash(x)=37993621)
345
+ 2850 train 3.824634 (lr=1.3006e-03) (hash(x)=34469615)
346
+ 2860 train 4.011956 (lr=1.2894e-03) (hash(x)=42013555)
347
+ 2870 train 4.069555 (lr=1.2783e-03) (hash(x)=37463962)
348
+ 2880 train 4.026474 (lr=1.2671e-03) (hash(x)=36874351)
349
+ 2890 train 3.997091 (lr=1.2560e-03) (hash(x)=43649626)
350
+ 2900 val loss 4.0123
351
+ 2900 val perplexity 55.2732
352
+ 2900 train 4.123693 (lr=1.2450e-03) (hash(x)=42022821)
353
+ 2910 train 3.986369 (lr=1.2339e-03) (hash(x)=36933935)
354
+ 2920 train 3.985058 (lr=1.2229e-03) (hash(x)=41313698)
355
+ 2930 train 4.020407 (lr=1.2119e-03) (hash(x)=36341574)
356
+ 2940 train 3.998677 (lr=1.2010e-03) (hash(x)=35505945)
357
+ 2950 train 4.044476 (lr=1.1901e-03) (hash(x)=47407907)
358
+ 2960 train 3.962711 (lr=1.1792e-03) (hash(x)=39217310)
359
+ 2970 train 3.973529 (lr=1.1684e-03) (hash(x)=35391019)
360
+ 2980 train 3.896692 (lr=1.1576e-03) (hash(x)=37929528)
361
+ 2990 train 4.499238 (lr=1.1468e-03) (hash(x)=42604487)
362
+ 3000 val loss 4.0030
363
+ 3000 val perplexity 54.7607
364
+ 3000 train 3.937917 (lr=1.1361e-03) (hash(x)=36439520)
365
+ 3010 train 3.790381 (lr=1.1254e-03) (hash(x)=36110173)
366
+ 3020 train 3.801156 (lr=1.1147e-03) (hash(x)=41712329)
367
+ 3030 train 3.854483 (lr=1.1041e-03) (hash(x)=39541717)
368
+ 3040 train 3.889730 (lr=1.0935e-03) (hash(x)=38203029)
369
+ 3050 train 4.215052 (lr=1.0830e-03) (hash(x)=45668569)
370
+ 3060 train 3.906381 (lr=1.0725e-03) (hash(x)=42053235)
371
+ 3070 train 3.894653 (lr=1.0620e-03) (hash(x)=38772023)
372
+ 3080 train 3.919325 (lr=1.0516e-03) (hash(x)=40842122)
373
+ 3090 train 3.864968 (lr=1.0412e-03) (hash(x)=43911820)
374
+ 3100 val loss 3.9942
375
+ 3100 val perplexity 54.2799
376
+ 3100 train 3.908470 (lr=1.0309e-03) (hash(x)=42366363)
377
+ 3110 train 3.897624 (lr=1.0207e-03) (hash(x)=42499370)
378
+ 3120 train 3.906368 (lr=1.0104e-03) (hash(x)=37964948)
379
+ 3130 train 4.026742 (lr=1.0002e-03) (hash(x)=37801102)
380
+ 3140 train 4.025426 (lr=9.9011e-04) (hash(x)=39187569)
381
+ 3150 train 4.049002 (lr=9.8002e-04) (hash(x)=37104415)
382
+ 3160 train 4.069867 (lr=9.6999e-04) (hash(x)=40414652)
383
+ 3170 train 3.762482 (lr=9.6000e-04) (hash(x)=39971875)
384
+ 3180 train 3.889255 (lr=9.5007e-04) (hash(x)=36823751)
385
+ 3190 train 3.929301 (lr=9.4019e-04) (hash(x)=41232481)
386
+ 3200 val loss 3.9756
387
+ 3200 val perplexity 53.2837
388
+ 3200 train 3.902184 (lr=9.3036e-04) (hash(x)=42263706)
389
+ 3210 train 3.931210 (lr=9.2058e-04) (hash(x)=42682932)
390
+ 3220 train 3.899759 (lr=9.1085e-04) (hash(x)=38173495)
391
+ 3230 train 3.854675 (lr=9.0118e-04) (hash(x)=42665215)
392
+ 3240 train 3.815650 (lr=8.9157e-04) (hash(x)=37430175)
393
+ 3250 train 3.879018 (lr=8.8201e-04) (hash(x)=39501818)
394
+ 3260 train 3.951764 (lr=8.7251e-04) (hash(x)=39316351)
395
+ 3270 train 4.088800 (lr=8.6307e-04) (hash(x)=33934131)
396
+ 3280 train 3.958935 (lr=8.5368e-04) (hash(x)=33684008)
397
+ 3290 train 3.956379 (lr=8.4435e-04) (hash(x)=41195688)
398
+ 3300 val loss 3.9790
399
+ 3300 val perplexity 53.4635
400
+ 3300 train 3.920529 (lr=8.3508e-04) (hash(x)=38146885)
401
+ 3310 train 3.861522 (lr=8.2588e-04) (hash(x)=41531159)
402
+ 3320 train 3.973843 (lr=8.1673e-04) (hash(x)=37465393)
403
+ 3330 train 4.000519 (lr=8.0764e-04) (hash(x)=41276314)
404
+ 3340 train 3.946972 (lr=7.9862e-04) (hash(x)=40035419)
405
+ 3350 train 3.958266 (lr=7.8966e-04) (hash(x)=38589416)
406
+ 3360 train 3.814271 (lr=7.8076e-04) (hash(x)=44616716)
407
+ 3370 train 3.944501 (lr=7.7192e-04) (hash(x)=40753545)
408
+ 3380 train 3.771914 (lr=7.6315e-04) (hash(x)=36064706)
409
+ 3390 train 3.742320 (lr=7.5445e-04) (hash(x)=35628005)
410
+ 3400 val loss 3.9567
411
+ 3400 val perplexity 52.2840
412
+ 3400 train 3.858490 (lr=7.4581e-04) (hash(x)=37242512)
413
+ 3410 train 3.914055 (lr=7.3724e-04) (hash(x)=49988219)
414
+ 3420 train 3.902751 (lr=7.2874e-04) (hash(x)=39309246)
415
+ 3430 train 4.013696 (lr=7.2030e-04) (hash(x)=41337256)
416
+ 3440 train 4.081922 (lr=7.1193e-04) (hash(x)=38422583)
417
+ 3450 train 3.864373 (lr=7.0363e-04) (hash(x)=42782162)
418
+ 3460 train 4.231926 (lr=6.9541e-04) (hash(x)=36355160)
419
+ 3470 train 3.938464 (lr=6.8725e-04) (hash(x)=43452584)
420
+ 3480 train 4.009171 (lr=6.7916e-04) (hash(x)=36009375)
421
+ 3490 train 3.877250 (lr=6.7114e-04) (hash(x)=44201141)
422
+ 3500 val loss 3.9422
423
+ 3500 val perplexity 51.5334
424
+ 3500 train 3.844806 (lr=6.6320e-04) (hash(x)=32974340)
425
+ 3510 train 3.838912 (lr=6.5533e-04) (hash(x)=38454893)
426
+ 3520 train 3.811737 (lr=6.4753e-04) (hash(x)=39406776)
427
+ 3530 train 3.862582 (lr=6.3981e-04) (hash(x)=40863480)
428
+ 3540 train 3.857033 (lr=6.3216e-04) (hash(x)=39891139)
429
+ 3550 train 3.998603 (lr=6.2458e-04) (hash(x)=44049608)
430
+ 3560 train 3.897249 (lr=6.1708e-04) (hash(x)=38049508)
431
+ 3570 train 4.005656 (lr=6.0966e-04) (hash(x)=39546422)
432
+ 3580 train 3.901345 (lr=6.0231e-04) (hash(x)=48257091)
433
+ 3590 train 3.831905 (lr=5.9504e-04) (hash(x)=36249953)
434
+ 3600 val loss 3.9298
435
+ 3600 val perplexity 50.8977
436
+ 3600 train 3.895782 (lr=5.8785e-04) (hash(x)=39324021)
437
+ 3610 train 3.874030 (lr=5.8074e-04) (hash(x)=35944408)
438
+ 3620 train 3.868089 (lr=5.7370e-04) (hash(x)=37992132)
439
+ 3630 train 3.891168 (lr=5.6675e-04) (hash(x)=37723468)
440
+ 3640 train 3.904763 (lr=5.5987e-04) (hash(x)=38300750)
441
+ 3650 train 3.927681 (lr=5.5308e-04) (hash(x)=41511149)
442
+ 3660 train 3.813385 (lr=5.4636e-04) (hash(x)=46384740)
443
+ 3670 train 3.895442 (lr=5.3973e-04) (hash(x)=40100004)
444
+ 3680 train 3.783933 (lr=5.3318e-04) (hash(x)=37889842)
445
+ 3690 train 3.907008 (lr=5.2671e-04) (hash(x)=40976637)
446
+ 3700 val loss 3.9253
447
+ 3700 val perplexity 50.6698
448
+ 3700 train 3.960986 (lr=5.2033e-04) (hash(x)=40138225)
449
+ 3710 train 3.942853 (lr=5.1402e-04) (hash(x)=38068714)
450
+ 3720 train 3.923973 (lr=5.0780e-04) (hash(x)=40819257)
451
+ 3730 train 3.876613 (lr=5.0167e-04) (hash(x)=29564095)
452
+ 3740 train 3.857780 (lr=4.9562e-04) (hash(x)=36509087)
453
+ 3750 train 4.009620 (lr=4.8965e-04) (hash(x)=41226118)
454
+ 3760 train 3.930945 (lr=4.8377e-04) (hash(x)=38317131)
455
+ 3770 train 3.919691 (lr=4.7798e-04) (hash(x)=41244909)
456
+ 3780 train 3.787496 (lr=4.7227e-04) (hash(x)=41412670)
457
+ 3790 train 3.750819 (lr=4.6665e-04) (hash(x)=34906259)
458
+ 3800 val loss 3.9185
459
+ 3800 val perplexity 50.3243
460
+ 3800 train 4.053521 (lr=4.6112e-04) (hash(x)=40968992)
461
+ 3810 train 3.773163 (lr=4.5567e-04) (hash(x)=35241519)
462
+ 3820 train 3.897375 (lr=4.5031e-04) (hash(x)=42755713)
463
+ 3830 train 3.854164 (lr=4.4504e-04) (hash(x)=47166492)
464
+ 3840 train 4.016672 (lr=4.3986e-04) (hash(x)=57200827)
465
+ 3850 train 3.880161 (lr=4.3477e-04) (hash(x)=44734592)
466
+ 3860 train 3.786560 (lr=4.2977e-04) (hash(x)=32738162)
467
+ 3870 train 3.966586 (lr=4.2486e-04) (hash(x)=39064476)
468
+ 3880 train 3.808678 (lr=4.2004e-04) (hash(x)=39921346)
469
+ 3890 train 3.931864 (lr=4.1530e-04) (hash(x)=35960386)
470
+ 3900 val loss 3.9099
471
+ 3900 val perplexity 49.8922
472
+ 3900 train 3.962744 (lr=4.1066e-04) (hash(x)=40314553)
473
+ 3910 train 3.911427 (lr=4.0611e-04) (hash(x)=43961115)
474
+ 3920 train 3.980652 (lr=4.0166e-04) (hash(x)=43423054)
475
+ 3930 train 4.160213 (lr=3.9729e-04) (hash(x)=34054777)
476
+ 3940 train 3.935376 (lr=3.9302e-04) (hash(x)=39706545)
477
+ 3950 train 3.908807 (lr=3.8884e-04) (hash(x)=37107026)
478
+ 3960 train 3.854841 (lr=3.8475e-04) (hash(x)=37669670)
479
+ 3970 train 3.928967 (lr=3.8076e-04) (hash(x)=32520030)
480
+ 3980 train 3.788168 (lr=3.7685e-04) (hash(x)=43036200)
481
+ 3990 train 3.828465 (lr=3.7305e-04) (hash(x)=39177193)
482
+ 4000 val loss 3.9009
483
+ 4000 val perplexity 49.4469
484
+ 4000 train 3.817120 (lr=3.6933e-04) (hash(x)=42698385)
485
+ 4010 train 3.768872 (lr=3.6572e-04) (hash(x)=36812123)
486
+ 4020 train 3.850057 (lr=3.6219e-04) (hash(x)=40477406)
487
+ 4030 train 3.886528 (lr=3.5876e-04) (hash(x)=43794844)
488
+ 4040 train 3.964525 (lr=3.5543e-04) (hash(x)=39871633)
489
+ 4050 train 3.907602 (lr=3.5219e-04) (hash(x)=44942955)
490
+ 4060 train 3.897296 (lr=3.4905e-04) (hash(x)=37704728)
491
+ 4070 train 4.381867 (lr=3.4600e-04) (hash(x)=44586874)
492
+ 4080 train 3.933474 (lr=3.4305e-04) (hash(x)=40698169)
493
+ 4090 train 4.022396 (lr=3.4019e-04) (hash(x)=56230743)
494
+ 4100 val loss 3.8913
495
+ 4100 val perplexity 48.9768
496
+ 4100 train 3.948353 (lr=3.3744e-04) (hash(x)=39745125)
497
+ 4110 train 3.891147 (lr=3.3477e-04) (hash(x)=38038631)
498
+ 4120 train 3.918485 (lr=3.3221e-04) (hash(x)=41225060)
499
+ 4130 train 3.941681 (lr=3.2974e-04) (hash(x)=43984411)
500
+ 4140 train 3.812963 (lr=3.2737e-04) (hash(x)=35070209)
501
+ 4150 train 3.822048 (lr=3.2510e-04) (hash(x)=44830085)
502
+ 4160 train 3.767134 (lr=3.2292e-04) (hash(x)=34909340)
503
+ 4170 train 3.967909 (lr=3.2085e-04) (hash(x)=39799332)
504
+ 4180 train 3.937305 (lr=3.1887e-04) (hash(x)=34673396)
505
+ 4190 train 3.965976 (lr=3.1699e-04) (hash(x)=43013076)
506
+ 4200 val loss 3.8844
507
+ 4200 val perplexity 48.6356
508
+ 4200 train 3.988840 (lr=3.1520e-04) (hash(x)=44540286)
509
+ 4210 train 3.943343 (lr=3.1352e-04) (hash(x)=39338462)
510
+ 4220 train 3.839639 (lr=3.1193e-04) (hash(x)=40697563)
511
+ 4230 train 3.954925 (lr=3.1044e-04) (hash(x)=36447380)
512
+ 4240 train 3.918387 (lr=3.0905e-04) (hash(x)=31542722)
513
+ 4250 train 3.816908 (lr=3.0776e-04) (hash(x)=38193385)
514
+ 4260 train 3.818686 (lr=3.0657e-04) (hash(x)=34604433)
515
+ 4270 train 3.812875 (lr=3.0548e-04) (hash(x)=34316175)
516
+ 4280 train 3.869412 (lr=3.0449e-04) (hash(x)=41834221)
517
+ 4290 train 3.871763 (lr=3.0359e-04) (hash(x)=40456686)
518
+ 4300 val loss 3.8861
519
+ 4300 val perplexity 48.7219
520
+ 4300 train 3.780109 (lr=3.0280e-04) (hash(x)=38688927)
521
+ 4310 train 3.959104 (lr=3.0210e-04) (hash(x)=41920949)
522
+ 4320 train 3.839458 (lr=3.0150e-04) (hash(x)=39588763)
523
+ 4330 train 3.878519 (lr=3.0101e-04) (hash(x)=39696346)
524
+ 4340 train 3.973913 (lr=3.0061e-04) (hash(x)=36406591)
525
+ 4350 train 3.945446 (lr=3.0031e-04) (hash(x)=34248339)
526
+ 4360 train 3.866366 (lr=3.0011e-04) (hash(x)=39031938)
527
+ 4370 train 3.896869 (lr=3.0001e-04) (hash(x)=38410485)
528
+ 4374 val loss 3.8786
529
+ 4374 val perplexity 48.3566
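The log above interleaves train and validation lines. A short sketch for pulling the validation-loss curve back out of log2.txt, assuming the line format shown above ("N val loss X"):

import re

val_re = re.compile(r"^(\d+) val loss ([\d.]+)$")

val_points = []
with open("logs/fix_compile_bug/relu_graph_break/log2.txt") as f:
    for line in f:
        m = val_re.match(line.strip())
        if m:
            val_points.append((int(m.group(1)), float(m.group(2))))

print(val_points[0], val_points[-1])
# From the log above this should give (0, 11.2679) and (4374, 3.8786).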
logs/fix_compile_bug/relu_graph_break/model_04374.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ad47e4518a2b9ad4725fff47f4c3c998f316dd96ad9d88dc9c13a0232c270177
+ size 97706546
logs/fix_compile_bug/relu_graph_break/optimizer_04374.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:88a4bb66d319aba0ed71c1a574934b0daf8342a4f0b33acd3e328d0209cdb37e
+ size 189135414
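model_04374.pt and optimizer_04374.pt are stored through Git LFS, so the diff only records pointer files (oid and size), not the tensors themselves. A sketch for fetching and inspecting the model checkpoint, assuming a placeholder repo id and that the file is an ordinary torch.save checkpoint:

import torch
from huggingface_hub import hf_hub_download

# repo_id is a placeholder; the hosting repo is not named on this page.
local_path = hf_hub_download(
    repo_id="andrew-healey/REPO_NAME",
    filename="logs/fix_compile_bug/relu_graph_break/model_04374.pt",
)

# Assumes a torch.save'd checkpoint; map to CPU so no GPU is needed to inspect it.
state = torch.load(local_path, map_location="cpu")
print(type(state))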