Upload folder using huggingface_hub
scaling_mha_granularity_2/hd_128_seed_1339/dataloader_04374.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:6031fd3e2855a036f7a5531cc24555aabd1115f9dd6618b8b2ca6f55279ef0b2
 size 964
scaling_mha_granularity_2/hd_128_seed_1339/log2.txt
CHANGED
@@ -1,529 +1,529 @@
 max_steps: 4375
-[528 removed lines: the previous training log for steps 0 through 4374; the old numeric values are truncated in this diff view]
|
| 2 |
+
0 val loss 11.3296
|
| 3 |
+
0 val perplexity 83249.8203
|
| 4 |
+
0 train 11.334114 (lr=1.2000e-05) (hash(x)=165975646)
|
| 5 |
+
10 train 9.589714 (lr=1.3200e-04) (hash(x)=148013305)
|
| 6 |
+
20 train 8.207848 (lr=2.5200e-04) (hash(x)=148456869)
|
| 7 |
+
30 train 7.620950 (lr=3.7200e-04) (hash(x)=161872433)
|
| 8 |
+
40 train 7.358727 (lr=4.9200e-04) (hash(x)=149957686)
|
| 9 |
+
50 train 7.172750 (lr=6.1200e-04) (hash(x)=172543864)
|
| 10 |
+
60 train 7.168416 (lr=7.3200e-04) (hash(x)=179765771)
|
| 11 |
+
70 train 6.798749 (lr=8.5200e-04) (hash(x)=157826813)
|
| 12 |
+
80 train 6.798883 (lr=9.7200e-04) (hash(x)=164392066)
|
| 13 |
+
90 train 6.661848 (lr=1.0920e-03) (hash(x)=155935551)
|
| 14 |
+
100 val loss 6.6047
|
| 15 |
+
100 val perplexity 738.5858
|
| 16 |
+
100 train 6.491619 (lr=1.2120e-03) (hash(x)=163224052)
|
| 17 |
+
110 train 6.518770 (lr=1.3320e-03) (hash(x)=152737029)
|
| 18 |
+
120 train 6.384511 (lr=1.4520e-03) (hash(x)=160672632)
|
| 19 |
+
130 train 6.262604 (lr=1.5720e-03) (hash(x)=163241107)
|
| 20 |
+
140 train 6.281211 (lr=1.6920e-03) (hash(x)=158622777)
|
| 21 |
+
150 train 6.363368 (lr=1.8120e-03) (hash(x)=155912425)
|
| 22 |
+
160 train 6.344102 (lr=1.9320e-03) (hash(x)=171017827)
|
| 23 |
+
170 train 6.243967 (lr=2.0520e-03) (hash(x)=166742757)
|
| 24 |
+
180 train 6.180585 (lr=2.1720e-03) (hash(x)=159924472)
|
| 25 |
+
190 train 6.157166 (lr=2.2920e-03) (hash(x)=153494648)
|
| 26 |
+
200 val loss 6.0887
|
| 27 |
+
200 val perplexity 440.8489
|
| 28 |
+
200 train 6.142984 (lr=2.4120e-03) (hash(x)=161358058)
|
| 29 |
+
210 train 6.066473 (lr=2.5320e-03) (hash(x)=145035701)
|
| 30 |
+
220 train 6.001383 (lr=2.6520e-03) (hash(x)=158648875)
|
| 31 |
+
230 train 6.121311 (lr=2.7720e-03) (hash(x)=164635025)
|
| 32 |
+
240 train 5.850390 (lr=2.8920e-03) (hash(x)=149742497)
|
| 33 |
+
250 train 5.899929 (lr=3.0000e-03) (hash(x)=167005069)
|
| 34 |
+
260 train 5.771887 (lr=3.0000e-03) (hash(x)=163726440)
|
| 35 |
+
270 train 5.749442 (lr=2.9998e-03) (hash(x)=151610423)
|
| 36 |
+
280 train 5.691358 (lr=2.9996e-03) (hash(x)=161421866)
|
| 37 |
+
290 train 5.607024 (lr=2.9994e-03) (hash(x)=138340911)
|
| 38 |
+
300 val loss 5.6924
|
| 39 |
+
300 val perplexity 296.6138
|
| 40 |
+
300 train 5.454905 (lr=2.9990e-03) (hash(x)=146894081)
|
| 41 |
+
310 train 5.407044 (lr=2.9986e-03) (hash(x)=158876217)
|
| 42 |
+
320 train 5.483162 (lr=2.9981e-03) (hash(x)=169472178)
|
| 43 |
+
330 train 5.598046 (lr=2.9975e-03) (hash(x)=157958346)
|
| 44 |
+
340 train 5.616897 (lr=2.9968e-03) (hash(x)=175684344)
|
| 45 |
+
350 train 5.601511 (lr=2.9961e-03) (hash(x)=159051114)
|
| 46 |
+
360 train 5.533087 (lr=2.9953e-03) (hash(x)=152215772)
|
| 47 |
+
370 train 5.540503 (lr=2.9944e-03) (hash(x)=161579675)
|
| 48 |
+
380 train 5.355555 (lr=2.9934e-03) (hash(x)=157026983)
|
| 49 |
+
390 train 5.330606 (lr=2.9923e-03) (hash(x)=156370364)
|
| 50 |
+
400 val loss 5.3227
|
| 51 |
+
400 val perplexity 204.9415
|
| 52 |
+
400 train 5.322221 (lr=2.9912e-03) (hash(x)=154702603)
|
| 53 |
+
410 train 5.279710 (lr=2.9900e-03) (hash(x)=165261055)
|
| 54 |
+
420 train 5.277102 (lr=2.9887e-03) (hash(x)=147555294)
|
| 55 |
+
430 train 5.314711 (lr=2.9873e-03) (hash(x)=166093328)
|
| 56 |
+
440 train 5.215892 (lr=2.9859e-03) (hash(x)=165254485)
|
| 57 |
+
450 train 5.216350 (lr=2.9844e-03) (hash(x)=140386917)
|
| 58 |
+
460 train 5.118093 (lr=2.9828e-03) (hash(x)=166690055)
|
| 59 |
+
470 train 4.917212 (lr=2.9811e-03) (hash(x)=151425273)
|
| 60 |
+
480 train 5.047673 (lr=2.9793e-03) (hash(x)=180686459)
|
| 61 |
+
490 train 4.870615 (lr=2.9775e-03) (hash(x)=160473697)
|
| 62 |
+
500 val loss 5.1584
|
| 63 |
+
500 val perplexity 173.8936
|
| 64 |
+
500 train 4.964884 (lr=2.9756e-03) (hash(x)=154665548)
|
| 65 |
+
510 train 5.078107 (lr=2.9736e-03) (hash(x)=165066653)
|
| 66 |
+
520 train 5.130062 (lr=2.9716e-03) (hash(x)=155375501)
|
| 67 |
+
530 train 5.246745 (lr=2.9694e-03) (hash(x)=157291678)
|
| 68 |
+
540 train 5.157645 (lr=2.9672e-03) (hash(x)=158751844)
|
| 69 |
+
550 train 5.114317 (lr=2.9649e-03) (hash(x)=169683891)
|
| 70 |
+
560 train 5.143666 (lr=2.9625e-03) (hash(x)=144456796)
|
| 71 |
+
570 train 5.116349 (lr=2.9601e-03) (hash(x)=158365992)
|
| 72 |
+
580 train 5.042166 (lr=2.9576e-03) (hash(x)=158188684)
|
| 73 |
+
590 train 5.050703 (lr=2.9550e-03) (hash(x)=164060384)
|
| 74 |
+
600 val loss 5.0012
|
| 75 |
+
600 val perplexity 148.5954
|
| 76 |
+
600 train 5.056788 (lr=2.9523e-03) (hash(x)=154544350)
|
| 77 |
+
610 train 4.966457 (lr=2.9496e-03) (hash(x)=160151177)
|
| 78 |
+
620 train 4.942435 (lr=2.9468e-03) (hash(x)=151113169)
|
| 79 |
+
630 train 4.853900 (lr=2.9439e-03) (hash(x)=162956682)
|
| 80 |
+
640 train 4.847920 (lr=2.9409e-03) (hash(x)=160228084)
|
| 81 |
+
650 train 4.806967 (lr=2.9378e-03) (hash(x)=145530386)
|
| 82 |
+
660 train 4.727909 (lr=2.9347e-03) (hash(x)=163254968)
|
| 83 |
+
670 train 4.720093 (lr=2.9315e-03) (hash(x)=176416192)
|
| 84 |
+
680 train 4.621030 (lr=2.9283e-03) (hash(x)=150118428)
|
| 85 |
+
690 train 4.656602 (lr=2.9249e-03) (hash(x)=150414917)
|
| 86 |
+
700 val loss 4.8629
|
| 87 |
+
700 val perplexity 129.4005
|
| 88 |
+
700 train 4.630126 (lr=2.9215e-03) (hash(x)=162596235)
|
| 89 |
+
710 train 4.910139 (lr=2.9180e-03) (hash(x)=159363258)
|
| 90 |
+
720 train 4.880068 (lr=2.9144e-03) (hash(x)=166036967)
|
| 91 |
+
730 train 4.894136 (lr=2.9108e-03) (hash(x)=159938291)
|
| 92 |
+
740 train 4.853999 (lr=2.9071e-03) (hash(x)=176297617)
|
| 93 |
+
750 train 4.793022 (lr=2.9033e-03) (hash(x)=149845530)
|
| 94 |
+
760 train 4.836494 (lr=2.8994e-03) (hash(x)=152777309)
|
| 95 |
+
770 train 4.780898 (lr=2.8955e-03) (hash(x)=168903075)
|
| 96 |
+
780 train 4.805902 (lr=2.8915e-03) (hash(x)=166843738)
|
| 97 |
+
790 train 4.776241 (lr=2.8874e-03) (hash(x)=166358104)
|
| 98 |
+
800 val loss 4.7443
|
| 99 |
+
800 val perplexity 114.9245
|
| 100 |
+
800 train 4.724244 (lr=2.8833e-03) (hash(x)=153477747)
|
| 101 |
+
810 train 4.633126 (lr=2.8791e-03) (hash(x)=157954385)
|
| 102 |
+
820 train 4.654481 (lr=2.8748e-03) (hash(x)=174383505)
|
| 103 |
+
830 train 4.731977 (lr=2.8704e-03) (hash(x)=145226181)
|
| 104 |
+
840 train 4.676006 (lr=2.8660e-03) (hash(x)=157665743)
|
| 105 |
+
850 train 4.641937 (lr=2.8615e-03) (hash(x)=158565836)
|
| 106 |
+
860 train 4.749852 (lr=2.8569e-03) (hash(x)=171168105)
|
| 107 |
+
870 train 4.747385 (lr=2.8523e-03) (hash(x)=159017777)
|
| 108 |
+
880 train 4.716692 (lr=2.8476e-03) (hash(x)=168156768)
|
| 109 |
+
890 train 4.707652 (lr=2.8428e-03) (hash(x)=145645245)
|
| 110 |
+
900 val loss 4.6663
|
| 111 |
+
900 val perplexity 106.3069
|
| 112 |
+
900 train 4.655340 (lr=2.8379e-03) (hash(x)=151245521)
|
| 113 |
+
910 train 4.657638 (lr=2.8330e-03) (hash(x)=182614672)
|
| 114 |
+
920 train 4.693944 (lr=2.8280e-03) (hash(x)=167377432)
|
| 115 |
+
930 train 4.588878 (lr=2.8230e-03) (hash(x)=171347228)
|
| 116 |
+
940 train 4.601975 (lr=2.8178e-03) (hash(x)=171588755)
|
| 117 |
+
950 train 4.598028 (lr=2.8127e-03) (hash(x)=159560168)
|
| 118 |
+
960 train 4.640528 (lr=2.8074e-03) (hash(x)=171801357)
|
| 119 |
+
970 train 4.598306 (lr=2.8021e-03) (hash(x)=172513884)
|
| 120 |
+
980 train 4.537321 (lr=2.7967e-03) (hash(x)=170659685)
|
| 121 |
+
990 train 4.641826 (lr=2.7912e-03) (hash(x)=165436358)
|
| 122 |
+
1000 val loss 4.5850
|
| 123 |
+
1000 val perplexity 98.0058
|
| 124 |
+
1000 train 4.612329 (lr=2.7857e-03) (hash(x)=167684690)
|
| 125 |
+
1010 train 4.787028 (lr=2.7801e-03) (hash(x)=127225492)
|
| 126 |
+
1020 train 4.682377 (lr=2.7744e-03) (hash(x)=167575323)
|
| 127 |
+
1030 train 4.647320 (lr=2.7687e-03) (hash(x)=161266302)
|
| 128 |
+
1040 train 4.552930 (lr=2.7629e-03) (hash(x)=155607661)
|
| 129 |
+
1050 train 4.624825 (lr=2.7571e-03) (hash(x)=165685359)
|
| 130 |
+
1060 train 4.621967 (lr=2.7512e-03) (hash(x)=160417824)
|
| 131 |
+
1070 train 4.628582 (lr=2.7452e-03) (hash(x)=173313882)
|
| 132 |
+
1080 train 4.514922 (lr=2.7391e-03) (hash(x)=165485047)
|
| 133 |
+
1090 train 4.548283 (lr=2.7330e-03) (hash(x)=159848305)
|
| 134 |
+
1100 val loss 4.5289
|
| 135 |
+
1100 val perplexity 92.6528
|
| 136 |
+
1100 train 4.579960 (lr=2.7269e-03) (hash(x)=170890601)
|
| 137 |
+
1110 train 4.505666 (lr=2.7206e-03) (hash(x)=172689983)
|
| 138 |
+
1120 train 4.536810 (lr=2.7143e-03) (hash(x)=154762859)
|
| 139 |
+
1130 train 4.566061 (lr=2.7080e-03) (hash(x)=168196871)
|
| 140 |
+
1140 train 4.418510 (lr=2.7016e-03) (hash(x)=156715577)
|
| 141 |
+
1150 train 4.649400 (lr=2.6951e-03) (hash(x)=166381788)
|
| 142 |
+
1160 train 4.584499 (lr=2.6886e-03) (hash(x)=162095130)
|
| 143 |
+
1170 train 4.562027 (lr=2.6820e-03) (hash(x)=166094750)
|
| 144 |
+
1180 train 4.502573 (lr=2.6753e-03) (hash(x)=154965753)
|
| 145 |
+
1190 train 4.534716 (lr=2.6686e-03) (hash(x)=172766447)
|
| 146 |
+
1200 val loss 4.4599
|
| 147 |
+
1200 val perplexity 86.4808
|
| 148 |
+
1200 train 4.396911 (lr=2.6618e-03) (hash(x)=161185334)
|
| 149 |
+
1210 train 4.476095 (lr=2.6550e-03) (hash(x)=167224144)
|
| 150 |
+
1220 train 4.458037 (lr=2.6481e-03) (hash(x)=167252866)
|
| 151 |
+
1230 train 4.384659 (lr=2.6411e-03) (hash(x)=155856924)
|
| 152 |
+
1240 train 4.454168 (lr=2.6341e-03) (hash(x)=155956446)
|
| 153 |
+
1250 train 4.298338 (lr=2.6270e-03) (hash(x)=140794796)
|
| 154 |
+
1260 train 4.454971 (lr=2.6199e-03) (hash(x)=166863869)
|
| 155 |
+
1270 train 4.378241 (lr=2.6127e-03) (hash(x)=166021892)
|
| 156 |
+
1280 train 4.449367 (lr=2.6055e-03) (hash(x)=170503648)
|
| 157 |
+
1290 train 4.465008 (lr=2.5982e-03) (hash(x)=164012719)
|
| 158 |
+
1300 val loss 4.4191
|
| 159 |
+
1300 val perplexity 83.0204
|
| 160 |
+
1300 train 4.431269 (lr=2.5909e-03) (hash(x)=156519834)
|
| 161 |
+
1310 train 4.619505 (lr=2.5835e-03) (hash(x)=165146652)
|
| 162 |
+
1320 train 4.369074 (lr=2.5760e-03) (hash(x)=174391990)
|
| 163 |
+
1330 train 4.409378 (lr=2.5685e-03) (hash(x)=161131201)
|
| 164 |
+
1340 train 4.515519 (lr=2.5609e-03) (hash(x)=162240912)
|
| 165 |
+
1350 train 4.336414 (lr=2.5533e-03) (hash(x)=152462931)
|
| 166 |
+
1360 train 4.450696 (lr=2.5457e-03) (hash(x)=150443278)
|
| 167 |
+
1370 train 4.367658 (lr=2.5379e-03) (hash(x)=153721711)
|
| 168 |
+
1380 train 4.363069 (lr=2.5302e-03) (hash(x)=158070628)
|
| 169 |
+
1390 train 4.412385 (lr=2.5223e-03) (hash(x)=163318827)
|
| 170 |
+
1400 val loss 4.4054
|
| 171 |
+
1400 val perplexity 81.8917
|
| 172 |
+
1400 train 4.370369 (lr=2.5145e-03) (hash(x)=154210336)
|
| 173 |
+
1410 train 4.525505 (lr=2.5066e-03) (hash(x)=162393933)
|
| 174 |
+
1420 train 4.593505 (lr=2.4986e-03) (hash(x)=156659308)
|
| 175 |
+
1430 train 4.382072 (lr=2.4906e-03) (hash(x)=171243439)
|
| 176 |
+
1440 train 4.513375 (lr=2.4825e-03) (hash(x)=178459521)
|
| 177 |
+
1450 train 4.436228 (lr=2.4744e-03) (hash(x)=159166350)
|
| 178 |
+
1460 train 4.349923 (lr=2.4662e-03) (hash(x)=173554569)
|
| 179 |
+
1470 train 4.390570 (lr=2.4580e-03) (hash(x)=158362521)
|
| 180 |
+
1480 train 4.436795 (lr=2.4497e-03) (hash(x)=165899207)
|
| 181 |
+
1490 train 4.346364 (lr=2.4414e-03) (hash(x)=156470939)
|
| 182 |
+
1500 val loss 4.3692
|
| 183 |
+
1500 val perplexity 78.9842
|
| 184 |
+
1500 train 4.405301 (lr=2.4331e-03) (hash(x)=164197780)
|
| 185 |
+
1510 train 4.349495 (lr=2.4247e-03) (hash(x)=146709896)
|
| 186 |
+
1520 train 4.380601 (lr=2.4162e-03) (hash(x)=151304838)
|
| 187 |
+
1530 train 4.273098 (lr=2.4077e-03) (hash(x)=158179918)
|
| 188 |
+
1540 train 4.303412 (lr=2.3992e-03) (hash(x)=161256064)
|
| 189 |
+
1550 train 4.345532 (lr=2.3906e-03) (hash(x)=151591940)
|
| 190 |
+
1560 train 4.280371 (lr=2.3820e-03) (hash(x)=147120266)
|
| 191 |
+
1570 train 4.431362 (lr=2.3734e-03) (hash(x)=181404631)
|
| 192 |
+
1580 train 4.337122 (lr=2.3647e-03) (hash(x)=157486637)
|
| 193 |
+
1590 train 4.474000 (lr=2.3559e-03) (hash(x)=156440796)
|
| 194 |
+
1600 val loss 4.3434
|
| 195 |
+
1600 val perplexity 76.9667
|
| 196 |
+
1600 train 4.376644 (lr=2.3471e-03) (hash(x)=172479098)
|
| 197 |
+
1610 train 4.516253 (lr=2.3383e-03) (hash(x)=166059760)
|
| 198 |
+
1620 train 4.388950 (lr=2.3294e-03) (hash(x)=165353726)
|
| 199 |
+
1630 train 4.361284 (lr=2.3205e-03) (hash(x)=159323060)
|
| 200 |
+
1640 train 4.458383 (lr=2.3116e-03) (hash(x)=168531548)
|
| 201 |
+
1650 train 4.308321 (lr=2.3026e-03) (hash(x)=151279450)
|
| 202 |
+
1660 train 4.362363 (lr=2.2936e-03) (hash(x)=149136290)
|
| 203 |
+
1670 train 4.295530 (lr=2.2845e-03) (hash(x)=175415017)
|
| 204 |
+
1680 train 4.186372 (lr=2.2754e-03) (hash(x)=160981851)
|
| 205 |
+
1690 train 4.243690 (lr=2.2663e-03) (hash(x)=162044381)
|
| 206 |
+
1700 val loss 4.3476
|
| 207 |
+
1700 val perplexity 77.2918
|
| 208 |
+
1700 train 4.423730 (lr=2.2572e-03) (hash(x)=154630837)
|
| 209 |
+
1710 train 4.412186 (lr=2.2480e-03) (hash(x)=176778401)
|
| 210 |
+
1720 train 4.412152 (lr=2.2387e-03) (hash(x)=154384524)
|
| 211 |
+
1730 train 4.391443 (lr=2.2294e-03) (hash(x)=160342845)
|
| 212 |
+
1740 train 4.389899 (lr=2.2201e-03) (hash(x)=167176158)
|
| 213 |
+
1750 train 4.321714 (lr=2.2108e-03) (hash(x)=165200240)
|
| 214 |
+
1760 train 4.393856 (lr=2.2014e-03) (hash(x)=152577632)
|
| 215 |
+
1770 train 4.262097 (lr=2.1920e-03) (hash(x)=140044301)
|
| 216 |
+
1780 train 4.346868 (lr=2.1826e-03) (hash(x)=175722780)
|
| 217 |
+
1790 train 4.353307 (lr=2.1731e-03) (hash(x)=156350845)
|
| 218 |
+
1800 val loss 4.3014
|
| 219 |
+
1800 val perplexity 73.7999
|
| 220 |
+
1800 train 4.398052 (lr=2.1637e-03) (hash(x)=155062509)
|
| 221 |
+
1810 train 4.267949 (lr=2.1541e-03) (hash(x)=163860874)
|
| 222 |
+
1820 train 4.141739 (lr=2.1446e-03) (hash(x)=158079613)
|
| 223 |
+
1830 train 4.261207 (lr=2.1350e-03) (hash(x)=155660923)
|
| 224 |
+
1840 train 4.233153 (lr=2.1254e-03) (hash(x)=158022360)
|
| 225 |
+
1850 train 4.222778 (lr=2.1158e-03) (hash(x)=168432793)
|
| 226 |
+
1860 train 4.384053 (lr=2.1061e-03) (hash(x)=157042286)
|
| 227 |
+
1870 train 4.374121 (lr=2.0964e-03) (hash(x)=156443358)
|
| 228 |
+
1880 train 4.264701 (lr=2.0867e-03) (hash(x)=155942931)
|
| 229 |
+
1890 train 4.370747 (lr=2.0769e-03) (hash(x)=157175884)
|
| 230 |
+
1900 val loss 4.2760
|
| 231 |
+
1900 val perplexity 71.9552
|
| 232 |
+
1900 train 4.279358 (lr=2.0672e-03) (hash(x)=162546009)
|
| 233 |
+
1910 train 4.254579 (lr=2.0574e-03) (hash(x)=151139265)
|
| 234 |
+
1920 train 4.289567 (lr=2.0476e-03) (hash(x)=154498634)
|
| 235 |
+
1930 train 4.163076 (lr=2.0377e-03) (hash(x)=157286832)
|
| 236 |
+
1940 train 4.254378 (lr=2.0279e-03) (hash(x)=138309582)
|
| 237 |
+
1950 train 4.239661 (lr=2.0180e-03) (hash(x)=158819213)
|
| 238 |
+
1960 train 4.150318 (lr=2.0081e-03) (hash(x)=151883403)
|
| 239 |
+
1970 train 4.340694 (lr=1.9982e-03) (hash(x)=194409548)
|
| 240 |
+
1980 train 4.238113 (lr=1.9882e-03) (hash(x)=160739690)
|
| 241 |
+
1990 train 4.269607 (lr=1.9783e-03) (hash(x)=166445519)
|
| 242 |
+
2000 val loss 4.2623
|
| 243 |
+
2000 val perplexity 70.9697
|
| 244 |
+
2000 train 4.287572 (lr=1.9683e-03) (hash(x)=159673845)
|
| 245 |
+
2010 train 4.302210 (lr=1.9583e-03) (hash(x)=168806069)
|
| 246 |
+
2020 train 4.319132 (lr=1.9483e-03) (hash(x)=160581536)
|
| 247 |
+
2030 train 4.272835 (lr=1.9382e-03) (hash(x)=156141720)
|
| 248 |
+
2040 train 4.389905 (lr=1.9282e-03) (hash(x)=170531572)
|
| 249 |
+
2050 train 4.230220 (lr=1.9181e-03) (hash(x)=160037105)
|
| 250 |
+
2060 train 4.460987 (lr=1.9080e-03) (hash(x)=150757653)
|
| 251 |
+
2070 train 4.282402 (lr=1.8979e-03) (hash(x)=166026751)
|
| 252 |
+
2080 train 4.297229 (lr=1.8878e-03) (hash(x)=157758002)
|
| 253 |
+
2090 train 4.146669 (lr=1.8777e-03) (hash(x)=154520168)
|
| 254 |
+
2100 val loss 4.2316
|
| 255 |
+
2100 val perplexity 68.8305
|
| 256 |
+
2100 train 4.180870 (lr=1.8675e-03) (hash(x)=164198408)
|
| 257 |
+
2110 train 4.174623 (lr=1.8574e-03) (hash(x)=152821408)
|
| 258 |
+
2120 train 4.186363 (lr=1.8472e-03) (hash(x)=158276857)
|
| 259 |
+
2130 train 4.329699 (lr=1.8370e-03) (hash(x)=163521176)
|
| 260 |
+
2140 train 4.364914 (lr=1.8268e-03) (hash(x)=168732683)
|
| 261 |
+
2150 train 4.268414 (lr=1.8166e-03) (hash(x)=149299542)
|
| 262 |
+
2160 train 4.298939 (lr=1.8064e-03) (hash(x)=157444871)
|
| 263 |
+
2170 train 4.310306 (lr=1.7962e-03) (hash(x)=143188824)
|
| 264 |
+
2180 train 4.280026 (lr=1.7860e-03) (hash(x)=158976812)
|
| 265 |
+
2190 train 4.246160 (lr=1.7758e-03) (hash(x)=165191158)
|
| 266 |
+
2200 val loss 4.2160
|
| 267 |
+
2200 val perplexity 67.7592
|
| 268 |
+
2200 train 4.219477 (lr=1.7655e-03) (hash(x)=152995704)
|
| 269 |
+
2210 train 4.275416 (lr=1.7553e-03) (hash(x)=168187659)
|
| 270 |
+
2220 train 4.175550 (lr=1.7450e-03) (hash(x)=155775581)
|
| 271 |
+
2230 train 4.085768 (lr=1.7348e-03) (hash(x)=154680350)
|
| 272 |
+
2240 train 4.176087 (lr=1.7245e-03) (hash(x)=147471278)
|
| 273 |
+
2250 train 4.119020 (lr=1.7142e-03) (hash(x)=159695346)
|
| 274 |
+
2260 train 4.145622 (lr=1.7040e-03) (hash(x)=151025882)
|
| 275 |
+
2270 train 4.115958 (lr=1.6937e-03) (hash(x)=156521158)
|
| 276 |
+
2280 train 4.436255 (lr=1.6834e-03) (hash(x)=156767674)
|
| 277 |
+
2290 train 4.259054 (lr=1.6731e-03) (hash(x)=164457648)
|
| 278 |
+
2300 val loss 4.1949
|
| 279 |
+
2300 val perplexity 66.3475
|
| 280 |
+
2300 train 4.198524 (lr=1.6629e-03) (hash(x)=169509455)
|
| 281 |
+
2310 train 4.277474 (lr=1.6526e-03) (hash(x)=152810614)
|
| 282 |
+
2320 train 4.364194 (lr=1.6423e-03) (hash(x)=158237154)
|
| 283 |
+
2330 train 4.229253 (lr=1.6320e-03) (hash(x)=170356103)
|
| 284 |
+
2340 train 4.317828 (lr=1.6217e-03) (hash(x)=158128269)
|
| 285 |
+
2350 train 4.193096 (lr=1.6114e-03) (hash(x)=138332801)
|
| 286 |
+
2360 train 4.257848 (lr=1.6012e-03) (hash(x)=170994284)
|
| 287 |
+
2370 train 4.351678 (lr=1.5909e-03) (hash(x)=173415638)
|
| 288 |
+
2380 train 4.225911 (lr=1.5806e-03) (hash(x)=154689617)
|
| 289 |
+
2390 train 4.255297 (lr=1.5704e-03) (hash(x)=161793283)
|
| 290 |
+
2400 val loss 4.1873
|
| 291 |
+
2400 val perplexity 65.8436
|
| 292 |
+
2400 train 4.166761 (lr=1.5601e-03) (hash(x)=152419100)
|
| 293 |
+
2410 train 3.985188 (lr=1.5498e-03) (hash(x)=156948391)
|
| 294 |
+
2420 train 4.107901 (lr=1.5396e-03) (hash(x)=158156048)
|
| 295 |
+
2430 train 4.107360 (lr=1.5294e-03) (hash(x)=160653203)
|
| 296 |
+
2440 train 4.079838 (lr=1.5191e-03) (hash(x)=161323851)
|
| 297 |
+
2450 train 4.030266 (lr=1.5089e-03) (hash(x)=159253476)
|
| 298 |
+
2460 train 3.930699 (lr=1.4987e-03) (hash(x)=144756346)
|
| 299 |
+
2470 train 3.942721 (lr=1.4885e-03) (hash(x)=159239815)
|
| 300 |
+
2480 train 4.222439 (lr=1.4783e-03) (hash(x)=155086855)
|
| 301 |
+
2490 train 4.241613 (lr=1.4681e-03) (hash(x)=170243577)
|
| 302 |
+
2500 val loss 4.1600
|
| 303 |
+
2500 val perplexity 64.0719
|
| 304 |
+
2500 train 4.197385 (lr=1.4579e-03) (hash(x)=171811405)
|
| 305 |
+
2510 train 4.210459 (lr=1.4477e-03) (hash(x)=155717902)
|
| 306 |
+
2520 train 4.196662 (lr=1.4375e-03) (hash(x)=160901663)
|
| 307 |
+
2530 train 4.200081 (lr=1.4274e-03) (hash(x)=158088736)
|
| 308 |
+
2540 train 4.235577 (lr=1.4173e-03) (hash(x)=157067679)
|
| 309 |
+
2550 train 4.187673 (lr=1.4071e-03) (hash(x)=157320007)
|
| 310 |
+
2560 train 4.105568 (lr=1.3970e-03) (hash(x)=153565165)
|
| 311 |
+
2570 train 4.130184 (lr=1.3869e-03) (hash(x)=160010797)
|
| 312 |
+
2580 train 4.058619 (lr=1.3769e-03) (hash(x)=168786235)
|
| 313 |
+
2590 train 4.135674 (lr=1.3668e-03) (hash(x)=149540363)
|
| 314 |
+
2600 val loss 4.1444
|
| 315 |
+
2600 val perplexity 63.0822
|
| 316 |
+
2600 train 4.109813 (lr=1.3568e-03) (hash(x)=152837043)
|
| 317 |
+
2610 train 4.053783 (lr=1.3467e-03) (hash(x)=160015391)
|
| 318 |
+
2620 train 3.999572 (lr=1.3367e-03) (hash(x)=170364902)
|
| 319 |
+
2630 train 3.924249 (lr=1.3267e-03) (hash(x)=159798423)
|
| 320 |
+
2640 train 3.956265 (lr=1.3168e-03) (hash(x)=139413884)
|
| 321 |
+
2650 train 4.134111 (lr=1.3068e-03) (hash(x)=163952153)
|
| 322 |
+
2660 train 4.154635 (lr=1.2969e-03) (hash(x)=163253609)
|
| 323 |
+
2670 train 4.352928 (lr=1.2870e-03) (hash(x)=158449696)
|
| 324 |
+
2680 train 4.244215 (lr=1.2771e-03) (hash(x)=160774597)
|
| 325 |
+
2690 train 4.123965 (lr=1.2672e-03) (hash(x)=164911880)
|
| 326 |
+
2700 val loss 4.1374
|
| 327 |
+
2700 val perplexity 62.6398
|
| 328 |
+
2700 train 4.250805 (lr=1.2573e-03) (hash(x)=157220460)
|
| 329 |
+
2710 train 4.357726 (lr=1.2475e-03) (hash(x)=157423915)
|
| 330 |
+
2720 train 4.095804 (lr=1.2377e-03) (hash(x)=151126931)
|
| 331 |
+
2730 train 4.073377 (lr=1.2279e-03) (hash(x)=161890897)
|
| 332 |
+
2740 train 4.050958 (lr=1.2182e-03) (hash(x)=156373676)
|
| 333 |
+
2750 train 4.122328 (lr=1.2085e-03) (hash(x)=165480571)
|
| 334 |
+
2760 train 4.103857 (lr=1.1988e-03) (hash(x)=130397806)
|
| 335 |
+
2770 train 4.101247 (lr=1.1891e-03) (hash(x)=156871436)
|
| 336 |
+
2780 train 4.047716 (lr=1.1794e-03) (hash(x)=168114417)
|
| 337 |
+
2790 train 4.088237 (lr=1.1698e-03) (hash(x)=156585785)
|
| 338 |
+
2800 val loss 4.1222
|
| 339 |
+
2800 val perplexity 61.6965
|
| 340 |
+
2800 train 3.941895 (lr=1.1602e-03) (hash(x)=163983863)
|
| 341 |
+
2810 train 3.914798 (lr=1.1506e-03) (hash(x)=160405761)
|
| 342 |
+
2820 train 3.935615 (lr=1.1411e-03) (hash(x)=163831817)
|
| 343 |
+
2830 train 3.987344 (lr=1.1316e-03) (hash(x)=150112598)
|
| 344 |
+
2840 train 3.956249 (lr=1.1221e-03) (hash(x)=162075903)
|
| 345 |
+
2850 train 4.197121 (lr=1.1127e-03) (hash(x)=165111358)
|
| 346 |
+
2860 train 4.179041 (lr=1.1033e-03) (hash(x)=153236786)
|
| 347 |
+
2870 train 4.320436 (lr=1.0939e-03) (hash(x)=161816361)
|
| 348 |
+
2880 train 4.167611 (lr=1.0845e-03) (hash(x)=151864522)
|
| 349 |
+
2890 train 4.020428 (lr=1.0752e-03) (hash(x)=156002477)
|
| 350 |
+
2900 val loss 4.0972
|
| 351 |
+
2900 val perplexity 60.1736
|
| 352 |
+
2900 train 4.075690 (lr=1.0659e-03) (hash(x)=154611558)
|
| 353 |
+
2910 train 4.261486 (lr=1.0567e-03) (hash(x)=150322906)
|
| 354 |
+
2920 train 4.144260 (lr=1.0474e-03) (hash(x)=136550018)
|
| 355 |
+
2930 train 4.161936 (lr=1.0383e-03) (hash(x)=163368039)
|
| 356 |
+
2940 train 4.048949 (lr=1.0291e-03) (hash(x)=140876015)
|
| 357 |
+
2950 train 4.087054 (lr=1.0200e-03) (hash(x)=166845778)
|
| 358 |
+
2960 train 4.049969 (lr=1.0109e-03) (hash(x)=163143219)
|
| 359 |
+
2970 train 3.997180 (lr=1.0019e-03) (hash(x)=153818849)
|
| 360 |
+
2980 train 4.066802 (lr=9.9289e-04) (hash(x)=156917850)
|
| 361 |
+
2990 train 3.997714 (lr=9.8392e-04) (hash(x)=173189206)
|
| 362 |
+
3000 val loss 4.0977
|
| 363 |
+
3000 val perplexity 60.1994
|
| 364 |
+
3000 train 3.945580 (lr=9.7500e-04) (hash(x)=149025011)
|
| 365 |
+
3010 train 3.824667 (lr=9.6612e-04) (hash(x)=159783526)
|
| 366 |
+
3020 train 3.944181 (lr=9.5727e-04) (hash(x)=153505500)
|
| 367 |
+
3030 train 4.219179 (lr=9.4847e-04) (hash(x)=156235961)
|
| 368 |
+
3040 train 4.060788 (lr=9.3970e-04) (hash(x)=167294230)
|
| 369 |
+
3050 train 4.143058 (lr=9.3098e-04) (hash(x)=166095533)
|
| 370 |
+
3060 train 4.120775 (lr=9.2230e-04) (hash(x)=118432481)
|
| 371 |
+
3070 train 4.124558 (lr=9.1366e-04) (hash(x)=165105881)
|
| 372 |
+
3080 train 4.202753 (lr=9.0506e-04) (hash(x)=165228401)
|
| 373 |
+
3090 train 4.201431 (lr=8.9651e-04) (hash(x)=159692678)
|
| 374 |
+
3100 val loss 4.0796
|
| 375 |
+
3100 val perplexity 59.1226
|
| 376 |
+
3100 train 4.107082 (lr=8.8800e-04) (hash(x)=158682056)
|
| 377 |
+
3110 train 4.174942 (lr=8.7954e-04) (hash(x)=152985093)
|
| 378 |
+
3120 train 3.867309 (lr=8.7112e-04) (hash(x)=175378301)
|
| 379 |
+
3130 train 3.897981 (lr=8.6274e-04) (hash(x)=162798550)
|
| 380 |
+
3140 train 3.975251 (lr=8.5441e-04) (hash(x)=155589161)
|
| 381 |
+
3150 train 4.118763 (lr=8.4613e-04) (hash(x)=159716727)
|
| 382 |
+
3160 train 4.170940 (lr=8.3789e-04) (hash(x)=166758382)
|
| 383 |
+
3170 train 4.149882 (lr=8.2970e-04) (hash(x)=170684998)
|
| 384 |
+
3180 train 4.166050 (lr=8.2156e-04) (hash(x)=145890459)
|
| 385 |
+
3190 train 4.113280 (lr=8.1347e-04) (hash(x)=131736018)
|
| 386 |
+
3200 val loss 4.0624
|
| 387 |
+
3200 val perplexity 58.1162
|
| 388 |
+
3200 train 4.141879 (lr=8.0542e-04) (hash(x)=163155085)
|
| 389 |
+
3210 train 4.130290 (lr=7.9742e-04) (hash(x)=158434140)
|
| 390 |
+
3220 train 4.135093 (lr=7.8948e-04) (hash(x)=153045472)
|
| 391 |
+
3230 train 4.095717 (lr=7.8158e-04) (hash(x)=162298119)
|
| 392 |
+
3240 train 4.156842 (lr=7.7373e-04) (hash(x)=154940868)
|
| 393 |
+
3250 train 4.048419 (lr=7.6594e-04) (hash(x)=146164261)
|
| 394 |
+
3260 train 4.064459 (lr=7.5819e-04) (hash(x)=180063993)
|
| 395 |
+
3270 train 4.005987 (lr=7.5050e-04) (hash(x)=145912749)
|
| 396 |
+
3280 train 4.038804 (lr=7.4286e-04) (hash(x)=160485262)
|
| 397 |
+
3290 train 4.034842 (lr=7.3527e-04) (hash(x)=161184342)
|
| 398 |
+
3300 val loss 4.0549
|
| 399 |
+
3300 val perplexity 57.6801
|
| 400 |
+
3300 train 3.941289 (lr=7.2774e-04) (hash(x)=152327761)
|
| 401 |
+
3310 train 3.969263 (lr=7.2026e-04) (hash(x)=153447140)
|
| 402 |
+
3320 train 4.061533 (lr=7.1283e-04) (hash(x)=147161965)
|
| 403 |
+
3330 train 3.859341 (lr=7.0545e-04) (hash(x)=156739807)
|
| 404 |
+
3340 train 4.121389 (lr=6.9814e-04) (hash(x)=159957642)
|
| 405 |
+
3350 train 4.160736 (lr=6.9087e-04) (hash(x)=166328571)
|
| 406 |
+
3360 train 4.151203 (lr=6.8367e-04) (hash(x)=151475934)
|
| 407 |
+
3370 train 4.115513 (lr=6.7651e-04) (hash(x)=153931271)
|
| 408 |
+
3380 train 4.112267 (lr=6.6942e-04) (hash(x)=171487011)
|
| 409 |
+
3390 train 4.209892 (lr=6.6238e-04) (hash(x)=151804212)
|
| 410 |
+
3400 val loss 4.0421
|
| 411 |
+
3400 val perplexity 56.9445
|
| 412 |
+
3400 train 4.113207 (lr=6.5540e-04) (hash(x)=153631674)
|
| 413 |
+
3410 train 4.162016 (lr=6.4848e-04) (hash(x)=152678595)
|
| 414 |
+
3420 train 4.137099 (lr=6.4161e-04) (hash(x)=153365144)
|
| 415 |
+
3430 train 4.141204 (lr=6.3480e-04) (hash(x)=166893474)
|
| 416 |
+
3440 train 4.094139 (lr=6.2806e-04) (hash(x)=165535548)
|
| 417 |
+
3450 train 4.135862 (lr=6.2137e-04) (hash(x)=144370685)
|
| 418 |
+
3460 train 4.009230 (lr=6.1474e-04) (hash(x)=154254791)
|
| 419 |
+
3470 train 4.048392 (lr=6.0817e-04) (hash(x)=164497095)
|
| 420 |
+
3480 train 4.061123 (lr=6.0166e-04) (hash(x)=168522376)
|
| 421 |
+
3490 train 3.918683 (lr=5.9521e-04) (hash(x)=153896630)
|
| 422 |
+
3500 val loss 4.0363
|
| 423 |
+
3500 val perplexity 56.6141
|
| 424 |
+
3500 train 3.796441 (lr=5.8883e-04) (hash(x)=154741045)
|
| 425 |
+
3510 train 3.991416 (lr=5.8250e-04) (hash(x)=164266299)
|
| 426 |
+
3520 train 3.917408 (lr=5.7624e-04) (hash(x)=158654950)
|
| 427 |
+
3530 train 4.155674 (lr=5.7004e-04) (hash(x)=158230617)
|
| 428 |
+
3540 train 4.089488 (lr=5.6390e-04) (hash(x)=161601342)
|
| 429 |
+
3550 train 4.220429 (lr=5.5783e-04) (hash(x)=167990373)
|
| 430 |
+
3560 train 4.058076 (lr=5.5182e-04) (hash(x)=169303878)
|
| 431 |
+
3570 train 4.146748 (lr=5.4587e-04) (hash(x)=144079588)
|
| 432 |
+
3580 train 3.988824 (lr=5.3998e-04) (hash(x)=166458297)
|
| 433 |
+
3590 train 4.039760 (lr=5.3416e-04) (hash(x)=164303280)
|
| 434 |
+
3600 val loss 4.0200
|
| 435 |
+
3600 val perplexity 55.6997
|
| 436 |
+
3600 train 4.077363 (lr=5.2841e-04) (hash(x)=159231031)
|
| 437 |
+
3610 train 4.122448 (lr=5.2272e-04) (hash(x)=162424857)
|
| 438 |
+
3620 train 4.015436 (lr=5.1710e-04) (hash(x)=154559262)
|
| 439 |
+
3630 train 4.035576 (lr=5.1154e-04) (hash(x)=156608060)
|
| 440 |
+
3640 train 4.046021 (lr=5.0604e-04) (hash(x)=157446611)
|
| 441 |
+
3650 train 4.065583 (lr=5.0062e-04) (hash(x)=174610578)
|
| 442 |
+
3660 train 3.975127 (lr=4.9526e-04) (hash(x)=158275381)
|
| 443 |
+
3670 train 3.881399 (lr=4.8997e-04) (hash(x)=157437517)
|
| 444 |
+
3680 train 3.826926 (lr=4.8474e-04) (hash(x)=165060839)
|
| 445 |
+
3690 train 3.928437 (lr=4.7958e-04) (hash(x)=169543799)
|
| 446 |
+
3700 val loss 4.0208
|
| 447 |
+
3700 val perplexity 55.7448
|
| 448 |
+
3700 train 4.048800 (lr=4.7449e-04) (hash(x)=140978922)
|
| 449 |
+
3710 train 4.203963 (lr=4.6947e-04) (hash(x)=155829787)
|
| 450 |
+
3720 train 4.081343 (lr=4.6452e-04) (hash(x)=171433940)
|
| 451 |
+
3730 train 4.090452 (lr=4.5963e-04) (hash(x)=167899435)
|
| 452 |
+
3740 train 4.379403 (lr=4.5482e-04) (hash(x)=164355528)
|
| 453 |
+
3750 train 4.072184 (lr=4.5007e-04) (hash(x)=162327765)
|
| 454 |
+
3760 train 4.113864 (lr=4.4540e-04) (hash(x)=156040613)
|
| 455 |
+
3770 train 4.109349 (lr=4.4079e-04) (hash(x)=160180518)
|
| 456 |
+
3780 train 4.026843 (lr=4.3625e-04) (hash(x)=150016441)
|
| 457 |
+
3790 train 4.123217 (lr=4.3179e-04) (hash(x)=159325196)
|
| 458 |
+
3800 val loss 4.0067
|
| 459 |
+
3800 val perplexity 54.9655
|
| 460 |
+
3800 train 4.002306 (lr=4.2739e-04) (hash(x)=151622199)
|
| 461 |
+
3810 train 3.903182 (lr=4.2307e-04) (hash(x)=150340066)
|
| 462 |
+
3820 train 4.060265 (lr=4.1881e-04) (hash(x)=185510998)
|
| 463 |
+
3830 train 4.061746 (lr=4.1463e-04) (hash(x)=167230996)
|
| 464 |
+
3840 train 3.986496 (lr=4.1052e-04) (hash(x)=165310104)
|
| 465 |
+
3850 train 4.039975 (lr=4.0648e-04) (hash(x)=163449872)
|
| 466 |
+
3860 train 4.020473 (lr=4.0252e-04) (hash(x)=159560710)
|
| 467 |
+
3870 train 3.917598 (lr=3.9862e-04) (hash(x)=160392242)
|
| 468 |
+
3880 train 4.009766 (lr=3.9480e-04) (hash(x)=145396414)
|
| 469 |
+
3890 train 4.039589 (lr=3.9105e-04) (hash(x)=151853804)
|
| 470 |
+
3900 val loss 3.9993
|
| 471 |
+
3900 val perplexity 54.5626
|
| 472 |
+
3900 train 3.988944 (lr=3.8738e-04) (hash(x)=171224252)
|
| 473 |
+
3910 train 4.072381 (lr=3.8378e-04) (hash(x)=160454722)
|
| 474 |
+
3920 train 4.077506 (lr=3.8025e-04) (hash(x)=164424859)
|
| 475 |
+
3930 train 4.006749 (lr=3.7679e-04) (hash(x)=151089700)
|
| 476 |
+
3940 train 4.055073 (lr=3.7341e-04) (hash(x)=160835936)
|
| 477 |
+
3950 train 4.004356 (lr=3.7010e-04) (hash(x)=155067663)
|
| 478 |
+
3960 train 4.001670 (lr=3.6687e-04) (hash(x)=155486171)
|
| 479 |
+
3970 train 4.065631 (lr=3.6371e-04) (hash(x)=159851797)
|
| 480 |
+
3980 train 4.033099 (lr=3.6063e-04) (hash(x)=156210123)
|
| 481 |
+
3990 train 3.897400 (lr=3.5762e-04) (hash(x)=155100451)
|
| 482 |
+
4000 val loss 3.9945
|
| 483 |
+
4000 val perplexity 54.2968
|
| 484 |
+
4000 train 3.983969 (lr=3.5468e-04) (hash(x)=154881551)
|
| 485 |
+
4010 train 4.053800 (lr=3.5183e-04) (hash(x)=163349043)
|
| 486 |
+
4020 train 3.983883 (lr=3.4904e-04) (hash(x)=155886322)
|
| 487 |
+
4030 train 4.110051 (lr=3.4633e-04) (hash(x)=159537959)
|
| 488 |
+
4040 train 4.043459 (lr=3.4370e-04) (hash(x)=152602379)
|
| 489 |
+
4050 train 4.034940 (lr=3.4114e-04) (hash(x)=161610706)
|
| 490 |
+
4060 train 4.008632 (lr=3.3866e-04) (hash(x)=154804860)
|
| 491 |
+
4070 train 3.980394 (lr=3.3626e-04) (hash(x)=162296585)
|
| 492 |
+
4080 train 4.067740 (lr=3.3393e-04) (hash(x)=157756706)
|
| 493 |
+
4090 train 4.096369 (lr=3.3168e-04) (hash(x)=160386646)
|
| 494 |
+
4100 val loss 3.9821
|
| 495 |
+
4100 val perplexity 53.6282
|
| 496 |
+
4100 train 4.033006 (lr=3.2950e-04) (hash(x)=163637730)
|
| 497 |
+
4110 train 4.041345 (lr=3.2740e-04) (hash(x)=161965728)
|
| 498 |
+
4120 train 4.042684 (lr=3.2538e-04) (hash(x)=156423546)
|
| 499 |
+
4130 train 3.982870 (lr=3.2343e-04) (hash(x)=163053830)
|
| 500 |
+
4140 train 3.971513 (lr=3.2156e-04) (hash(x)=153198863)
|
| 501 |
+
4150 train 3.973744 (lr=3.1977e-04) (hash(x)=159899969)
|
| 502 |
+
4160 train 3.937757 (lr=3.1806e-04) (hash(x)=164336555)
|
| 503 |
+
4170 train 3.920032 (lr=3.1642e-04) (hash(x)=159546648)
|
| 504 |
+
4180 train 3.987313 (lr=3.1486e-04) (hash(x)=159128343)
|
| 505 |
+
4190 train 3.961296 (lr=3.1338e-04) (hash(x)=190404156)
|
| 506 |
+
4200 val loss 3.9902
|
| 507 |
+
4200 val perplexity 54.0643
|
| 508 |
+
4200 train 3.982952 (lr=3.1197e-04) (hash(x)=160936605)
|
| 509 |
+
4210 train 4.004089 (lr=3.1065e-04) (hash(x)=155496712)
|
| 510 |
+
4220 train 4.030804 (lr=3.0940e-04) (hash(x)=155907702)
|
| 511 |
+
4230 train 4.024456 (lr=3.0822e-04) (hash(x)=159562930)
|
| 512 |
+
4240 train 4.018976 (lr=3.0713e-04) (hash(x)=169512573)
|
| 513 |
+
4250 train 4.057533 (lr=3.0611e-04) (hash(x)=155431435)
|
| 514 |
+
4260 train 4.045427 (lr=3.0517e-04) (hash(x)=155467567)
|
| 515 |
+
4270 train 3.915394 (lr=3.0431e-04) (hash(x)=160350168)
|
| 516 |
+
4280 train 3.984566 (lr=3.0353e-04) (hash(x)=155154478)
|
| 517 |
+
4290 train 3.882757 (lr=3.0283e-04) (hash(x)=161981515)
|
| 518 |
+
4300 val loss 3.9778
|
| 519 |
+
4300 val perplexity 53.4000
|
| 520 |
+
4300 train 4.063294 (lr=3.0220e-04) (hash(x)=147992100)
|
| 521 |
+
4310 train 3.965492 (lr=3.0165e-04) (hash(x)=162522212)
|
| 522 |
+
4320 train 3.976317 (lr=3.0118e-04) (hash(x)=156565952)
|
| 523 |
+
4330 train 4.106852 (lr=3.0079e-04) (hash(x)=158960384)
|
| 524 |
+
4340 train 4.014628 (lr=3.0048e-04) (hash(x)=154314808)
|
| 525 |
+
4350 train 4.034998 (lr=3.0024e-04) (hash(x)=174193276)
|
| 526 |
+
4360 train 3.960939 (lr=3.0009e-04) (hash(x)=157693382)
|
| 527 |
+
4370 train 4.153555 (lr=3.0001e-04) (hash(x)=149511108)
|
| 528 |
+
4374 val loss 3.9727
|
| 529 |
+
4374 val perplexity 53.1301
|
scaling_mha_granularity_2/hd_128_seed_1339/model_04374.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:3220aa11fad715d7b996ee186b2067b27379b5f3c4bebf6148d445c5a9721318
 size 89692138
scaling_mha_granularity_2/hd_128_seed_1339/optimizer_04374.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:6fb4f3e6a9d9bb5816a16d2a6a37d53b2053eb3d4d55ac99073674b0b87e053f
 size 179406342
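
Note: each "val perplexity" entry in log2.txt tracks the matching "val loss" entry, and the pairs are consistent with perplexity = exp(loss) (for example, exp(11.3296) ≈ 83250 against the logged 83249.8203). A minimal sketch of that check, assuming this relationship is how the logged perplexity was computed:

    import math

    # Assumed relationship: logged "val perplexity" = exp(logged "val loss").
    # The value below is copied from the final evaluation line of log2.txt.
    final_val_loss = 3.9727
    print(math.exp(final_val_loss))  # ~53.13, matching the logged "4374 val perplexity 53.1301"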