{
  "best_metric": 1.4917341470718384,
  "best_model_checkpoint": "output/pyrokinesis/checkpoint-1755",
  "epoch": 9.0,
  "global_step": 1755,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.03, "learning_rate": 0.0001369775513380068, "loss": 2.5666, "step": 5},
    {"epoch": 0.05, "learning_rate": 0.00013631164801696083, "loss": 2.2799, "step": 10},
    {"epoch": 0.08, "learning_rate": 0.00013520660867542716, "loss": 2.1886, "step": 15},
    {"epoch": 0.1, "learning_rate": 0.00013366959991757417, "loss": 2.1192, "step": 20},
    {"epoch": 0.13, "learning_rate": 0.00013171058983499535, "loss": 2.0709, "step": 25},
    {"epoch": 0.15, "learning_rate": 0.00012934228335981018, "loss": 2.0524, "step": 30},
    {"epoch": 0.18, "learning_rate": 0.00012658003986830432, "loss": 2.1101, "step": 35},
    {"epoch": 0.21, "learning_rate": 0.0001234417735694802, "loss": 1.9895, "step": 40},
    {"epoch": 0.23, "learning_rate": 0.00011994783732453754, "loss": 2.1651, "step": 45},
    {"epoch": 0.26, "learning_rate": 0.00011612089065075853, "loss": 2.0113, "step": 50},
    {"epoch": 0.28, "learning_rate": 0.00011198575276584287, "loss": 2.0278, "step": 55},
    {"epoch": 0.31, "learning_rate": 0.00010756924162575728, "loss": 2.0199, "step": 60},
    {"epoch": 0.33, "learning_rate": 0.00010290000000000001, "loss": 1.9674, "step": 65},
    {"epoch": 0.36, "learning_rate": 9.800830971224951e-05, "loss": 1.9432, "step": 70},
    {"epoch": 0.38, "learning_rate": 9.292589525111794e-05, "loss": 1.8305, "step": 75},
    {"epoch": 0.41, "learning_rate": 8.768571802466866e-05, "loss": 1.9299, "step": 80},
    {"epoch": 0.44, "learning_rate": 8.232176259303665e-05, "loss": 1.9406, "step": 85},
    {"epoch": 0.46, "learning_rate": 7.686881626551515e-05, "loss": 1.9433, "step": 90},
    {"epoch": 0.49, "learning_rate": 7.136224349150589e-05, "loss": 1.8757, "step": 95},
    {"epoch": 0.51, "learning_rate": 6.583775650849414e-05, "loss": 1.8865, "step": 100},
    {"epoch": 0.54, "learning_rate": 6.033118373448485e-05, "loss": 1.9857, "step": 105},
    {"epoch": 0.56, "learning_rate": 5.487823740696336e-05, "loss": 1.9976, "step": 110},
    {"epoch": 0.59, "learning_rate": 4.951428197533135e-05, "loss": 1.9144, "step": 115},
    {"epoch": 0.62, "learning_rate": 4.427410474888205e-05, "loss": 1.8636, "step": 120},
    {"epoch": 0.64, "learning_rate": 3.9191690287750474e-05, "loss": 1.9044, "step": 125},
    {"epoch": 0.67, "learning_rate": 3.4300000000000014e-05, "loss": 1.8643, "step": 130},
    {"epoch": 0.69, "learning_rate": 2.963075837424272e-05, "loss": 1.924, "step": 135},
    {"epoch": 0.72, "learning_rate": 2.5214247234157127e-05, "loss": 1.8417, "step": 140},
    {"epoch": 0.74, "learning_rate": 2.1079109349241483e-05, "loss": 1.8783, "step": 145},
    {"epoch": 0.77, "learning_rate": 1.725216267546246e-05, "loss": 1.8383, "step": 150},
    {"epoch": 0.79, "learning_rate": 1.3758226430519834e-05, "loss": 1.854, "step": 155},
    {"epoch": 0.82, "learning_rate": 1.061996013169569e-05, "loss": 1.779, "step": 160},
    {"epoch": 0.85, "learning_rate": 7.857716640189809e-06, "loss": 1.8699, "step": 165},
    {"epoch": 0.87, "learning_rate": 5.4894101650046585e-06, "loss": 1.863, "step": 170},
    {"epoch": 0.9, "learning_rate": 3.53040008242582e-06, "loss": 1.9041, "step": 175},
    {"epoch": 0.92, "learning_rate": 1.993391324572832e-06, "loss": 1.7998, "step": 180},
    {"epoch": 0.95, "learning_rate": 8.883519830391788e-07, "loss": 1.9326, "step": 185},
    {"epoch": 0.97, "learning_rate": 2.2244866199319883e-07, "loss": 1.8503, "step": 190},
    {"epoch": 1.0, "learning_rate": 0.0, "loss": 1.7822, "step": 195},
    {"epoch": 1.0, "eval_loss": 1.7307456731796265, "eval_runtime": 11.5169, "eval_samples_per_second": 21.36, "eval_steps_per_second": 2.692, "step": 195},
    {"epoch": 1.03, "learning_rate": 2.2244866199319123e-07, "loss": 1.8197, "step": 200},
    {"epoch": 1.05, "learning_rate": 8.883519830391788e-07, "loss": 1.7915, "step": 205},
    {"epoch": 1.08, "learning_rate": 1.9933913245728244e-06, "loss": 1.8277, "step": 210},
    {"epoch": 1.1, "learning_rate": 3.53040008242582e-06, "loss": 1.8665, "step": 215},
    {"epoch": 1.13, "learning_rate": 5.4894101650046585e-06, "loss": 1.8632, "step": 220},
    {"epoch": 1.15, "learning_rate": 7.857716640189785e-06, "loss": 1.8499, "step": 225},
    {"epoch": 1.18, "learning_rate": 1.0619960131695675e-05, "loss": 1.851, "step": 230},
    {"epoch": 1.21, "learning_rate": 1.3758226430519804e-05, "loss": 1.8915, "step": 235},
    {"epoch": 1.23, "learning_rate": 1.7252162675462473e-05, "loss": 1.896, "step": 240},
    {"epoch": 1.26, "learning_rate": 2.1079109349241466e-05, "loss": 1.9118, "step": 245},
    {"epoch": 1.28, "learning_rate": 2.5214247234157134e-05, "loss": 1.8666, "step": 250},
    {"epoch": 1.31, "learning_rate": 2.9630758374242703e-05, "loss": 1.8693, "step": 255},
    {"epoch": 1.33, "learning_rate": 3.429999999999997e-05, "loss": 1.8738, "step": 260},
    {"epoch": 1.36, "learning_rate": 3.919169028775049e-05, "loss": 1.7909, "step": 265},
    {"epoch": 1.38, "learning_rate": 4.427410474888204e-05, "loss": 1.7761, "step": 270},
    {"epoch": 1.41, "learning_rate": 4.9514281975331363e-05, "loss": 1.8239, "step": 275},
    {"epoch": 1.44, "learning_rate": 5.487823740696335e-05, "loss": 1.845, "step": 280},
    {"epoch": 1.46, "learning_rate": 6.03311837344848e-05, "loss": 1.851, "step": 285},
    {"epoch": 1.49, "learning_rate": 6.583775650849411e-05, "loss": 1.7638, "step": 290},
    {"epoch": 1.51, "learning_rate": 7.136224349150585e-05, "loss": 1.9163, "step": 295},
    {"epoch": 1.54, "learning_rate": 7.686881626551516e-05, "loss": 1.7542, "step": 300},
    {"epoch": 1.56, "learning_rate": 8.232176259303664e-05, "loss": 1.92, "step": 305},
    {"epoch": 1.59, "learning_rate": 8.768571802466861e-05, "loss": 1.8303, "step": 310},
    {"epoch": 1.62, "learning_rate": 9.292589525111794e-05, "loss": 1.7917, "step": 315},
    {"epoch": 1.64, "learning_rate": 9.800830971224949e-05, "loss": 1.7499, "step": 320},
    {"epoch": 1.67, "learning_rate": 0.00010290000000000001, "loss": 1.7705, "step": 325},
    {"epoch": 1.69, "learning_rate": 0.00010756924162575728, "loss": 1.8492, "step": 330},
    {"epoch": 1.72, "learning_rate": 0.00011198575276584284, "loss": 1.8068, "step": 335},
    {"epoch": 1.74, "learning_rate": 0.00011612089065075851, "loss": 1.7533, "step": 340},
    {"epoch": 1.77, "learning_rate": 0.0001199478373245375, "loss": 1.8224, "step": 345},
    {"epoch": 1.79, "learning_rate": 0.0001234417735694802, "loss": 1.738, "step": 350},
    {"epoch": 1.82, "learning_rate": 0.00012658003986830432, "loss": 1.8536, "step": 355},
    {"epoch": 1.85, "learning_rate": 0.0001293422833598102, "loss": 1.7585, "step": 360},
    {"epoch": 1.87, "learning_rate": 0.00013171058983499532, "loss": 1.7811, "step": 365},
    {"epoch": 1.9, "learning_rate": 0.00013366959991757417, "loss": 1.801, "step": 370},
    {"epoch": 1.92, "learning_rate": 0.00013520660867542716, "loss": 1.7785, "step": 375},
    {"epoch": 1.95, "learning_rate": 0.00013631164801696083, "loss": 1.756, "step": 380},
    {"epoch": 1.97, "learning_rate": 0.0001369775513380068, "loss": 1.8216, "step": 385},
    {"epoch": 2.0, "learning_rate": 0.0001372, "loss": 1.6845, "step": 390},
    {"epoch": 2.0, "eval_loss": 1.6902183294296265, "eval_runtime": 11.5295, "eval_samples_per_second": 21.337, "eval_steps_per_second": 2.689, "step": 390},
    {"epoch": 2.03, "learning_rate": 0.0001369775513380068, "loss": 1.8284, "step": 395},
    {"epoch": 2.05, "learning_rate": 0.00013631164801696085, "loss": 1.7341, "step": 400},
    {"epoch": 2.08, "learning_rate": 0.00013520660867542716, "loss": 1.7068, "step": 405},
    {"epoch": 2.1, "learning_rate": 0.00013366959991757417, "loss": 1.8545, "step": 410},
    {"epoch": 2.13, "learning_rate": 0.00013171058983499535, "loss": 1.7491, "step": 415},
    {"epoch": 2.15, "learning_rate": 0.00012934228335981023, "loss": 1.7728, "step": 420},
    {"epoch": 2.18, "learning_rate": 0.00012658003986830435, "loss": 1.8086, "step": 425},
    {"epoch": 2.21, "learning_rate": 0.00012344177356948016, "loss": 1.7479, "step": 430},
    {"epoch": 2.23, "learning_rate": 0.00011994783732453754, "loss": 1.8108, "step": 435},
    {"epoch": 2.26, "learning_rate": 0.00011612089065075854, "loss": 1.7602, "step": 440},
    {"epoch": 2.28, "learning_rate": 0.00011198575276584292, "loss": 1.6977, "step": 445},
    {"epoch": 2.31, "learning_rate": 0.00010756924162575734, "loss": 1.7494, "step": 450},
    {"epoch": 2.33, "learning_rate": 0.00010289999999999998, "loss": 1.6501, "step": 455},
    {"epoch": 2.36, "learning_rate": 9.800830971224951e-05, "loss": 1.6887, "step": 460},
    {"epoch": 2.38, "learning_rate": 9.292589525111797e-05, "loss": 1.68, "step": 465},
    {"epoch": 2.41, "learning_rate": 8.76857180246687e-05, "loss": 1.6036, "step": 470},
    {"epoch": 2.44, "learning_rate": 8.232176259303673e-05, "loss": 1.6931, "step": 475},
    {"epoch": 2.46, "learning_rate": 7.686881626551514e-05, "loss": 1.7376, "step": 480},
    {"epoch": 2.49, "learning_rate": 7.136224349150589e-05, "loss": 1.68, "step": 485},
    {"epoch": 2.51, "learning_rate": 6.583775650849415e-05, "loss": 1.7219, "step": 490},
    {"epoch": 2.54, "learning_rate": 6.03311837344849e-05, "loss": 1.7081, "step": 495},
    {"epoch": 2.56, "learning_rate": 5.4878237406963316e-05, "loss": 1.6974, "step": 500},
    {"epoch": 2.59, "learning_rate": 4.951428197533134e-05, "loss": 1.6991, "step": 505},
    {"epoch": 2.62, "learning_rate": 4.427410474888207e-05, "loss": 1.6574, "step": 510},
    {"epoch": 2.64, "learning_rate": 3.919169028775052e-05, "loss": 1.6158, "step": 515},
    {"epoch": 2.67, "learning_rate": 3.4300000000000054e-05, "loss": 1.7649, "step": 520},
    {"epoch": 2.69, "learning_rate": 2.9630758374242683e-05, "loss": 1.6256, "step": 525},
    {"epoch": 2.72, "learning_rate": 2.521424723415712e-05, "loss": 1.6033, "step": 530},
    {"epoch": 2.74, "learning_rate": 2.107910934924149e-05, "loss": 1.7139, "step": 535},
    {"epoch": 2.77, "learning_rate": 1.7252162675462497e-05, "loss": 1.6601, "step": 540},
    {"epoch": 2.79, "learning_rate": 1.3758226430519865e-05, "loss": 1.7914, "step": 545},
    {"epoch": 2.82, "learning_rate": 1.0619960131695668e-05, "loss": 1.6809, "step": 550},
    {"epoch": 2.85, "learning_rate": 7.8577166401898e-06, "loss": 1.653, "step": 555},
    {"epoch": 2.87, "learning_rate": 5.489410165004666e-06, "loss": 1.7064, "step": 560},
    {"epoch": 2.9, "learning_rate": 3.5304000824258354e-06, "loss": 1.7002, "step": 565},
    {"epoch": 2.92, "learning_rate": 1.9933913245728472e-06, "loss": 1.5901, "step": 570},
    {"epoch": 2.95, "learning_rate": 8.883519830391712e-07, "loss": 1.6791, "step": 575},
    {"epoch": 2.97, "learning_rate": 2.2244866199319123e-07, "loss": 1.6187, "step": 580},
    {"epoch": 3.0, "learning_rate": 0.0, "loss": 1.6345, "step": 585},
    {"epoch": 3.0, "eval_loss": 1.5962461233139038, "eval_runtime": 11.5211, "eval_samples_per_second": 21.352, "eval_steps_per_second": 2.691, "step": 585},
    {"epoch": 3.03, "learning_rate": 2.2244866199319123e-07, "loss": 1.6608, "step": 590},
    {"epoch": 3.05, "learning_rate": 8.883519830391636e-07, "loss": 1.659, "step": 595},
    {"epoch": 3.08, "learning_rate": 1.9933913245728396e-06, "loss": 1.6294, "step": 600},
    {"epoch": 3.1, "learning_rate": 3.5304000824258126e-06, "loss": 1.6214, "step": 605},
    {"epoch": 3.13, "learning_rate": 5.489410165004651e-06, "loss": 1.6499, "step": 610},
    {"epoch": 3.15, "learning_rate": 7.857716640189778e-06, "loss": 1.6205, "step": 615},
    {"epoch": 3.18, "learning_rate": 1.0619960131695638e-05, "loss": 1.6322, "step": 620},
    {"epoch": 3.21, "learning_rate": 1.3758226430519834e-05, "loss": 1.6142, "step": 625},
    {"epoch": 3.23, "learning_rate": 1.725216267546246e-05, "loss": 1.7086, "step": 630},
    {"epoch": 3.26, "learning_rate": 2.107910934924146e-05, "loss": 1.6773, "step": 635},
    {"epoch": 3.28, "learning_rate": 2.5214247234157073e-05, "loss": 1.737, "step": 640},
    {"epoch": 3.31, "learning_rate": 2.9630758374242642e-05, "loss": 1.5679, "step": 645},
    {"epoch": 3.33, "learning_rate": 3.4300000000000014e-05, "loss": 1.6389, "step": 650},
    {"epoch": 3.36, "learning_rate": 3.9191690287750474e-05, "loss": 1.653, "step": 655},
    {"epoch": 3.38, "learning_rate": 4.427410474888202e-05, "loss": 1.6666, "step": 660},
    {"epoch": 3.41, "learning_rate": 4.951428197533129e-05, "loss": 1.5802, "step": 665},
    {"epoch": 3.44, "learning_rate": 5.487823740696327e-05, "loss": 1.5621, "step": 670},
    {"epoch": 3.46, "learning_rate": 6.033118373448485e-05, "loss": 1.5473, "step": 675},
    {"epoch": 3.49, "learning_rate": 6.58377565084941e-05, "loss": 1.6707, "step": 680},
    {"epoch": 3.51, "learning_rate": 7.136224349150583e-05, "loss": 1.639, "step": 685},
    {"epoch": 3.54, "learning_rate": 7.68688162655151e-05, "loss": 1.6476, "step": 690},
    {"epoch": 3.56, "learning_rate": 8.232176259303666e-05, "loss": 1.704, "step": 695},
    {"epoch": 3.59, "learning_rate": 8.768571802466866e-05, "loss": 1.5819, "step": 700},
    {"epoch": 3.62, "learning_rate": 9.292589525111793e-05, "loss": 1.5765, "step": 705},
    {"epoch": 3.64, "learning_rate": 9.800830971224949e-05, "loss": 1.6459, "step": 710},
    {"epoch": 3.67, "learning_rate": 0.00010289999999999994, "loss": 1.6142, "step": 715},
    {"epoch": 3.69, "learning_rate": 0.00010756924162575731, "loss": 1.6049, "step": 720},
    {"epoch": 3.72, "learning_rate": 0.00011198575276584287, "loss": 1.6587, "step": 725},
    {"epoch": 3.74, "learning_rate": 0.0001161208906507585, "loss": 1.559, "step": 730},
    {"epoch": 3.77, "learning_rate": 0.0001199478373245375, "loss": 1.695, "step": 735},
    {"epoch": 3.79, "learning_rate": 0.00012344177356948013, "loss": 1.6336, "step": 740},
    {"epoch": 3.82, "learning_rate": 0.00012658003986830432, "loss": 1.5868, "step": 745},
    {"epoch": 3.85, "learning_rate": 0.00012934228335981018, "loss": 1.5476, "step": 750},
    {"epoch": 3.87, "learning_rate": 0.00013171058983499532, "loss": 1.6785, "step": 755},
    {"epoch": 3.9, "learning_rate": 0.00013366959991757417, "loss": 1.8145, "step": 760},
    {"epoch": 3.92, "learning_rate": 0.00013520660867542716, "loss": 1.6926, "step": 765},
    {"epoch": 3.95, "learning_rate": 0.00013631164801696083, "loss": 1.6759, "step": 770},
    {"epoch": 3.97, "learning_rate": 0.00013697755133800678, "loss": 1.6476, "step": 775},
    {"epoch": 4.0, "learning_rate": 0.0001372, "loss": 1.697, "step": 780},
    {"epoch": 4.0, "eval_loss": 1.5882556438446045, "eval_runtime": 11.5211, "eval_samples_per_second": 21.352, "eval_steps_per_second": 2.691, "step": 780},
    {"epoch": 4.03, "learning_rate": 0.0001369775513380068, "loss": 1.6285, "step": 785},
    {"epoch": 4.05, "learning_rate": 0.00013631164801696085, "loss": 1.5054, "step": 790},
    {"epoch": 4.08, "learning_rate": 0.0001352066086754272, "loss": 1.612, "step": 795},
    {"epoch": 4.1, "learning_rate": 0.00013366959991757425, "loss": 1.6493, "step": 800},
    {"epoch": 4.13, "learning_rate": 0.0001317105898349953, "loss": 1.5501, "step": 805},
    {"epoch": 4.15, "learning_rate": 0.00012934228335981015, "loss": 1.5358, "step": 810},
    {"epoch": 4.18, "learning_rate": 0.0001265800398683043, "loss": 1.6262, "step": 815},
    {"epoch": 4.21, "learning_rate": 0.0001234417735694802, "loss": 1.59, "step": 820},
    {"epoch": 4.23, "learning_rate": 0.00011994783732453755, "loss": 1.5257, "step": 825},
    {"epoch": 4.26, "learning_rate": 0.00011612089065075855, "loss": 1.5782, "step": 830},
    {"epoch": 4.28, "learning_rate": 0.00011198575276584294, "loss": 1.6528, "step": 835},
    {"epoch": 4.31, "learning_rate": 0.00010756924162575738, "loss": 1.64, "step": 840},
    {"epoch": 4.33, "learning_rate": 0.00010290000000000009, "loss": 1.5902, "step": 845},
    {"epoch": 4.36, "learning_rate": 9.800830971224965e-05, "loss": 1.5726, "step": 850},
    {"epoch": 4.38, "learning_rate": 9.292589525111788e-05, "loss": 1.5698, "step": 855},
    {"epoch": 4.41, "learning_rate": 8.76857180246686e-05, "loss": 1.5357, "step": 860},
    {"epoch": 4.44, "learning_rate": 8.232176259303662e-05, "loss": 1.6191, "step": 865},
    {"epoch": 4.46, "learning_rate": 7.686881626551516e-05, "loss": 1.5824, "step": 870},
    {"epoch": 4.49, "learning_rate": 7.13622434915059e-05, "loss": 1.5826, "step": 875},
    {"epoch": 4.51, "learning_rate": 6.583775650849417e-05, "loss": 1.5749, "step": 880},
    {"epoch": 4.54, "learning_rate": 6.033118373448492e-05, "loss": 1.5323, "step": 885},
    {"epoch": 4.56, "learning_rate": 5.487823740696345e-05, "loss": 1.5293, "step": 890},
    {"epoch": 4.59, "learning_rate": 4.951428197533147e-05, "loss": 1.6373, "step": 895},
    {"epoch": 4.62, "learning_rate": 4.42741047488822e-05, "loss": 1.5182, "step": 900},
    {"epoch": 4.64, "learning_rate": 3.919169028775043e-05, "loss": 1.5626, "step": 905},
    {"epoch": 4.67, "learning_rate": 3.429999999999996e-05, "loss": 1.6378, "step": 910},
    {"epoch": 4.69, "learning_rate": 2.9630758374242696e-05, "loss": 1.5834, "step": 915},
    {"epoch": 4.72, "learning_rate": 2.5214247234157127e-05, "loss": 1.6246, "step": 920},
    {"epoch": 4.74, "learning_rate": 2.1079109349241507e-05, "loss": 1.5866, "step": 925},
    {"epoch": 4.77, "learning_rate": 1.7252162675462504e-05, "loss": 1.6128, "step": 930},
    {"epoch": 4.79, "learning_rate": 1.3758226430519873e-05, "loss": 1.4767, "step": 935},
    {"epoch": 4.82, "learning_rate": 1.0619960131695736e-05, "loss": 1.5603, "step": 940},
    {"epoch": 4.85, "learning_rate": 7.857716640189861e-06, "loss": 1.5784, "step": 945},
    {"epoch": 4.87, "learning_rate": 5.4894101650047195e-06, "loss": 1.6181, "step": 950},
    {"epoch": 4.9, "learning_rate": 3.5304000824257973e-06, "loss": 1.5659, "step": 955},
    {"epoch": 4.92, "learning_rate": 1.9933913245728244e-06, "loss": 1.523, "step": 960},
    {"epoch": 4.95, "learning_rate": 8.883519830391788e-07, "loss": 1.5859, "step": 965},
    {"epoch": 4.97, "learning_rate": 2.2244866199319883e-07, "loss": 1.5622, "step": 970},
    {"epoch": 5.0, "learning_rate": 0.0, "loss": 1.5034, "step": 975},
    {"epoch": 5.0, "eval_loss": 1.5320487022399902, "eval_runtime": 11.5033, "eval_samples_per_second": 21.385, "eval_steps_per_second": 2.695, "step": 975},
    {"epoch": 5.03, "learning_rate": 2.2244866199319123e-07, "loss": 1.549, "step": 980},
    {"epoch": 5.05, "learning_rate": 8.883519830391636e-07, "loss": 1.5255, "step": 985},
    {"epoch": 5.08, "learning_rate": 1.9933913245728015e-06, "loss": 1.5572, "step": 990},
    {"epoch": 5.1, "learning_rate": 3.5304000824257744e-06, "loss": 1.5562, "step": 995},
    {"epoch": 5.13, "learning_rate": 5.489410165004689e-06, "loss": 1.5428, "step": 1000},
    {"epoch": 5.15, "learning_rate": 7.857716640189824e-06, "loss": 1.458, "step": 1005},
    {"epoch": 5.18, "learning_rate": 1.061996013169569e-05, "loss": 1.5417, "step": 1010},
    {"epoch": 5.21, "learning_rate": 1.375822643051982e-05, "loss": 1.5147, "step": 1015},
    {"epoch": 5.23, "learning_rate": 1.725216267546245e-05, "loss": 1.4622, "step": 1020},
    {"epoch": 5.26, "learning_rate": 2.1079109349241446e-05, "loss": 1.5768, "step": 1025},
    {"epoch": 5.28, "learning_rate": 2.5214247234157066e-05, "loss": 1.48, "step": 1030},
    {"epoch": 5.31, "learning_rate": 2.963075837424263e-05, "loss": 1.4608, "step": 1035},
    {"epoch": 5.33, "learning_rate": 3.429999999999989e-05, "loss": 1.511, "step": 1040},
    {"epoch": 5.36, "learning_rate": 3.919169028775035e-05, "loss": 1.4951, "step": 1045},
    {"epoch": 5.38, "learning_rate": 4.4274104748882125e-05, "loss": 1.4858, "step": 1050},
    {"epoch": 5.41, "learning_rate": 4.951428197533139e-05, "loss": 1.5487, "step": 1055},
    {"epoch": 5.44, "learning_rate": 5.487823740696337e-05, "loss": 1.4313, "step": 1060},
    {"epoch": 5.46, "learning_rate": 6.033118373448483e-05, "loss": 1.58, "step": 1065},
    {"epoch": 5.49, "learning_rate": 6.583775650849409e-05, "loss": 1.5329, "step": 1070},
    {"epoch": 5.51, "learning_rate": 7.136224349150582e-05, "loss": 1.5095, "step": 1075},
    {"epoch": 5.54, "learning_rate": 7.686881626551508e-05, "loss": 1.4715, "step": 1080},
    {"epoch": 5.56, "learning_rate": 8.232176259303654e-05, "loss": 1.5381, "step": 1085},
    {"epoch": 5.59, "learning_rate": 8.768571802466852e-05, "loss": 1.4487, "step": 1090},
    {"epoch": 5.62, "learning_rate": 9.292589525111778e-05, "loss": 1.4744, "step": 1095},
    {"epoch": 5.64, "learning_rate": 9.800830971224957e-05, "loss": 1.533, "step": 1100},
    {"epoch": 5.67, "learning_rate": 0.00010290000000000003, "loss": 1.4971, "step": 1105},
    {"epoch": 5.69, "learning_rate": 0.0001075692416257573, "loss": 1.5636, "step": 1110},
    {"epoch": 5.72, "learning_rate": 0.00011198575276584287, "loss": 1.6137, "step": 1115},
    {"epoch": 5.74, "learning_rate": 0.0001161208906507585, "loss": 1.6421, "step": 1120},
    {"epoch": 5.77, "learning_rate": 0.00011994783732453749, "loss": 1.5246, "step": 1125},
    {"epoch": 5.79, "learning_rate": 0.00012344177356948013, "loss": 1.5616, "step": 1130},
    {"epoch": 5.82, "learning_rate": 0.00012658003986830427, "loss": 1.5668, "step": 1135},
    {"epoch": 5.85, "learning_rate": 0.00012934228335981013, "loss": 1.4651, "step": 1140},
    {"epoch": 5.87, "learning_rate": 0.00013171058983499527, "loss": 1.5195, "step": 1145},
    {"epoch": 5.9, "learning_rate": 0.0001336695999175742, "loss": 1.571, "step": 1150},
    {"epoch": 5.92, "learning_rate": 0.00013520660867542716, "loss": 1.5089, "step": 1155},
    {"epoch": 5.95, "learning_rate": 0.00013631164801696083, "loss": 1.5889, "step": 1160},
    {"epoch": 5.97, "learning_rate": 0.00013697755133800678, "loss": 1.4905, "step": 1165},
    {"epoch": 6.0, "learning_rate": 0.0001372, "loss": 1.4218, "step": 1170},
    {"epoch": 6.0, "eval_loss": 1.549699306488037, "eval_runtime": 11.5077, "eval_samples_per_second": 21.377, "eval_steps_per_second": 2.694, "step": 1170},
    {"epoch": 6.03, "learning_rate": 0.0001369775513380068, "loss": 1.5342, "step": 1175},
    {"epoch": 6.05, "learning_rate": 0.00013631164801696085, "loss": 1.5204, "step": 1180},
    {"epoch": 6.08, "learning_rate": 0.0001352066086754272, "loss": 1.4333, "step": 1185},
    {"epoch": 6.1, "learning_rate": 0.00013366959991757425, "loss": 1.4939, "step": 1190},
    {"epoch": 6.13, "learning_rate": 0.00013171058983499532, "loss": 1.5237, "step": 1195},
    {"epoch": 6.15, "learning_rate": 0.00012934228335981018, "loss": 1.5111, "step": 1200},
    {"epoch": 6.18, "learning_rate": 0.00012658003986830432, "loss": 1.5271, "step": 1205},
    {"epoch": 6.21, "learning_rate": 0.0001234417735694802, "loss": 1.4337, "step": 1210},
    {"epoch": 6.23, "learning_rate": 0.00011994783732453757, "loss": 1.5122, "step": 1215},
    {"epoch": 6.26, "learning_rate": 0.00011612089065075855, "loss": 1.5887, "step": 1220},
    {"epoch": 6.28, "learning_rate": 0.00011198575276584294, "loss": 1.4314, "step": 1225},
    {"epoch": 6.31, "learning_rate": 0.00010756924162575738, "loss": 1.3917, "step": 1230},
    {"epoch": 6.33, "learning_rate": 0.00010290000000000012, "loss": 1.4896, "step": 1235},
    {"epoch": 6.36, "learning_rate": 9.800830971224966e-05, "loss": 1.4996, "step": 1240},
    {"epoch": 6.38, "learning_rate": 9.292589525111789e-05, "loss": 1.4866, "step": 1245},
    {"epoch": 6.41, "learning_rate": 8.768571802466861e-05, "loss": 1.4984, "step": 1250},
    {"epoch": 6.44, "learning_rate": 8.232176259303664e-05, "loss": 1.47, "step": 1255},
    {"epoch": 6.46, "learning_rate": 7.686881626551518e-05, "loss": 1.5694, "step": 1260},
    {"epoch": 6.49, "learning_rate": 7.136224349150593e-05, "loss": 1.4892, "step": 1265},
    {"epoch": 6.51, "learning_rate": 6.583775650849418e-05, "loss": 1.5118, "step": 1270},
    {"epoch": 6.54, "learning_rate": 6.033118373448493e-05, "loss": 1.4075, "step": 1275},
    {"epoch": 6.56, "learning_rate": 5.487823740696347e-05, "loss": 1.3875, "step": 1280},
    {"epoch": 6.59, "learning_rate": 4.9514281975331485e-05, "loss": 1.4728, "step": 1285},
    {"epoch": 6.62, "learning_rate": 4.427410474888221e-05, "loss": 1.484, "step": 1290},
    {"epoch": 6.64, "learning_rate": 3.919169028775044e-05, "loss": 1.5624, "step": 1295},
    {"epoch": 6.67, "learning_rate": 3.429999999999998e-05, "loss": 1.4664, "step": 1300},
    {"epoch": 6.69, "learning_rate": 2.9630758374242713e-05, "loss": 1.4719, "step": 1305},
    {"epoch": 6.72, "learning_rate": 2.521424723415714e-05, "loss": 1.5392, "step": 1310},
    {"epoch": 6.74, "learning_rate": 2.107910934924152e-05, "loss": 1.3993, "step": 1315},
    {"epoch": 6.77, "learning_rate": 1.725216267546251e-05, "loss": 1.446, "step": 1320},
    {"epoch": 6.79, "learning_rate": 1.375822643051988e-05, "loss": 1.3712, "step": 1325},
    {"epoch": 6.82, "learning_rate": 1.0619960131695743e-05, "loss": 1.4234, "step": 1330},
    {"epoch": 6.85, "learning_rate": 7.857716640189868e-06, "loss": 1.4638, "step": 1335},
    {"epoch": 6.87, "learning_rate": 5.489410165004727e-06, "loss": 1.4316, "step": 1340},
    {"epoch": 6.9, "learning_rate": 3.530400082425805e-06, "loss": 1.4005, "step": 1345},
    {"epoch": 6.92, "learning_rate": 1.9933913245728244e-06, "loss": 1.4936, "step": 1350},
    {"epoch": 6.95, "learning_rate": 8.883519830391788e-07, "loss": 1.4579, "step": 1355},
    {"epoch": 6.97, "learning_rate": 2.2244866199319883e-07, "loss": 1.4931, "step": 1360},
    {"epoch": 7.0, "learning_rate": 0.0, "loss": 1.5549, "step": 1365},
    {"epoch": 7.0, "eval_loss": 1.5031161308288574, "eval_runtime": 11.524, "eval_samples_per_second": 21.347, "eval_steps_per_second": 2.69, "step": 1365},
    {"epoch": 7.03, "learning_rate": 2.2244866199319123e-07, "loss": 1.3518, "step": 1370},
    {"epoch": 7.05, "learning_rate": 8.883519830391636e-07, "loss": 1.331, "step": 1375},
    {"epoch": 7.08, "learning_rate": 1.9933913245728015e-06, "loss": 1.3751, "step": 1380},
    {"epoch": 7.1, "learning_rate": 3.530400082425767e-06, "loss": 1.3551, "step": 1385},
    {"epoch": 7.13, "learning_rate": 5.489410165004681e-06, "loss": 1.516, "step": 1390},
    {"epoch": 7.15, "learning_rate": 7.857716640189815e-06, "loss": 1.3005, "step": 1395},
    {"epoch": 7.18, "learning_rate": 1.0619960131695684e-05, "loss": 1.36, "step": 1400},
    {"epoch": 7.21, "learning_rate": 1.3758226430519812e-05, "loss": 1.4466, "step": 1405},
    {"epoch": 7.23, "learning_rate": 1.7252162675462436e-05, "loss": 1.3912, "step": 1410},
    {"epoch": 7.26, "learning_rate": 2.1079109349241436e-05, "loss": 1.4354, "step": 1415},
    {"epoch": 7.28, "learning_rate": 2.521424723415705e-05, "loss": 1.3196, "step": 1420},
    {"epoch": 7.31, "learning_rate": 2.963075837424261e-05, "loss": 1.4088, "step": 1425},
    {"epoch": 7.33, "learning_rate": 3.429999999999988e-05, "loss": 1.3855, "step": 1430},
    {"epoch": 7.36, "learning_rate": 3.919169028775034e-05, "loss": 1.4227, "step": 1435},
    {"epoch": 7.38, "learning_rate": 4.4274104748882104e-05, "loss": 1.3923, "step": 1440},
    {"epoch": 7.41, "learning_rate": 4.951428197533137e-05, "loss": 1.509, "step": 1445},
    {"epoch": 7.44, "learning_rate": 5.4878237406963356e-05, "loss": 1.4401, "step": 1450},
    {"epoch": 7.46, "learning_rate": 6.0331183734484816e-05, "loss": 1.4038, "step": 1455},
    {"epoch": 7.49, "learning_rate": 6.583775650849407e-05, "loss": 1.4624, "step": 1460},
    {"epoch": 7.51, "learning_rate": 7.13622434915058e-05, "loss": 1.4675, "step": 1465},
    {"epoch": 7.54, "learning_rate": 7.686881626551506e-05, "loss": 1.3629, "step": 1470},
    {"epoch": 7.56, "learning_rate": 8.232176259303652e-05, "loss": 1.4762, "step": 1475},
    {"epoch": 7.59, "learning_rate": 8.76857180246685e-05, "loss": 1.4092, "step": 1480},
    {"epoch": 7.62, "learning_rate": 9.292589525111778e-05, "loss": 1.4516, "step": 1485},
    {"epoch": 7.64, "learning_rate": 9.800830971224955e-05, "loss": 1.4641, "step": 1490},
    {"epoch": 7.67, "learning_rate": 0.00010290000000000002, "loss": 1.4513, "step": 1495},
    {"epoch": 7.69, "learning_rate": 0.00010756924162575728, "loss": 1.4179, "step": 1500},
    {"epoch": 7.72, "learning_rate": 0.00011198575276584284, "loss": 1.4565, "step": 1505},
    {"epoch": 7.74, "learning_rate": 0.00011612089065075847, "loss": 1.4936, "step": 1510},
    {"epoch": 7.77, "learning_rate": 0.00011994783732453747, "loss": 1.4875, "step": 1515},
    {"epoch": 7.79, "learning_rate": 0.0001234417735694801, "loss": 1.4635, "step": 1520},
    {"epoch": 7.82, "learning_rate": 0.00012658003986830424, "loss": 1.4118, "step": 1525},
    {"epoch": 7.85, "learning_rate": 0.00012934228335981013, "loss": 1.3592, "step": 1530},
    {"epoch": 7.87, "learning_rate": 0.00013171058983499527, "loss": 1.4221, "step": 1535},
    {"epoch": 7.9, "learning_rate": 0.0001336695999175742, "loss": 1.4614, "step": 1540},
    {"epoch": 7.92, "learning_rate": 0.00013520660867542716, "loss": 1.4663, "step": 1545},
    {"epoch": 7.95, "learning_rate": 0.00013631164801696083, "loss": 1.5087, "step": 1550},
    {"epoch": 7.97, "learning_rate": 0.00013697755133800678, "loss": 1.449, "step": 1555},
    {"epoch": 8.0, "learning_rate": 0.0001372, "loss": 1.4238, "step": 1560},
    {"epoch": 8.0, "eval_loss": 1.5225884914398193, "eval_runtime": 11.5201, "eval_samples_per_second": 21.354, "eval_steps_per_second": 2.691, "step": 1560},
    {"epoch": 8.03, "learning_rate": 0.0001369775513380068, "loss": 1.4416, "step": 1565},
    {"epoch": 8.05, "learning_rate": 0.00013631164801696085, "loss": 1.4836, "step": 1570},
    {"epoch": 8.08, "learning_rate": 0.0001352066086754272, "loss": 1.3901, "step": 1575},
    {"epoch": 8.1, "learning_rate": 0.00013366959991757425, "loss": 1.2685, "step": 1580},
    {"epoch": 8.13, "learning_rate": 0.00013171058983499543, "loss": 1.4028, "step": 1585},
    {"epoch": 8.15, "learning_rate": 0.0001293422833598103, "loss": 1.4551, "step": 1590},
    {"epoch": 8.18, "learning_rate": 0.00012658003986830446, "loss": 1.4609, "step": 1595},
    {"epoch": 8.21, "learning_rate": 0.00012344177356948035, "loss": 1.3414, "step": 1600},
    {"epoch": 8.23, "learning_rate": 0.00011994783732453773, "loss": 1.3587, "step": 1605},
    {"epoch": 8.26, "learning_rate": 0.0001161208906507584, "loss": 1.3815, "step": 1610},
    {"epoch": 8.28, "learning_rate": 0.00011198575276584276, "loss": 1.434, "step": 1615},
    {"epoch": 8.31, "learning_rate": 0.00010756924162575719, "loss": 1.3752, "step": 1620},
    {"epoch": 8.33, "learning_rate": 0.00010289999999999993, "loss": 1.4958, "step": 1625},
    {"epoch": 8.36, "learning_rate": 9.800830971224945e-05, "loss": 1.4, "step": 1630},
    {"epoch": 8.38, "learning_rate": 9.29258952511179e-05, "loss": 1.3682, "step": 1635},
    {"epoch": 8.41, "learning_rate": 8.768571802466864e-05, "loss": 1.4391, "step": 1640},
    {"epoch": 8.44, "learning_rate": 8.232176259303665e-05, "loss": 1.3929, "step": 1645},
    {"epoch": 8.46, "learning_rate": 7.68688162655152e-05, "loss": 1.3586, "step": 1650},
    {"epoch": 8.49, "learning_rate": 7.136224349150594e-05, "loss": 1.5063, "step": 1655},
    {"epoch": 8.51, "learning_rate": 6.583775650849421e-05, "loss": 1.3837, "step": 1660},
    {"epoch": 8.54, "learning_rate": 6.0331183734484945e-05, "loss": 1.3909, "step": 1665},
    {"epoch": 8.56, "learning_rate": 5.4878237406963485e-05, "loss": 1.4117, "step": 1670},
    {"epoch": 8.59, "learning_rate": 4.95142819753315e-05, "loss": 1.3474, "step": 1675},
    {"epoch": 8.62, "learning_rate": 4.4274104748882226e-05, "loss": 1.3716, "step": 1680},
    {"epoch": 8.64, "learning_rate": 3.919169028775067e-05, "loss": 1.417, "step": 1685},
    {"epoch": 8.67, "learning_rate": 3.4300000000000204e-05, "loss": 1.4424, "step": 1690},
    {"epoch": 8.69, "learning_rate": 2.9630758374242923e-05, "loss": 1.457, "step": 1695},
    {"epoch": 8.72, "learning_rate": 2.521424723415734e-05, "loss": 1.41, "step": 1700},
    {"epoch": 8.74, "learning_rate": 2.1079109349241704e-05, "loss": 1.3425, "step": 1705},
    {"epoch": 8.77, "learning_rate": 1.7252162675462368e-05, "loss": 1.3511, "step": 1710},
    {"epoch": 8.79, "learning_rate": 1.3758226430519743e-05, "loss": 1.3591, "step": 1715},
    {"epoch": 8.82, "learning_rate": 1.061996013169563e-05, "loss": 1.3275, "step": 1720},
    {"epoch": 8.85, "learning_rate": 7.85771664018977e-06, "loss": 1.454, "step": 1725},
    {"epoch": 8.87, "learning_rate": 5.489410165004643e-06, "loss": 1.3359, "step": 1730},
    {"epoch": 8.9, "learning_rate": 3.5304000824258126e-06, "loss": 1.4167, "step": 1735},
    {"epoch": 8.92, "learning_rate": 1.993391324572832e-06, "loss": 1.2789, "step": 1740},
    {"epoch": 8.95, "learning_rate": 8.883519830391788e-07, "loss": 1.3723, "step": 1745},
    {"epoch": 8.97, "learning_rate": 2.2244866199319883e-07, "loss": 1.3274, "step": 1750},
    {"epoch": 9.0, "learning_rate": 0.0, "loss": 1.3058, "step": 1755},
    {"epoch": 9.0, "eval_loss": 1.4917341470718384, "eval_runtime": 11.5163, "eval_samples_per_second": 21.361, "eval_steps_per_second": 2.692, "step": 1755}
  ],
| "max_steps": 1950, | |
| "num_train_epochs": 10, | |
| "total_flos": 1834270064640000.0, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |