{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.8846153846153846,
  "global_step": 1200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 0.0003,
      "loss": 2.0952,
      "step": 5
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.000298793242156074,
      "loss": 2.2593,
      "step": 10
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.000297586484312148,
      "loss": 2.1623,
      "step": 15
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.000296379726468222,
      "loss": 1.9038,
      "step": 20
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.000295172968624296,
      "loss": 1.8576,
      "step": 25
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00029396621078037004,
      "loss": 1.8397,
      "step": 30
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00029275945293644407,
      "loss": 1.8032,
      "step": 35
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00029155269509251804,
      "loss": 1.7281,
      "step": 40
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00029034593724859206,
      "loss": 1.7097,
      "step": 45
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00028913917940466614,
      "loss": 1.7421,
      "step": 50
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0002879324215607401,
      "loss": 1.6861,
      "step": 55
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00028672566371681414,
      "loss": 1.6848,
      "step": 60
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00028551890587288816,
      "loss": 1.5163,
      "step": 65
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0002843121480289622,
      "loss": 1.7266,
      "step": 70
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0002831053901850362,
      "loss": 1.5888,
      "step": 75
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0002818986323411102,
      "loss": 1.5463,
      "step": 80
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0002806918744971842,
      "loss": 1.4611,
      "step": 85
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00027948511665325823,
      "loss": 1.5554,
      "step": 90
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00027827835880933226,
      "loss": 1.5481,
      "step": 95
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.0002770716009654062,
      "loss": 1.4464,
      "step": 100
    },
    {
      "epoch": 0.25,
      "eval_loss": 1.6602041721343994,
      "eval_runtime": 25.516,
      "eval_samples_per_second": 8.191,
      "eval_steps_per_second": 1.058,
      "step": 104
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00027586484312148025,
      "loss": 1.6648,
      "step": 105
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.0002746580852775543,
      "loss": 1.6464,
      "step": 110
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.0002734513274336283,
      "loss": 1.524,
      "step": 115
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.0002722445695897023,
      "loss": 1.4791,
      "step": 120
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.0002710378117457763,
      "loss": 1.5162,
      "step": 125
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.0002698310539018503,
      "loss": 1.6636,
      "step": 130
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.00026862429605792435,
      "loss": 1.7803,
      "step": 135
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00026741753821399837,
      "loss": 1.7384,
      "step": 140
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.0002662107803700724,
      "loss": 1.5462,
      "step": 145
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.0002650040225261464,
      "loss": 1.5802,
      "step": 150
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.00026379726468222045,
      "loss": 1.4942,
      "step": 155
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.0002625905068382944,
      "loss": 1.601,
      "step": 160
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.00026138374899436844,
      "loss": 1.5822,
      "step": 165
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.00026017699115044247,
      "loss": 1.5071,
      "step": 170
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.0002589702333065165,
      "loss": 1.5926,
      "step": 175
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00025776347546259046,
      "loss": 1.4206,
      "step": 180
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.0002565567176186645,
      "loss": 1.5317,
      "step": 185
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.0002553499597747385,
      "loss": 1.4862,
      "step": 190
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.00025414320193081253,
      "loss": 1.4387,
      "step": 195
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00025293644408688656,
      "loss": 1.5276,
      "step": 200
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.00025172968624296053,
      "loss": 1.55,
      "step": 205
    },
    {
      "epoch": 0.5,
      "eval_loss": 1.6166218519210815,
      "eval_runtime": 13.3266,
      "eval_samples_per_second": 15.683,
      "eval_steps_per_second": 2.026,
      "step": 208
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00025052292839903455,
      "loss": 1.5525,
      "step": 210
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.0002493161705551086,
      "loss": 1.5591,
      "step": 215
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.0002481094127111826,
      "loss": 1.529,
      "step": 220
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.0002469026548672566,
      "loss": 1.7152,
      "step": 225
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.00024569589702333065,
      "loss": 1.7493,
      "step": 230
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.0002444891391794047,
      "loss": 1.5288,
      "step": 235
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.00024328238133547868,
      "loss": 1.5651,
      "step": 240
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.00024207562349155267,
      "loss": 1.5144,
      "step": 245
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.0002408688656476267,
      "loss": 1.5334,
      "step": 250
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.0002396621078037007,
      "loss": 1.5988,
      "step": 255
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.00023845534995977472,
      "loss": 1.6401,
      "step": 260
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.00023724859211584875,
      "loss": 1.5929,
      "step": 265
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.00023604183427192274,
      "loss": 1.3948,
      "step": 270
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.00023483507642799677,
      "loss": 1.221,
      "step": 275
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.00023362831858407077,
      "loss": 1.5604,
      "step": 280
    },
    {
      "epoch": 0.69,
      "learning_rate": 0.0002324215607401448,
      "loss": 1.505,
      "step": 285
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.0002312148028962188,
      "loss": 1.5008,
      "step": 290
    },
    {
      "epoch": 0.71,
      "learning_rate": 0.0002300080450522928,
      "loss": 1.5176,
      "step": 295
    },
    {
      "epoch": 0.72,
      "learning_rate": 0.0002288012872083668,
      "loss": 1.4227,
      "step": 300
    },
    {
      "epoch": 0.73,
      "learning_rate": 0.00022759452936444084,
      "loss": 1.1778,
      "step": 305
    },
    {
      "epoch": 0.75,
      "learning_rate": 0.00022638777152051486,
      "loss": 1.5052,
      "step": 310
    },
    {
      "epoch": 0.75,
      "eval_loss": 1.5979019403457642,
      "eval_runtime": 13.342,
      "eval_samples_per_second": 15.665,
      "eval_steps_per_second": 2.024,
      "step": 312
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.00022518101367658886,
      "loss": 1.5228,
      "step": 315
    },
    {
      "epoch": 0.77,
      "learning_rate": 0.0002239742558326629,
      "loss": 1.5385,
      "step": 320
    },
    {
      "epoch": 0.78,
      "learning_rate": 0.0002227674979887369,
      "loss": 1.5072,
      "step": 325
    },
    {
      "epoch": 0.79,
      "learning_rate": 0.00022156074014481093,
      "loss": 1.5032,
      "step": 330
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.00022035398230088496,
      "loss": 1.5981,
      "step": 335
    },
    {
      "epoch": 0.82,
      "learning_rate": 0.00021914722445695895,
      "loss": 1.422,
      "step": 340
    },
    {
      "epoch": 0.83,
      "learning_rate": 0.00021794046661303298,
      "loss": 1.4948,
      "step": 345
    },
    {
      "epoch": 0.84,
      "learning_rate": 0.00021673370876910698,
      "loss": 1.4421,
      "step": 350
    },
    {
      "epoch": 0.85,
      "learning_rate": 0.000215526950925181,
      "loss": 1.6086,
      "step": 355
    },
    {
      "epoch": 0.87,
      "learning_rate": 0.000214320193081255,
      "loss": 1.3874,
      "step": 360
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.00021311343523732902,
      "loss": 1.2493,
      "step": 365
    },
    {
      "epoch": 0.89,
      "learning_rate": 0.00021190667739340302,
      "loss": 1.5565,
      "step": 370
    },
    {
      "epoch": 0.9,
      "learning_rate": 0.00021069991954947705,
      "loss": 1.4739,
      "step": 375
    },
    {
      "epoch": 0.91,
      "learning_rate": 0.00020949316170555107,
      "loss": 1.5288,
      "step": 380
    },
    {
      "epoch": 0.93,
      "learning_rate": 0.00020828640386162507,
      "loss": 1.4837,
      "step": 385
    },
    {
      "epoch": 0.94,
      "learning_rate": 0.0002070796460176991,
      "loss": 1.5009,
      "step": 390
    },
    {
      "epoch": 0.95,
      "learning_rate": 0.0002058728881737731,
      "loss": 1.3469,
      "step": 395
    },
    {
      "epoch": 0.96,
      "learning_rate": 0.00020466613032984712,
      "loss": 1.3949,
      "step": 400
    },
    {
      "epoch": 0.97,
      "learning_rate": 0.00020345937248592117,
      "loss": 1.3688,
      "step": 405
    },
    {
      "epoch": 0.99,
      "learning_rate": 0.00020225261464199517,
      "loss": 1.6629,
      "step": 410
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.0002010458567980692,
      "loss": 1.5395,
      "step": 415
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.589026927947998,
      "eval_runtime": 13.34,
      "eval_samples_per_second": 15.667,
      "eval_steps_per_second": 2.024,
      "step": 416
    },
    {
      "epoch": 1.01,
      "learning_rate": 0.0001998390989541432,
      "loss": 1.5794,
      "step": 420
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.0001986323411102172,
      "loss": 1.4746,
      "step": 425
    },
    {
      "epoch": 1.03,
      "learning_rate": 0.0001974255832662912,
      "loss": 1.3205,
      "step": 430
    },
    {
      "epoch": 1.05,
      "learning_rate": 0.00019621882542236523,
      "loss": 1.491,
      "step": 435
    },
    {
      "epoch": 1.06,
      "learning_rate": 0.00019501206757843926,
      "loss": 1.4803,
      "step": 440
    },
    {
      "epoch": 1.07,
      "learning_rate": 0.00019380530973451326,
      "loss": 1.3436,
      "step": 445
    },
    {
      "epoch": 1.08,
      "learning_rate": 0.00019259855189058728,
      "loss": 1.3893,
      "step": 450
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.00019139179404666128,
      "loss": 1.0474,
      "step": 455
    },
    {
      "epoch": 1.11,
      "learning_rate": 0.0001901850362027353,
      "loss": 1.3388,
      "step": 460
    },
    {
      "epoch": 1.12,
      "learning_rate": 0.0001889782783588093,
      "loss": 1.4201,
      "step": 465
    },
    {
      "epoch": 1.13,
      "learning_rate": 0.00018777152051488333,
      "loss": 1.4805,
      "step": 470
    },
    {
      "epoch": 1.14,
      "learning_rate": 0.00018656476267095732,
      "loss": 1.2253,
      "step": 475
    },
    {
      "epoch": 1.15,
      "learning_rate": 0.00018535800482703135,
      "loss": 1.5064,
      "step": 480
    },
    {
      "epoch": 1.17,
      "learning_rate": 0.00018415124698310537,
      "loss": 1.3565,
      "step": 485
    },
    {
      "epoch": 1.18,
      "learning_rate": 0.00018294448913917937,
      "loss": 1.3976,
      "step": 490
    },
    {
      "epoch": 1.19,
      "learning_rate": 0.00018173773129525342,
      "loss": 1.5882,
      "step": 495
    },
    {
      "epoch": 1.2,
      "learning_rate": 0.00018053097345132742,
      "loss": 1.4698,
      "step": 500
    },
    {
      "epoch": 1.21,
      "learning_rate": 0.00017932421560740145,
      "loss": 1.5326,
      "step": 505
    },
    {
      "epoch": 1.23,
      "learning_rate": 0.00017811745776347547,
      "loss": 1.5659,
      "step": 510
    },
    {
      "epoch": 1.24,
      "learning_rate": 0.00017691069991954947,
      "loss": 1.4489,
      "step": 515
    },
    {
      "epoch": 1.25,
      "learning_rate": 0.0001757039420756235,
      "loss": 1.5312,
      "step": 520
    },
    {
      "epoch": 1.25,
      "eval_loss": 1.5857353210449219,
      "eval_runtime": 13.3247,
      "eval_samples_per_second": 15.685,
      "eval_steps_per_second": 2.026,
      "step": 520
    },
    {
      "epoch": 1.26,
      "learning_rate": 0.0001744971842316975,
      "loss": 1.3595,
      "step": 525
    },
    {
      "epoch": 1.27,
      "learning_rate": 0.00017329042638777152,
      "loss": 1.1543,
      "step": 530
    },
    {
      "epoch": 1.29,
      "learning_rate": 0.0001720836685438455,
      "loss": 1.4568,
      "step": 535
    },
    {
      "epoch": 1.3,
      "learning_rate": 0.00017087691069991954,
      "loss": 1.3682,
      "step": 540
    },
    {
      "epoch": 1.31,
      "learning_rate": 0.00016967015285599354,
      "loss": 1.5323,
      "step": 545
    },
    {
      "epoch": 1.32,
      "learning_rate": 0.00016846339501206756,
      "loss": 1.6644,
      "step": 550
    },
    {
      "epoch": 1.33,
      "learning_rate": 0.00016725663716814158,
      "loss": 1.5735,
      "step": 555
    },
    {
      "epoch": 1.35,
      "learning_rate": 0.00016604987932421558,
      "loss": 1.4759,
      "step": 560
    },
    {
      "epoch": 1.36,
      "learning_rate": 0.0001648431214802896,
      "loss": 1.3774,
      "step": 565
    },
    {
      "epoch": 1.37,
      "learning_rate": 0.0001636363636363636,
      "loss": 1.3321,
      "step": 570
    },
    {
      "epoch": 1.38,
      "learning_rate": 0.00016242960579243763,
      "loss": 1.2843,
      "step": 575
    },
    {
      "epoch": 1.39,
      "learning_rate": 0.00016122284794851163,
      "loss": 1.4636,
      "step": 580
    },
    {
      "epoch": 1.41,
      "learning_rate": 0.00016001609010458568,
      "loss": 1.4082,
      "step": 585
    },
    {
      "epoch": 1.42,
      "learning_rate": 0.0001588093322606597,
      "loss": 1.3848,
      "step": 590
    },
    {
      "epoch": 1.43,
      "learning_rate": 0.0001576025744167337,
      "loss": 1.5396,
      "step": 595
    },
    {
      "epoch": 1.44,
      "learning_rate": 0.00015639581657280773,
      "loss": 1.4889,
      "step": 600
    },
    {
      "epoch": 1.45,
      "learning_rate": 0.00015518905872888172,
      "loss": 1.4019,
      "step": 605
    },
    {
      "epoch": 1.47,
      "learning_rate": 0.00015398230088495575,
      "loss": 1.3788,
      "step": 610
    },
    {
      "epoch": 1.48,
      "learning_rate": 0.00015277554304102975,
      "loss": 1.409,
      "step": 615
    },
    {
      "epoch": 1.49,
      "learning_rate": 0.00015156878519710377,
      "loss": 1.5675,
      "step": 620
    },
    {
      "epoch": 1.5,
      "eval_loss": 1.584699273109436,
      "eval_runtime": 13.3509,
      "eval_samples_per_second": 15.654,
      "eval_steps_per_second": 2.022,
      "step": 624
    },
    {
      "epoch": 1.5,
      "learning_rate": 0.0001503620273531778,
      "loss": 1.3176,
      "step": 625
    },
    {
      "epoch": 1.51,
      "learning_rate": 0.0001491552695092518,
      "loss": 1.3114,
      "step": 630
    },
    {
      "epoch": 1.53,
      "learning_rate": 0.00014794851166532582,
      "loss": 1.4261,
      "step": 635
    },
    {
      "epoch": 1.54,
      "learning_rate": 0.00014674175382139982,
      "loss": 1.5156,
      "step": 640
    },
    {
      "epoch": 1.55,
      "learning_rate": 0.00014553499597747384,
      "loss": 1.3231,
      "step": 645
    },
    {
      "epoch": 1.56,
      "learning_rate": 0.00014432823813354787,
      "loss": 1.4353,
      "step": 650
    },
    {
      "epoch": 1.57,
      "learning_rate": 0.00014312148028962186,
      "loss": 1.4423,
      "step": 655
    },
    {
      "epoch": 1.59,
      "learning_rate": 0.0001419147224456959,
      "loss": 1.1512,
      "step": 660
    },
    {
      "epoch": 1.6,
      "learning_rate": 0.0001407079646017699,
      "loss": 1.3562,
      "step": 665
    },
    {
      "epoch": 1.61,
      "learning_rate": 0.0001395012067578439,
      "loss": 1.4033,
      "step": 670
    },
    {
      "epoch": 1.62,
      "learning_rate": 0.00013829444891391793,
      "loss": 1.5435,
      "step": 675
    },
    {
      "epoch": 1.63,
      "learning_rate": 0.00013708769106999193,
      "loss": 1.4543,
      "step": 680
    },
    {
      "epoch": 1.65,
      "learning_rate": 0.00013588093322606596,
      "loss": 1.3509,
      "step": 685
    },
    {
      "epoch": 1.66,
      "learning_rate": 0.00013467417538213995,
      "loss": 1.4268,
      "step": 690
    },
    {
      "epoch": 1.67,
      "learning_rate": 0.000133467417538214,
      "loss": 1.4231,
      "step": 695
    },
    {
      "epoch": 1.68,
      "learning_rate": 0.000132260659694288,
      "loss": 1.4727,
      "step": 700
    },
    {
      "epoch": 1.69,
      "learning_rate": 0.00013105390185036203,
      "loss": 1.2846,
      "step": 705
    },
    {
      "epoch": 1.71,
      "learning_rate": 0.00012984714400643603,
      "loss": 1.3458,
      "step": 710
    },
    {
      "epoch": 1.72,
      "learning_rate": 0.00012864038616251005,
      "loss": 1.6349,
      "step": 715
    },
    {
      "epoch": 1.73,
      "learning_rate": 0.00012743362831858405,
      "loss": 1.4798,
      "step": 720
    },
    {
      "epoch": 1.74,
      "learning_rate": 0.00012622687047465807,
      "loss": 1.6157,
      "step": 725
    },
    {
      "epoch": 1.75,
      "eval_loss": 1.5761181116104126,
      "eval_runtime": 13.3218,
      "eval_samples_per_second": 15.689,
      "eval_steps_per_second": 2.027,
      "step": 728
    },
    {
      "epoch": 1.75,
      "learning_rate": 0.00012502011263073207,
      "loss": 1.4723,
      "step": 730
    },
    {
      "epoch": 1.77,
      "learning_rate": 0.0001238133547868061,
      "loss": 1.4487,
      "step": 735
    },
    {
      "epoch": 1.78,
      "learning_rate": 0.00012260659694288012,
      "loss": 1.3613,
      "step": 740
    },
    {
      "epoch": 1.79,
      "learning_rate": 0.00012139983909895413,
      "loss": 1.1906,
      "step": 745
    },
    {
      "epoch": 1.8,
      "learning_rate": 0.00012019308125502816,
      "loss": 1.4962,
      "step": 750
    },
    {
      "epoch": 1.81,
      "learning_rate": 0.00011898632341110217,
      "loss": 1.5629,
      "step": 755
    },
    {
      "epoch": 1.83,
      "learning_rate": 0.00011777956556717618,
      "loss": 1.3566,
      "step": 760
    },
    {
      "epoch": 1.84,
      "learning_rate": 0.00011657280772325019,
      "loss": 1.3442,
      "step": 765
    },
    {
      "epoch": 1.85,
      "learning_rate": 0.0001153660498793242,
      "loss": 1.4071,
      "step": 770
    },
    {
      "epoch": 1.86,
      "learning_rate": 0.00011415929203539821,
      "loss": 1.4331,
      "step": 775
    },
    {
      "epoch": 1.88,
      "learning_rate": 0.00011295253419147222,
      "loss": 1.3572,
      "step": 780
    },
    {
      "epoch": 1.89,
      "learning_rate": 0.00011174577634754626,
      "loss": 1.4009,
      "step": 785
    },
    {
      "epoch": 1.9,
      "learning_rate": 0.00011053901850362027,
      "loss": 1.47,
      "step": 790
    },
    {
      "epoch": 1.91,
      "learning_rate": 0.00010933226065969428,
      "loss": 1.233,
      "step": 795
    },
    {
      "epoch": 1.92,
      "learning_rate": 0.0001081255028157683,
      "loss": 1.4593,
      "step": 800
    },
    {
      "epoch": 1.94,
      "learning_rate": 0.00010691874497184231,
      "loss": 1.4147,
      "step": 805
    },
    {
      "epoch": 1.95,
      "learning_rate": 0.00010571198712791632,
      "loss": 1.2775,
      "step": 810
    },
    {
      "epoch": 1.96,
      "learning_rate": 0.00010450522928399033,
      "loss": 1.5372,
      "step": 815
    },
    {
      "epoch": 1.97,
      "learning_rate": 0.00010329847144006434,
      "loss": 1.2994,
      "step": 820
    },
    {
      "epoch": 1.98,
      "learning_rate": 0.00010209171359613835,
      "loss": 1.3639,
      "step": 825
    },
    {
      "epoch": 2.0,
      "learning_rate": 0.00010088495575221239,
      "loss": 1.4629,
      "step": 830
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.5735822916030884,
      "eval_runtime": 13.3179,
      "eval_samples_per_second": 15.693,
      "eval_steps_per_second": 2.027,
      "step": 832
    },
    {
      "epoch": 2.01,
      "learning_rate": 9.96781979082864e-05,
      "loss": 1.3559,
      "step": 835
    },
    {
      "epoch": 2.02,
      "learning_rate": 9.847144006436041e-05,
      "loss": 1.3924,
      "step": 840
    },
    {
      "epoch": 2.03,
      "learning_rate": 9.726468222043442e-05,
      "loss": 1.2349,
      "step": 845
    },
    {
      "epoch": 2.04,
      "learning_rate": 9.605792437650844e-05,
      "loss": 1.1467,
      "step": 850
    },
    {
      "epoch": 2.06,
      "learning_rate": 9.485116653258245e-05,
      "loss": 1.3251,
      "step": 855
    },
    {
      "epoch": 2.07,
      "learning_rate": 9.364440868865646e-05,
      "loss": 1.1517,
      "step": 860
    },
    {
      "epoch": 2.08,
      "learning_rate": 9.243765084473048e-05,
      "loss": 1.4388,
      "step": 865
    },
    {
      "epoch": 2.09,
      "learning_rate": 9.12308930008045e-05,
      "loss": 1.5138,
      "step": 870
    },
    {
      "epoch": 2.1,
      "learning_rate": 9.002413515687852e-05,
      "loss": 1.4948,
      "step": 875
    },
    {
      "epoch": 2.12,
      "learning_rate": 8.881737731295253e-05,
      "loss": 1.2988,
      "step": 880
    },
    {
      "epoch": 2.13,
      "learning_rate": 8.761061946902654e-05,
      "loss": 1.226,
      "step": 885
    },
    {
      "epoch": 2.14,
      "learning_rate": 8.640386162510055e-05,
      "loss": 1.1645,
      "step": 890
    },
    {
      "epoch": 2.15,
      "learning_rate": 8.519710378117456e-05,
      "loss": 1.278,
      "step": 895
    },
    {
      "epoch": 2.16,
      "learning_rate": 8.399034593724859e-05,
      "loss": 1.3873,
      "step": 900
    },
    {
      "epoch": 2.18,
      "learning_rate": 8.27835880933226e-05,
      "loss": 1.231,
      "step": 905
    },
    {
      "epoch": 2.19,
      "learning_rate": 8.157683024939661e-05,
      "loss": 1.401,
      "step": 910
    },
    {
      "epoch": 2.2,
      "learning_rate": 8.037007240547062e-05,
      "loss": 1.4372,
      "step": 915
    },
    {
      "epoch": 2.21,
      "learning_rate": 7.916331456154465e-05,
      "loss": 1.3624,
      "step": 920
    },
    {
      "epoch": 2.22,
      "learning_rate": 7.795655671761866e-05,
      "loss": 1.5242,
      "step": 925
    },
    {
      "epoch": 2.24,
      "learning_rate": 7.674979887369267e-05,
      "loss": 1.3661,
      "step": 930
    },
    {
      "epoch": 2.25,
      "learning_rate": 7.554304102976669e-05,
      "loss": 1.3608,
      "step": 935
    },
    {
      "epoch": 2.25,
      "eval_loss": 1.582654356956482,
      "eval_runtime": 13.332,
      "eval_samples_per_second": 15.677,
      "eval_steps_per_second": 2.025,
      "step": 936
    },
    {
      "epoch": 2.26,
      "learning_rate": 7.43362831858407e-05,
      "loss": 1.476,
      "step": 940
    },
    {
      "epoch": 2.27,
      "learning_rate": 7.312952534191472e-05,
      "loss": 1.3051,
      "step": 945
    },
    {
      "epoch": 2.28,
      "learning_rate": 7.192276749798874e-05,
      "loss": 1.418,
      "step": 950
    },
    {
      "epoch": 2.3,
      "learning_rate": 7.071600965406275e-05,
      "loss": 1.1781,
      "step": 955
    },
    {
      "epoch": 2.31,
      "learning_rate": 6.950925181013676e-05,
      "loss": 1.4319,
      "step": 960
    },
    {
      "epoch": 2.32,
      "learning_rate": 6.830249396621077e-05,
      "loss": 1.3059,
      "step": 965
    },
    {
      "epoch": 2.33,
      "learning_rate": 6.709573612228479e-05,
      "loss": 1.0836,
      "step": 970
    },
    {
      "epoch": 2.34,
      "learning_rate": 6.588897827835881e-05,
      "loss": 1.376,
      "step": 975
    },
    {
      "epoch": 2.36,
      "learning_rate": 6.468222043443282e-05,
      "loss": 1.5022,
      "step": 980
    },
    {
      "epoch": 2.37,
      "learning_rate": 6.347546259050683e-05,
      "loss": 1.2871,
      "step": 985
    },
    {
      "epoch": 2.38,
      "learning_rate": 6.226870474658084e-05,
      "loss": 1.2777,
      "step": 990
    },
    {
      "epoch": 2.39,
      "learning_rate": 6.106194690265487e-05,
      "loss": 1.5625,
      "step": 995
    },
    {
      "epoch": 2.4,
      "learning_rate": 5.985518905872888e-05,
      "loss": 1.087,
      "step": 1000
    },
    {
      "epoch": 2.42,
      "learning_rate": 5.864843121480289e-05,
      "loss": 1.3169,
      "step": 1005
    },
    {
      "epoch": 2.43,
      "learning_rate": 5.74416733708769e-05,
      "loss": 1.3657,
      "step": 1010
    },
    {
      "epoch": 2.44,
      "learning_rate": 5.6234915526950926e-05,
      "loss": 1.3917,
      "step": 1015
    },
    {
      "epoch": 2.45,
      "learning_rate": 5.502815768302494e-05,
      "loss": 1.3179,
      "step": 1020
    },
    {
      "epoch": 2.46,
      "learning_rate": 5.382139983909895e-05,
      "loss": 1.4162,
      "step": 1025
    },
    {
      "epoch": 2.48,
      "learning_rate": 5.261464199517296e-05,
      "loss": 1.3564,
      "step": 1030
    },
    {
      "epoch": 2.49,
      "learning_rate": 5.140788415124698e-05,
      "loss": 1.276,
      "step": 1035
    },
    {
      "epoch": 2.5,
      "learning_rate": 5.0201126307320996e-05,
      "loss": 1.3666,
      "step": 1040
    },
    {
      "epoch": 2.5,
      "eval_loss": 1.5801613330841064,
      "eval_runtime": 13.3272,
      "eval_samples_per_second": 15.682,
      "eval_steps_per_second": 2.026,
      "step": 1040
    },
    {
      "epoch": 2.51,
      "learning_rate": 4.899436846339501e-05,
      "loss": 1.4251,
      "step": 1045
    },
    {
      "epoch": 2.52,
      "learning_rate": 4.778761061946902e-05,
      "loss": 1.4518,
      "step": 1050
    },
    {
      "epoch": 2.54,
      "learning_rate": 4.6580852775543036e-05,
      "loss": 1.3541,
      "step": 1055
    },
    {
      "epoch": 2.55,
      "learning_rate": 4.5374094931617054e-05,
      "loss": 1.249,
      "step": 1060
    },
    {
      "epoch": 2.56,
      "learning_rate": 4.4167337087691066e-05,
      "loss": 1.3855,
      "step": 1065
    },
    {
      "epoch": 2.57,
      "learning_rate": 4.2960579243765084e-05,
      "loss": 1.2707,
      "step": 1070
    },
    {
      "epoch": 2.58,
      "learning_rate": 4.1753821399839095e-05,
      "loss": 1.2083,
      "step": 1075
    },
    {
      "epoch": 2.6,
      "learning_rate": 4.0547063555913106e-05,
      "loss": 1.4777,
      "step": 1080
    },
    {
      "epoch": 2.61,
      "learning_rate": 3.934030571198713e-05,
      "loss": 1.4348,
      "step": 1085
    },
    {
      "epoch": 2.62,
      "learning_rate": 3.813354786806114e-05,
      "loss": 1.2354,
      "step": 1090
    },
    {
      "epoch": 2.63,
      "learning_rate": 3.692679002413515e-05,
      "loss": 1.294,
      "step": 1095
    },
    {
      "epoch": 2.64,
      "learning_rate": 3.572003218020917e-05,
      "loss": 1.3231,
      "step": 1100
    },
    {
      "epoch": 2.66,
      "learning_rate": 3.451327433628318e-05,
      "loss": 1.2215,
      "step": 1105
    },
    {
      "epoch": 2.67,
      "learning_rate": 3.33065164923572e-05,
      "loss": 1.1217,
      "step": 1110
    },
    {
      "epoch": 2.68,
      "learning_rate": 3.209975864843121e-05,
      "loss": 1.4292,
      "step": 1115
    },
    {
      "epoch": 2.69,
      "learning_rate": 3.089300080450522e-05,
      "loss": 1.195,
      "step": 1120
    },
    {
      "epoch": 2.7,
      "learning_rate": 2.968624296057924e-05,
      "loss": 1.3549,
      "step": 1125
    },
    {
      "epoch": 2.72,
      "learning_rate": 2.8479485116653255e-05,
      "loss": 1.4274,
      "step": 1130
    },
    {
      "epoch": 2.73,
      "learning_rate": 2.727272727272727e-05,
      "loss": 1.4838,
      "step": 1135
    },
    {
      "epoch": 2.74,
      "learning_rate": 2.6065969428801284e-05,
      "loss": 1.2394,
      "step": 1140
    },
    {
      "epoch": 2.75,
      "eval_loss": 1.5814132690429688,
      "eval_runtime": 13.3223,
      "eval_samples_per_second": 15.688,
      "eval_steps_per_second": 2.027,
      "step": 1144
    },
    {
      "epoch": 2.75,
      "learning_rate": 2.4859211584875302e-05,
      "loss": 1.2414,
      "step": 1145
    },
    {
      "epoch": 2.76,
      "learning_rate": 2.3652453740949313e-05,
      "loss": 1.288,
      "step": 1150
    },
    {
      "epoch": 2.78,
      "learning_rate": 2.244569589702333e-05,
      "loss": 1.2,
      "step": 1155
    },
    {
      "epoch": 2.79,
      "learning_rate": 2.1238938053097343e-05,
      "loss": 1.2117,
      "step": 1160
    },
    {
      "epoch": 2.8,
      "learning_rate": 2.003218020917136e-05,
      "loss": 1.3717,
      "step": 1165
    },
    {
      "epoch": 2.81,
      "learning_rate": 1.8825422365245372e-05,
      "loss": 1.438,
      "step": 1170
    },
    {
      "epoch": 2.82,
      "learning_rate": 1.7618664521319386e-05,
      "loss": 1.3346,
      "step": 1175
    },
    {
      "epoch": 2.84,
      "learning_rate": 1.64119066773934e-05,
      "loss": 1.227,
      "step": 1180
    },
    {
      "epoch": 2.85,
      "learning_rate": 1.5205148833467417e-05,
      "loss": 1.3146,
      "step": 1185
    },
    {
      "epoch": 2.86,
      "learning_rate": 1.3998390989541432e-05,
      "loss": 1.4163,
      "step": 1190
    },
    {
      "epoch": 2.87,
      "learning_rate": 1.2791633145615445e-05,
      "loss": 1.1681,
      "step": 1195
    },
    {
      "epoch": 2.88,
      "learning_rate": 1.158487530168946e-05,
      "loss": 1.4127,
      "step": 1200
    }
  ],
  "max_steps": 1248,
  "num_train_epochs": 3,
  "total_flos": 957297581350656.0,
  "trial_name": null,
  "trial_params": null
}