{
  "best_metric": 1.5933494567871094,
  "best_model_checkpoint": "output/scriptonite/checkpoint-608",
  "epoch": 2.0,
  "global_step": 608,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 0.00013710963595739938,
      "loss": 2.6067,
      "step": 5
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.0001368387818954924,
      "loss": 2.3107,
      "step": 10
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00013638815138477438,
      "loss": 2.1302,
      "step": 15
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00013575893162042516,
      "loss": 2.1562,
      "step": 20
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00013495278029461931,
      "loss": 2.0081,
      "step": 25
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00013397182122930294,
      "loss": 1.9735,
      "step": 30
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0001328186387809415,
      "loss": 2.0752,
      "step": 35
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001314962710319808,
      "loss": 2.0235,
      "step": 40
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001300082017869573,
      "loss": 2.0059,
      "step": 45
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0001283583513943447,
      "loss": 1.8462,
      "step": 50
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00012655106641831695,
      "loss": 1.8365,
      "step": 55
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00012459110818763681,
      "loss": 2.0625,
      "step": 60
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00012248364025183937,
      "loss": 1.9252,
      "step": 65
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00012023421477775639,
      "loss": 1.9528,
      "step": 70
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00011784875792222071,
      "loss": 1.7558,
      "step": 75
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00011533355421948647,
      "loss": 1.8522,
      "step": 80
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.00011269523002449659,
      "loss": 1.7714,
      "step": 85
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.000109940736055617,
      "loss": 1.8545,
      "step": 90
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.0001070773290828286,
      "loss": 1.856,
      "step": 95
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.00010411255280962044,
      "loss": 1.7938,
      "step": 100
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.0001010542179989503,
      "loss": 1.8907,
      "step": 105
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.791038189563213e-05,
      "loss": 1.8999,
      "step": 110
    },
    {
      "epoch": 0.38,
      "learning_rate": 9.468932699936187e-05,
      "loss": 1.8537,
      "step": 115
    },
    {
      "epoch": 0.39,
      "learning_rate": 9.139953924430466e-05,
      "loss": 1.8283,
      "step": 120
    },
    {
      "epoch": 0.41,
      "learning_rate": 8.804968564273005e-05,
      "loss": 1.6966,
      "step": 125
    },
    {
      "epoch": 0.42,
      "learning_rate": 8.46485914515931e-05,
      "loss": 1.8649,
      "step": 130
    },
    {
      "epoch": 0.44,
      "learning_rate": 8.120521692221673e-05,
      "loss": 1.8703,
      "step": 135
    },
    {
      "epoch": 0.46,
      "learning_rate": 7.772863369432873e-05,
      "loss": 1.7685,
      "step": 140
    },
    {
      "epoch": 0.47,
      "learning_rate": 7.422800089664365e-05,
      "loss": 1.8474,
      "step": 145
    },
    {
      "epoch": 0.49,
      "learning_rate": 7.071254101695329e-05,
      "loss": 1.7611,
      "step": 150
    },
    {
      "epoch": 0.51,
      "learning_rate": 6.719151560529643e-05,
      "loss": 1.7199,
      "step": 155
    },
    {
      "epoch": 0.52,
      "learning_rate": 6.367420087421813e-05,
      "loss": 1.7935,
      "step": 160
    },
    {
      "epoch": 0.54,
      "learning_rate": 6.016986326040062e-05,
      "loss": 1.7749,
      "step": 165
    },
    {
      "epoch": 0.56,
      "learning_rate": 5.668773501204858e-05,
      "loss": 1.8416,
      "step": 170
    },
    {
      "epoch": 0.57,
      "learning_rate": 5.323698986634498e-05,
      "loss": 1.7939,
      "step": 175
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.982671888105512e-05,
      "loss": 1.6949,
      "step": 180
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.646590648395145e-05,
      "loss": 1.7562,
      "step": 185
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.316340680315709e-05,
      "loss": 1.6271,
      "step": 190
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.992792034076668e-05,
      "loss": 1.713,
      "step": 195
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.6767971051197685e-05,
      "loss": 1.7105,
      "step": 200
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.369188388466058e-05,
      "loss": 1.7335,
      "step": 205
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.0707762854909304e-05,
      "loss": 1.6748,
      "step": 210
    },
    {
      "epoch": 0.7,
      "learning_rate": 2.7823469689053436e-05,
      "loss": 1.7346,
      "step": 215
    },
    {
      "epoch": 0.72,
      "learning_rate": 2.50466031156793e-05,
      "loss": 1.8197,
      "step": 220
    },
    {
      "epoch": 0.74,
      "learning_rate": 2.2384478845846175e-05,
      "loss": 1.6761,
      "step": 225
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.9844110299697732e-05,
      "loss": 1.7013,
      "step": 230
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.7432190129464996e-05,
      "loss": 1.7646,
      "step": 235
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.5155072587539005e-05,
      "loss": 1.5319,
      "step": 240
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.3018756786064535e-05,
      "loss": 1.79,
      "step": 245
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.1028870892157994e-05,
      "loss": 1.6705,
      "step": 250
    },
    {
      "epoch": 0.83,
      "learning_rate": 9.190657300387505e-06,
      "loss": 1.6285,
      "step": 255
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.508958821578629e-06,
      "loss": 1.706,
      "step": 260
    },
    {
      "epoch": 0.87,
      "learning_rate": 5.988205924331354e-06,
      "loss": 1.6291,
      "step": 265
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.6324050628611986e-06,
      "loss": 1.7411,
      "step": 270
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.445128121914628e-06,
      "loss": 1.7647,
      "step": 275
    },
    {
      "epoch": 0.92,
      "learning_rate": 2.4295030065665785e-06,
      "loss": 1.7888,
      "step": 280
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.5882054016913933e-06,
      "loss": 1.6871,
      "step": 285
    },
    {
      "epoch": 0.95,
      "learning_rate": 9.234517228169584e-07,
      "loss": 1.7539,
      "step": 290
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.3699327693326474e-07,
      "loss": 1.6844,
      "step": 295
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.3011164863877445e-07,
      "loss": 1.6774,
      "step": 300
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.6153237799239955e-09,
      "loss": 1.7028,
      "step": 305
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.727362871170044,
      "eval_runtime": 20.8096,
      "eval_samples_per_second": 21.336,
      "eval_steps_per_second": 2.691,
      "step": 306
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.3182872524144475e-07,
      "loss": 1.7503,
      "step": 310
    },
    {
      "epoch": 1.04,
      "learning_rate": 4.427558891872417e-07,
      "loss": 1.7186,
      "step": 315
    },
    {
      "epoch": 1.05,
      "learning_rate": 9.35614586573241e-07,
      "loss": 1.7355,
      "step": 320
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.6090892340046806e-06,
      "loss": 1.7325,
      "step": 325
    },
    {
      "epoch": 1.09,
      "learning_rate": 2.461382131516746e-06,
      "loss": 1.5602,
      "step": 330
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.4902182611592783e-06,
      "loss": 1.5739,
      "step": 335
    },
    {
      "epoch": 1.12,
      "learning_rate": 4.692851359682261e-06,
      "loss": 1.7024,
      "step": 340
    },
    {
      "epoch": 1.13,
      "learning_rate": 6.066071249112177e-06,
      "loss": 1.7912,
      "step": 345
    },
    {
      "epoch": 1.15,
      "learning_rate": 7.606212405651899e-06,
      "loss": 1.7776,
      "step": 350
    },
    {
      "epoch": 1.17,
      "learning_rate": 9.309163744031197e-06,
      "loss": 1.8068,
      "step": 355
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.1170379591190527e-05,
      "loss": 1.7059,
      "step": 360
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.3184891820006337e-05,
      "loss": 1.7498,
      "step": 365
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.5347323110669033e-05,
      "loss": 1.7696,
      "step": 370
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.7651901304315657e-05,
      "loss": 1.703,
      "step": 375
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.009247481060283e-05,
      "loss": 1.6793,
      "step": 380
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.2662529028092886e-05,
      "loss": 1.7064,
      "step": 385
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.5355203733622272e-05,
      "loss": 1.6889,
      "step": 390
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.816331139423508e-05,
      "loss": 1.7357,
      "step": 395
    },
    {
      "epoch": 1.32,
      "learning_rate": 3.1079356352801514e-05,
      "loss": 1.6548,
      "step": 400
    },
    {
      "epoch": 1.33,
      "learning_rate": 3.4095554836109936e-05,
      "loss": 1.7133,
      "step": 405
    },
    {
      "epoch": 1.35,
      "learning_rate": 3.7203855732024184e-05,
      "loss": 1.6095,
      "step": 410
    },
    {
      "epoch": 1.37,
      "learning_rate": 4.039596208024663e-05,
      "loss": 1.6151,
      "step": 415
    },
    {
      "epoch": 1.38,
      "learning_rate": 4.3663353219321276e-05,
      "loss": 1.6988,
      "step": 420
    },
    {
      "epoch": 1.4,
      "learning_rate": 4.6997307530760796e-05,
      "loss": 1.8323,
      "step": 425
    },
    {
      "epoch": 1.41,
      "learning_rate": 5.038892571958626e-05,
      "loss": 1.6735,
      "step": 430
    },
    {
      "epoch": 1.43,
      "learning_rate": 5.382915456913676e-05,
      "loss": 1.6212,
      "step": 435
    },
    {
      "epoch": 1.45,
      "learning_rate": 5.7308811106741675e-05,
      "loss": 1.7788,
      "step": 440
    },
    {
      "epoch": 1.46,
      "learning_rate": 6.0818607115748475e-05,
      "loss": 1.6847,
      "step": 445
    },
    {
      "epoch": 1.48,
      "learning_rate": 6.434917392847842e-05,
      "loss": 1.6746,
      "step": 450
    },
    {
      "epoch": 1.5,
      "learning_rate": 6.78910874339285e-05,
      "loss": 1.7122,
      "step": 455
    },
    {
      "epoch": 1.51,
      "learning_rate": 7.143489323346855e-05,
      "loss": 1.6207,
      "step": 460
    },
    {
      "epoch": 1.53,
      "learning_rate": 7.497113187738444e-05,
      "loss": 1.719,
      "step": 465
    },
    {
      "epoch": 1.55,
      "learning_rate": 7.849036411490396e-05,
      "loss": 1.814,
      "step": 470
    },
    {
      "epoch": 1.56,
      "learning_rate": 8.19831960903064e-05,
      "loss": 1.7822,
      "step": 475
    },
    {
      "epoch": 1.58,
      "learning_rate": 8.54403044178588e-05,
      "loss": 1.7602,
      "step": 480
    },
    {
      "epoch": 1.6,
      "learning_rate": 8.885246106864805e-05,
      "loss": 1.6178,
      "step": 485
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.221055800287699e-05,
      "loss": 1.7397,
      "step": 490
    },
    {
      "epoch": 1.63,
      "learning_rate": 9.550563148187563e-05,
      "loss": 1.6071,
      "step": 495
    },
    {
      "epoch": 1.64,
      "learning_rate": 9.87288859949296e-05,
      "loss": 1.66,
      "step": 500
    },
    {
      "epoch": 1.66,
      "learning_rate": 0.00010187171773706003,
      "loss": 1.6028,
      "step": 505
    },
    {
      "epoch": 1.68,
      "learning_rate": 0.00010492573757508462,
      "loss": 1.697,
      "step": 510
    },
    {
      "epoch": 1.69,
      "learning_rate": 0.0001078827934406569,
      "loss": 1.5973,
      "step": 515
    },
    {
      "epoch": 1.71,
      "learning_rate": 0.00011073499209051121,
      "loss": 1.6779,
      "step": 520
    },
    {
      "epoch": 1.73,
      "learning_rate": 0.00011347472017582718,
      "loss": 1.639,
      "step": 525
    },
    {
      "epoch": 1.74,
      "learning_rate": 0.00011609466456447531,
      "loss": 1.7209,
      "step": 530
    },
    {
      "epoch": 1.76,
      "learning_rate": 0.00011858783186189604,
      "loss": 1.7896,
      "step": 535
    },
    {
      "epoch": 1.78,
      "learning_rate": 0.00012094756707850673,
      "loss": 1.7093,
      "step": 540
    },
    {
      "epoch": 1.79,
      "learning_rate": 0.00012316757139380674,
      "loss": 1.614,
      "step": 545
    },
    {
      "epoch": 1.81,
      "learning_rate": 0.0001252419189697637,
      "loss": 1.7043,
      "step": 550
    },
    {
      "epoch": 1.83,
      "learning_rate": 0.0001271650727686013,
      "loss": 1.6544,
      "step": 555
    },
    {
      "epoch": 1.84,
      "learning_rate": 0.00012893189933276512,
      "loss": 1.6355,
      "step": 560
    },
    {
      "epoch": 1.86,
      "learning_rate": 0.00013053768248761616,
      "loss": 1.7327,
      "step": 565
    },
    {
      "epoch": 1.88,
      "learning_rate": 0.00013197813593027427,
      "loss": 1.5672,
      "step": 570
    },
    {
      "epoch": 1.89,
      "learning_rate": 0.00013324941467100882,
      "loss": 1.6036,
      "step": 575
    },
    {
      "epoch": 1.91,
      "learning_rate": 0.00013434812529663614,
      "loss": 1.5748,
      "step": 580
    },
    {
      "epoch": 1.92,
      "learning_rate": 0.00013527133502852737,
      "loss": 1.6181,
      "step": 585
    },
    {
      "epoch": 1.94,
      "learning_rate": 0.00013601657955104887,
      "loss": 1.6947,
      "step": 590
    },
    {
      "epoch": 1.96,
      "learning_rate": 0.00013658186958953817,
      "loss": 1.6579,
      "step": 595
    },
    {
      "epoch": 1.97,
      "learning_rate": 0.00013696569622025754,
      "loss": 1.6297,
      "step": 600
    },
    {
      "epoch": 1.99,
      "learning_rate": 0.00013716703489815072,
      "loss": 1.6272,
      "step": 605
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.5933494567871094,
      "eval_runtime": 22.1769,
      "eval_samples_per_second": 20.968,
      "eval_steps_per_second": 2.66,
      "step": 608
    }
  ],
  "max_steps": 608,
  "num_train_epochs": 2,
  "total_flos": 634286407680000.0,
  "trial_name": null,
  "trial_params": null
}