{
  "best_metric": 1.254175066947937,
  "best_model_checkpoint": "data/phi-1_5_sft/checkpoint-2900",
  "epoch": 2.998499249624812,
  "eval_steps": 100,
  "global_step": 2997,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 1.6666666666666668e-07,
      "loss": 1.6512,
      "step": 1
    },
    {
      "epoch": 0.01,
      "learning_rate": 8.333333333333333e-07,
      "loss": 1.7421,
      "step": 5
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 1.7114,
      "step": 10
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.5e-06,
      "loss": 1.6882,
      "step": 15
    },
    {
      "epoch": 0.02,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.5959,
      "step": 20
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.166666666666667e-06,
      "loss": 1.4829,
      "step": 25
    },
    {
      "epoch": 0.03,
      "learning_rate": 5e-06,
      "loss": 1.4395,
      "step": 30
    },
    {
      "epoch": 0.04,
      "learning_rate": 5.833333333333334e-06,
      "loss": 1.4149,
      "step": 35
    },
    {
      "epoch": 0.04,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.3751,
      "step": 40
    },
    {
      "epoch": 0.05,
      "learning_rate": 7.500000000000001e-06,
      "loss": 1.3746,
      "step": 45
    },
    {
      "epoch": 0.05,
      "learning_rate": 8.333333333333334e-06,
      "loss": 1.3864,
      "step": 50
    },
    {
      "epoch": 0.06,
      "learning_rate": 9.166666666666666e-06,
      "loss": 1.3487,
      "step": 55
    },
    {
      "epoch": 0.06,
      "learning_rate": 1e-05,
      "loss": 1.3613,
      "step": 60
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.0833333333333334e-05,
      "loss": 1.3476,
      "step": 65
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.1666666666666668e-05,
      "loss": 1.3487,
      "step": 70
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.25e-05,
      "loss": 1.3485,
      "step": 75
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 1.347,
      "step": 80
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.416666666666667e-05,
      "loss": 1.3166,
      "step": 85
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 1.3199,
      "step": 90
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.5833333333333333e-05,
      "loss": 1.3265,
      "step": 95
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 1.3099,
      "step": 100
    },
    {
      "epoch": 0.1,
      "eval_loss": 1.339752197265625,
      "eval_runtime": 152.8603,
      "eval_samples_per_second": 92.581,
      "eval_steps_per_second": 2.898,
      "step": 100
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.7500000000000002e-05,
      "loss": 1.3124,
      "step": 105
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.8333333333333333e-05,
      "loss": 1.3422,
      "step": 110
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.916666666666667e-05,
      "loss": 1.3155,
      "step": 115
    },
    {
      "epoch": 0.12,
      "learning_rate": 2e-05,
      "loss": 1.3181,
      "step": 120
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9999850951038044e-05,
      "loss": 1.296,
      "step": 125
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.999940380859529e-05,
      "loss": 1.2947,
      "step": 130
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9998658586000968e-05,
      "loss": 1.3271,
      "step": 135
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9997615305470002e-05,
      "loss": 1.3193,
      "step": 140
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9996273998102368e-05,
      "loss": 1.321,
      "step": 145
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9994634703882166e-05,
      "loss": 1.3308,
      "step": 150
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9992697471676413e-05,
      "loss": 1.329,
      "step": 155
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9990462359233595e-05,
      "loss": 1.324,
      "step": 160
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.998792943318196e-05,
      "loss": 1.3263,
      "step": 165
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.99850987690275e-05,
      "loss": 1.3239,
      "step": 170
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9981970451151724e-05,
      "loss": 1.3088,
      "step": 175
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9978544572809143e-05,
      "loss": 1.3385,
      "step": 180
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.997482123612448e-05,
      "loss": 1.3323,
      "step": 185
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9970800552089623e-05,
      "loss": 1.308,
      "step": 190
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9966482640560335e-05,
      "loss": 1.2985,
      "step": 195
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.996186763025266e-05,
      "loss": 1.3131,
      "step": 200
    },
    {
      "epoch": 0.2,
      "eval_loss": 1.315926194190979,
      "eval_runtime": 152.9359,
      "eval_samples_per_second": 92.535,
      "eval_steps_per_second": 2.897,
      "step": 200
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9956955658739098e-05,
      "loss": 1.3049,
      "step": 205
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.995174687244449e-05,
      "loss": 1.3123,
      "step": 210
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.994624142664169e-05,
      "loss": 1.3071,
      "step": 215
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9940439485446884e-05,
      "loss": 1.2991,
      "step": 220
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.993434122181474e-05,
      "loss": 1.3026,
      "step": 225
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9927946817533224e-05,
      "loss": 1.2919,
      "step": 230
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9921256463218207e-05,
      "loss": 1.2949,
      "step": 235
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9914270358307762e-05,
      "loss": 1.3001,
      "step": 240
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9906988711056215e-05,
      "loss": 1.3064,
      "step": 245
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9899411738527977e-05,
      "loss": 1.3145,
      "step": 250
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.989153966659101e-05,
      "loss": 1.3119,
      "step": 255
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9883372729910154e-05,
      "loss": 1.2871,
      "step": 260
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9874911171940087e-05,
      "loss": 1.2849,
      "step": 265
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9866155244918106e-05,
      "loss": 1.3173,
      "step": 270
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.985710520985657e-05,
      "loss": 1.285,
      "step": 275
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9847761336535148e-05,
      "loss": 1.2926,
      "step": 280
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9838123903492764e-05,
      "loss": 1.2915,
      "step": 285
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9828193198019297e-05,
      "loss": 1.2786,
      "step": 290
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.981796951614701e-05,
      "loss": 1.2994,
      "step": 295
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9807453162641745e-05,
      "loss": 1.3009,
      "step": 300
    },
    {
      "epoch": 0.3,
      "eval_loss": 1.3045789003372192,
      "eval_runtime": 152.7904,
      "eval_samples_per_second": 92.624,
      "eval_steps_per_second": 2.899,
      "step": 300
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9796644450993812e-05,
      "loss": 1.274,
      "step": 305
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.978554370340866e-05,
      "loss": 1.2899,
      "step": 310
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9774151250797278e-05,
      "loss": 1.311,
      "step": 315
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9762467432766303e-05,
      "loss": 1.2917,
      "step": 320
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.975049259760793e-05,
      "loss": 1.2636,
      "step": 325
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.973822710228951e-05,
      "loss": 1.305,
      "step": 330
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.972567131244291e-05,
      "loss": 1.2816,
      "step": 335
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9712825602353617e-05,
      "loss": 1.2708,
      "step": 340
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.969969035494958e-05,
      "loss": 1.2657,
      "step": 345
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9686265961789807e-05,
      "loss": 1.2958,
      "step": 350
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9672552823052663e-05,
      "loss": 1.2901,
      "step": 355
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.965855134752397e-05,
      "loss": 1.2807,
      "step": 360
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9644261952584804e-05,
      "loss": 1.2744,
      "step": 365
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.962968506419906e-05,
      "loss": 1.2891,
      "step": 370
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9614821116900763e-05,
      "loss": 1.2947,
      "step": 375
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9599670553781087e-05,
      "loss": 1.251,
      "step": 380
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9584233826475178e-05,
      "loss": 1.2793,
      "step": 385
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9568511395148674e-05,
      "loss": 1.2735,
      "step": 390
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.955250372848398e-05,
      "loss": 1.2796,
      "step": 395
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9536211303666324e-05,
      "loss": 1.2915,
      "step": 400
    },
    {
      "epoch": 0.4,
      "eval_loss": 1.296703577041626,
      "eval_runtime": 152.879,
      "eval_samples_per_second": 92.57,
      "eval_steps_per_second": 2.898,
      "step": 400
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9519634606369506e-05,
      "loss": 1.2607,
      "step": 405
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9502774130741427e-05,
      "loss": 1.3004,
      "step": 410
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.948563037938937e-05,
      "loss": 1.2881,
      "step": 415
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9468203863365002e-05,
      "loss": 1.2765,
      "step": 420
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9450495102149144e-05,
      "loss": 1.2955,
      "step": 425
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.94325046236363e-05,
      "loss": 1.3082,
      "step": 430
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9414232964118893e-05,
      "loss": 1.2897,
      "step": 435
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.93956806682713e-05,
      "loss": 1.287,
      "step": 440
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.9376848289133607e-05,
      "loss": 1.2774,
      "step": 445
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.9357736388095132e-05,
      "loss": 1.2961,
      "step": 450
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.9338345534877675e-05,
      "loss": 1.2771,
      "step": 455
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.9318676307518543e-05,
      "loss": 1.2679,
      "step": 460
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.9298729292353318e-05,
      "loss": 1.3019,
      "step": 465
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.9278505083998393e-05,
      "loss": 1.2785,
      "step": 470
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.9258004285333204e-05,
      "loss": 1.2741,
      "step": 475
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.923722750748231e-05,
      "loss": 1.281,
      "step": 480
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.9216175369797145e-05,
      "loss": 1.2836,
      "step": 485
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.9194848499837567e-05,
      "loss": 1.292,
      "step": 490
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.9173247533353137e-05,
      "loss": 1.2937,
      "step": 495
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.9151373114264183e-05,
      "loss": 1.2714,
      "step": 500
    },
    {
      "epoch": 0.5,
      "eval_loss": 1.2905510663986206,
      "eval_runtime": 152.8556,
      "eval_samples_per_second": 92.584,
      "eval_steps_per_second": 2.898,
      "step": 500
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.9129225894642594e-05,
      "loss": 1.2424,
      "step": 505
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.9106806534692392e-05,
      "loss": 1.2737,
      "step": 510
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.9084115702730043e-05,
      "loss": 1.268,
      "step": 515
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.9061154075164535e-05,
      "loss": 1.2651,
      "step": 520
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.903792233647722e-05,
      "loss": 1.2828,
      "step": 525
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.9014421179201407e-05,
      "loss": 1.2656,
      "step": 530
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.899065130390171e-05,
      "loss": 1.2667,
      "step": 535
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.896661341915318e-05,
      "loss": 1.3043,
      "step": 540
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.8942308241520174e-05,
      "loss": 1.2754,
      "step": 545
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.8917736495534988e-05,
      "loss": 1.2773,
      "step": 550
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.8892898913676264e-05,
      "loss": 1.2737,
      "step": 555
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.8867796236347168e-05,
      "loss": 1.2881,
      "step": 560
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.8842429211853298e-05,
      "loss": 1.2547,
      "step": 565
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.8816798596380388e-05,
      "loss": 1.256,
      "step": 570
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.879090515397176e-05,
      "loss": 1.291,
      "step": 575
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.876474965650556e-05,
      "loss": 1.2748,
      "step": 580
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.8738332883671738e-05,
      "loss": 1.2653,
      "step": 585
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.8711655622948805e-05,
      "loss": 1.2727,
      "step": 590
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.8684718669580368e-05,
      "loss": 1.2425,
      "step": 595
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.8657522826551412e-05,
      "loss": 1.2811,
      "step": 600
    },
    {
      "epoch": 0.6,
      "eval_loss": 1.2853693962097168,
      "eval_runtime": 152.8403,
      "eval_samples_per_second": 92.593,
      "eval_steps_per_second": 2.898,
      "step": 600
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.8630068904564373e-05,
      "loss": 1.2578,
      "step": 605
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.8602357722014966e-05,
      "loss": 1.254,
      "step": 610
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8574390104967784e-05,
      "loss": 1.2757,
      "step": 615
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.85461668871317e-05,
      "loss": 1.2756,
      "step": 620
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.851768890983496e-05,
      "loss": 1.2673,
      "step": 625
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.8488957022000164e-05,
      "loss": 1.2752,
      "step": 630
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8459972080118925e-05,
      "loss": 1.2691,
      "step": 635
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8430734948226336e-05,
      "loss": 1.2737,
      "step": 640
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8401246497875238e-05,
      "loss": 1.2601,
      "step": 645
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8371507608110206e-05,
      "loss": 1.2711,
      "step": 650
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.8341519165441373e-05,
      "loss": 1.2861,
      "step": 655
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.831128206381799e-05,
      "loss": 1.2867,
      "step": 660
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.8280797204601782e-05,
      "loss": 1.2666,
      "step": 665
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.8250065496540062e-05,
      "loss": 1.2876,
      "step": 670
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.8219087855738678e-05,
      "loss": 1.2693,
      "step": 675
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.818786520563467e-05,
      "loss": 1.2868,
      "step": 680
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.815639847696875e-05,
      "loss": 1.252,
      "step": 685
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.812468860775757e-05,
      "loss": 1.2457,
      "step": 690
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.809273654326575e-05,
      "loss": 1.2627,
      "step": 695
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.8060543235977696e-05,
      "loss": 1.2621,
      "step": 700
    },
    {
      "epoch": 0.7,
      "eval_loss": 1.2807434797286987,
      "eval_runtime": 152.8742,
      "eval_samples_per_second": 92.573,
      "eval_steps_per_second": 2.898,
      "step": 700
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.8028109645569212e-05,
      "loss": 1.2517,
      "step": 705
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.79954367388789e-05,
      "loss": 1.2717,
      "step": 710
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.7962525489879324e-05,
      "loss": 1.2707,
      "step": 715
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.7929376879647984e-05,
      "loss": 1.2745,
      "step": 720
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.7895991896338066e-05,
      "loss": 1.2715,
      "step": 725
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.7862371535149e-05,
      "loss": 1.2419,
      "step": 730
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.782851679829676e-05,
      "loss": 1.2804,
      "step": 735
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.779442869498403e-05,
      "loss": 1.2821,
      "step": 740
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.7760108241370097e-05,
      "loss": 1.2569,
      "step": 745
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.7725556460540553e-05,
      "loss": 1.2802,
      "step": 750
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.7690774382476808e-05,
      "loss": 1.258,
      "step": 755
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.7655763044025395e-05,
      "loss": 1.2775,
      "step": 760
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.7620523488867042e-05,
      "loss": 1.2684,
      "step": 765
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.7585056767485574e-05,
      "loss": 1.2835,
      "step": 770
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.7549363937136586e-05,
      "loss": 1.246,
      "step": 775
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.7513446061815946e-05,
      "loss": 1.2623,
      "step": 780
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.747730421222806e-05,
      "loss": 1.2679,
      "step": 785
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.744093946575396e-05,
      "loss": 1.2835,
      "step": 790
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.7404352906419187e-05,
      "loss": 1.2549,
      "step": 795
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.7367545624861483e-05,
      "loss": 1.2406,
      "step": 800
    },
    {
      "epoch": 0.8,
      "eval_loss": 1.2767128944396973,
      "eval_runtime": 152.8153,
      "eval_samples_per_second": 92.609,
      "eval_steps_per_second": 2.899,
      "step": 800
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.7330518718298263e-05,
      "loss": 1.2444,
      "step": 805
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.7293273290493932e-05,
      "loss": 1.2615,
      "step": 810
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.725581045172696e-05,
      "loss": 1.2722,
      "step": 815
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.721813131875679e-05,
      "loss": 1.2676,
      "step": 820
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.718023701479055e-05,
      "loss": 1.2455,
      "step": 825
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.714212866944958e-05,
      "loss": 1.2792,
      "step": 830
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.710380741873574e-05,
      "loss": 1.2674,
      "step": 835
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.706527440499756e-05,
      "loss": 1.2514,
      "step": 840
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.702653077689618e-05,
      "loss": 1.2564,
      "step": 845
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.6987577689371107e-05,
      "loss": 1.2636,
      "step": 850
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.6948416303605796e-05,
      "loss": 1.2583,
      "step": 855
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.690904778699302e-05,
      "loss": 1.2724,
      "step": 860
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.68694733131001e-05,
      "loss": 1.2631,
      "step": 865
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.6829694061633874e-05,
      "loss": 1.2713,
      "step": 870
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.6789711218405575e-05,
      "loss": 1.2453,
      "step": 875
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.6749525975295455e-05,
      "loss": 1.2941,
      "step": 880
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.670913953021727e-05,
      "loss": 1.2435,
      "step": 885
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.666855308708257e-05,
      "loss": 1.261,
      "step": 890
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.6627767855764786e-05,
      "loss": 1.2629,
      "step": 895
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.6586785052063206e-05,
      "loss": 1.2371,
      "step": 900
    },
    {
      "epoch": 0.9,
      "eval_loss": 1.273120403289795,
      "eval_runtime": 152.8358,
      "eval_samples_per_second": 92.596,
      "eval_steps_per_second": 2.899,
      "step": 900
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.65456058976667e-05,
      "loss": 1.2868,
      "step": 905
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.6504231620117305e-05,
      "loss": 1.2628,
      "step": 910
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.646266345277365e-05,
      "loss": 1.2489,
      "step": 915
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.642090263477417e-05,
      "loss": 1.2645,
      "step": 920
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.6378950411000183e-05,
      "loss": 1.2803,
      "step": 925
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.6336808032038768e-05,
      "loss": 1.2429,
      "step": 930
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.6294476754145497e-05,
      "loss": 1.2285,
      "step": 935
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.625195783920697e-05,
      "loss": 1.2508,
      "step": 940
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.6209252554703215e-05,
      "loss": 1.2661,
      "step": 945
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.61663621736699e-05,
      "loss": 1.2709,
      "step": 950
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.6123287974660378e-05,
      "loss": 1.25,
      "step": 955
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.608003124170758e-05,
      "loss": 1.2693,
      "step": 960
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.603659326428573e-05,
      "loss": 1.2574,
      "step": 965
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.599297533727192e-05,
      "loss": 1.2554,
      "step": 970
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.59491787609075e-05,
      "loss": 1.2678,
      "step": 975
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.590520484075932e-05,
      "loss": 1.242,
      "step": 980
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.5861054887680805e-05,
      "loss": 1.2637,
      "step": 985
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.581673021777289e-05,
      "loss": 1.2353,
      "step": 990
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.5772232152344797e-05,
      "loss": 1.2466,
      "step": 995
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.5727562017874602e-05,
      "loss": 1.2547,
      "step": 1000
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.2699366807937622,
      "eval_runtime": 152.6518,
      "eval_samples_per_second": 92.708,
      "eval_steps_per_second": 2.902,
      "step": 1000
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.5682721145969742e-05,
      "loss": 1.2239,
      "step": 1005
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.5637710873327305e-05,
      "loss": 1.228,
      "step": 1010
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.5592532541694173e-05,
      "loss": 1.2122,
      "step": 1015
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.554718749782703e-05,
      "loss": 1.2218,
      "step": 1020
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.5501677093452218e-05,
      "loss": 1.228,
      "step": 1025
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.5456002685225447e-05,
      "loss": 1.2189,
      "step": 1030
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.5410165634691343e-05,
      "loss": 1.2289,
      "step": 1035
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.5364167308242866e-05,
      "loss": 1.2214,
      "step": 1040
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.5318009077080577e-05,
      "loss": 1.1938,
      "step": 1045
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.5271692317171765e-05,
      "loss": 1.2181,
      "step": 1050
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.5225218409209424e-05,
      "loss": 1.2311,
      "step": 1055
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.5178588738571108e-05,
      "loss": 1.2348,
      "step": 1060
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.5131804695277612e-05,
      "loss": 1.2268,
      "step": 1065
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.5084867673951558e-05,
      "loss": 1.2187,
      "step": 1070
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.5037779073775805e-05,
      "loss": 1.2285,
      "step": 1075
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.4990540298451748e-05,
      "loss": 1.2053,
      "step": 1080
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.4943152756157476e-05,
      "loss": 1.2075,
      "step": 1085
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.4895617859505782e-05,
      "loss": 1.2191,
      "step": 1090
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.4847937025502067e-05,
      "loss": 1.203,
      "step": 1095
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.4800111675502095e-05,
      "loss": 1.2085,
      "step": 1100
    },
    {
      "epoch": 1.1,
      "eval_loss": 1.2692837715148926,
      "eval_runtime": 152.6613,
      "eval_samples_per_second": 92.702,
      "eval_steps_per_second": 2.902,
      "step": 1100
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.4752143235169623e-05,
      "loss": 1.2413,
      "step": 1105
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.4704033134433895e-05,
      "loss": 1.1931,
      "step": 1110
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.4655782807447025e-05,
      "loss": 1.2152,
      "step": 1115
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.460739369254125e-05,
      "loss": 1.2152,
      "step": 1120
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.4558867232186034e-05,
      "loss": 1.2088,
      "step": 1125
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.4510204872945084e-05,
      "loss": 1.1985,
      "step": 1130
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.446140806543323e-05,
      "loss": 1.2047,
      "step": 1135
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.4412478264273173e-05,
      "loss": 1.214,
      "step": 1140
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.4363416928052126e-05,
      "loss": 1.222,
      "step": 1145
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.4314225519278333e-05,
      "loss": 1.2188,
      "step": 1150
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.4264905504337483e-05,
      "loss": 1.1926,
      "step": 1155
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.4215458353448981e-05,
      "loss": 1.2045,
      "step": 1160
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.4165885540622128e-05,
      "loss": 1.2075,
      "step": 1165
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.4116188543612182e-05,
      "loss": 1.2222,
      "step": 1170
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.4066368843876305e-05,
      "loss": 1.206,
      "step": 1175
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.4016427926529406e-05,
      "loss": 1.2172,
      "step": 1180
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.3966367280299865e-05,
      "loss": 1.2021,
      "step": 1185
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.3916188397485147e-05,
      "loss": 1.2137,
      "step": 1190
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.3865892773907335e-05,
      "loss": 1.2151,
      "step": 1195
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.3815481908868524e-05,
      "loss": 1.2253,
      "step": 1200
    },
    {
      "epoch": 1.2,
      "eval_loss": 1.266944408416748,
      "eval_runtime": 152.7809,
      "eval_samples_per_second": 92.629,
      "eval_steps_per_second": 2.9,
      "step": 1200
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.3764957305106142e-05,
      "loss": 1.2213,
      "step": 1205
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.3714320468748126e-05,
      "loss": 1.2396,
      "step": 1210
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.3663572909268064e-05,
      "loss": 1.195,
      "step": 1215
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.3612716139440164e-05,
      "loss": 1.2225,
      "step": 1220
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.3561751675294182e-05,
      "loss": 1.2257,
      "step": 1225
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.3510681036070206e-05,
      "loss": 1.2274,
      "step": 1230
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.3459505744173392e-05,
      "loss": 1.2203,
      "step": 1235
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.3408227325128569e-05,
      "loss": 1.2354,
      "step": 1240
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.3356847307534759e-05,
      "loss": 1.2147,
      "step": 1245
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.3305367223019622e-05,
      "loss": 1.1968,
      "step": 1250
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.3253788606193791e-05,
      "loss": 1.192,
      "step": 1255
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.3202112994605128e-05,
      "loss": 1.1817,
      "step": 1260
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.3150341928692875e-05,
      "loss": 1.207,
      "step": 1265
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.309847695174177e-05,
      "loss": 1.1933,
      "step": 1270
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.3046519609836002e-05,
      "loss": 1.2172,
      "step": 1275
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.2994471451813149e-05,
      "loss": 1.2269,
      "step": 1280
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.2942334029217993e-05,
      "loss": 1.2186,
      "step": 1285
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.2890108896256282e-05,
      "loss": 1.2287,
      "step": 1290
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.2837797609748384e-05,
      "loss": 1.2276,
      "step": 1295
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.2785401729082893e-05,
      "loss": 1.215,
      "step": 1300
    },
    {
      "epoch": 1.3,
      "eval_loss": 1.2649345397949219,
      "eval_runtime": 152.6977,
      "eval_samples_per_second": 92.68,
      "eval_steps_per_second": 2.901,
      "step": 1300
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.273292281617013e-05,
      "loss": 1.2203,
      "step": 1305
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.2680362435395595e-05,
      "loss": 1.2208,
      "step": 1310
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.2627722153573326e-05,
      "loss": 1.2206,
      "step": 1315
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.2575003539899198e-05,
      "loss": 1.2156,
      "step": 1320
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.2522208165904137e-05,
      "loss": 1.2144,
      "step": 1325
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.2469337605407284e-05,
      "loss": 1.2217,
      "step": 1330
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.2416393434469068e-05,
      "loss": 1.2111,
      "step": 1335
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.2363377231344239e-05,
      "loss": 1.2218,
      "step": 1340
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.2310290576434795e-05,
      "loss": 1.221,
      "step": 1345
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.2257135052242906e-05,
      "loss": 1.2137,
      "step": 1350
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.220391224332371e-05,
      "loss": 1.2183,
      "step": 1355
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.215062373623809e-05,
      "loss": 1.2343,
      "step": 1360
    },
    {
      "epoch": 1.37,
      "learning_rate": 1.2097271119505382e-05,
      "loss": 1.2279,
      "step": 1365
    },
    {
      "epoch": 1.37,
      "learning_rate": 1.2043855983556014e-05,
      "loss": 1.2252,
      "step": 1370
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.1990379920684097e-05,
      "loss": 1.2363,
      "step": 1375
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.1936844524999966e-05,
      "loss": 1.2285,
      "step": 1380
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.1883251392382649e-05,
      "loss": 1.2088,
      "step": 1385
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.1829602120432305e-05,
      "loss": 1.2299,
      "step": 1390
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.177589830842259e-05,
      "loss": 1.2032,
      "step": 1395
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.1722141557252998e-05,
      "loss": 1.2103,
      "step": 1400
    },
    {
      "epoch": 1.4,
      "eval_loss": 1.2630119323730469,
      "eval_runtime": 152.7782,
      "eval_samples_per_second": 92.631,
      "eval_steps_per_second": 2.9,
      "step": 1400
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.1668333469401109e-05,
      "loss": 1.2004,
      "step": 1405
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.1614475648874861e-05,
      "loss": 1.2238,
      "step": 1410
    },
    {
      "epoch": 1.42,
      "learning_rate": 1.1560569701164696e-05,
      "loss": 1.2373,
      "step": 1415
    },
    {
      "epoch": 1.42,
      "learning_rate": 1.1506617233195732e-05,
      "loss": 1.2081,
      "step": 1420
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.145261985327983e-05,
      "loss": 1.2074,
      "step": 1425
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.1398579171067679e-05,
      "loss": 1.2049,
      "step": 1430
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.1344496797500795e-05,
      "loss": 1.193,
      "step": 1435
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.1290374344763506e-05,
      "loss": 1.2352,
      "step": 1440
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.1236213426234894e-05,
      "loss": 1.1985,
      "step": 1445
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.1182015656440692e-05,
      "loss": 1.1942,
      "step": 1450
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.112778265100517e-05,
      "loss": 1.22,
      "step": 1455
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.1073516026602959e-05,
      "loss": 1.2066,
      "step": 1460
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.1019217400910864e-05,
      "loss": 1.1993,
      "step": 1465
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.096488839255965e-05,
      "loss": 1.2043,
      "step": 1470
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.0910530621085769e-05,
      "loss": 1.2347,
      "step": 1475
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.085614570688311e-05,
      "loss": 1.2172,
      "step": 1480
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.080173527115467e-05,
      "loss": 1.2095,
      "step": 1485
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.0747300935864245e-05,
      "loss": 1.2196,
      "step": 1490
    },
    {
      "epoch": 1.5,
      "learning_rate": 1.0692844323688065e-05,
      "loss": 1.204,
      "step": 1495
    },
    {
      "epoch": 1.5,
      "learning_rate": 1.0638367057966442e-05,
      "loss": 1.2081,
      "step": 1500
    },
    {
      "epoch": 1.5,
      "eval_loss": 1.261203646659851,
      "eval_runtime": 152.8657,
      "eval_samples_per_second": 92.578,
      "eval_steps_per_second": 2.898,
      "step": 1500
    },
    {
      "epoch": 1.51,
      "learning_rate": 1.0583870762655347e-05,
      "loss": 1.1952,
      "step": 1505
    },
    {
      "epoch": 1.51,
      "learning_rate": 1.0529357062278033e-05,
      "loss": 1.2291,
      "step": 1510
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.0474827581876594e-05,
      "loss": 1.2092,
      "step": 1515
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.042028394696352e-05,
      "loss": 1.2042,
      "step": 1520
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.0365727783473238e-05,
      "loss": 1.1966,
      "step": 1525
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.031116071771366e-05,
      "loss": 1.2092,
      "step": 1530
    },
    {
      "epoch": 1.54,
      "learning_rate": 1.0256584376317686e-05,
      "loss": 1.2127,
      "step": 1535
    },
    {
      "epoch": 1.54,
      "learning_rate": 1.0202000386194726e-05,
      "loss": 1.2148,
      "step": 1540
    },
    {
      "epoch": 1.55,
      "learning_rate": 1.0147410374482188e-05,
      "loss": 1.1877,
      "step": 1545
    },
    {
      "epoch": 1.55,
      "learning_rate": 1.009281596849699e-05,
      "loss": 1.2095,
      "step": 1550
    },
    {
      "epoch": 1.56,
      "learning_rate": 1.003821879568704e-05,
      "loss": 1.1959,
      "step": 1555
    },
    {
      "epoch": 1.56,
      "learning_rate": 9.983620483582726e-06,
      "loss": 1.2015,
      "step": 1560
    },
    {
      "epoch": 1.57,
      "learning_rate": 9.929022659748395e-06,
      "loss": 1.2127,
      "step": 1565
    },
    {
      "epoch": 1.57,
      "learning_rate": 9.874426951733844e-06,
      "loss": 1.2026,
      "step": 1570
    },
    {
      "epoch": 1.58,
      "learning_rate": 9.81983498702579e-06,
      "loss": 1.2011,
      "step": 1575
    },
    {
      "epoch": 1.58,
      "learning_rate": 9.76524839299937e-06,
      "loss": 1.1925,
      "step": 1580
    },
    {
      "epoch": 1.59,
      "learning_rate": 9.710668796869616e-06,
      "loss": 1.2147,
      "step": 1585
    },
    {
      "epoch": 1.59,
      "learning_rate": 9.65609782564296e-06,
      "loss": 1.2214,
      "step": 1590
    },
    {
      "epoch": 1.6,
      "learning_rate": 9.601537106068727e-06,
      "loss": 1.1978,
      "step": 1595
    },
    {
      "epoch": 1.6,
      "learning_rate": 9.546988264590637e-06,
      "loss": 1.2033,
      "step": 1600
    },
    {
      "epoch": 1.6,
      "eval_loss": 1.2596791982650757,
      "eval_runtime": 152.6062,
      "eval_samples_per_second": 92.735,
      "eval_steps_per_second": 2.903,
      "step": 1600
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.49245292729833e-06,
      "loss": 1.2225,
      "step": 1605
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.437932719878888e-06,
      "loss": 1.2053,
      "step": 1610
    },
    {
      "epoch": 1.62,
      "learning_rate": 9.383429267568379e-06,
      "loss": 1.2083,
      "step": 1615
    },
    {
      "epoch": 1.62,
      "learning_rate": 9.328944195103397e-06,
      "loss": 1.2234,
      "step": 1620
    },
    {
      "epoch": 1.63,
      "learning_rate": 9.27447912667264e-06,
      "loss": 1.1946,
      "step": 1625
    },
    {
      "epoch": 1.63,
      "learning_rate": 9.220035685868495e-06,
      "loss": 1.2128,
      "step": 1630
    },
    {
      "epoch": 1.64,
      "learning_rate": 9.165615495638627e-06,
      "loss": 1.2111,
      "step": 1635
    },
    {
      "epoch": 1.64,
      "learning_rate": 9.111220178237607e-06,
      "loss": 1.2125,
      "step": 1640
    },
    {
      "epoch": 1.65,
      "learning_rate": 9.056851355178556e-06,
      "loss": 1.2026,
      "step": 1645
    },
    {
      "epoch": 1.65,
      "learning_rate": 9.002510647184803e-06,
      "loss": 1.2093,
      "step": 1650
    },
    {
      "epoch": 1.66,
      "learning_rate": 8.94819967414157e-06,
      "loss": 1.1912,
      "step": 1655
    },
    {
      "epoch": 1.66,
      "learning_rate": 8.893920055047686e-06,
      "loss": 1.201,
      "step": 1660
    },
    {
      "epoch": 1.67,
      "learning_rate": 8.83967340796733e-06,
      "loss": 1.203,
      "step": 1665
    },
    {
      "epoch": 1.67,
      "learning_rate": 8.785461349981789e-06,
      "loss": 1.2098,
      "step": 1670
    },
    {
      "epoch": 1.68,
      "learning_rate": 8.731285497141258e-06,
      "loss": 1.2068,
      "step": 1675
    },
    {
      "epoch": 1.68,
      "learning_rate": 8.67714746441666e-06,
      "loss": 1.2164,
      "step": 1680
    },
    {
      "epoch": 1.69,
      "learning_rate": 8.62304886565151e-06,
      "loss": 1.2234,
      "step": 1685
    },
    {
      "epoch": 1.69,
      "learning_rate": 8.56899131351381e-06,
      "loss": 1.2237,
      "step": 1690
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.514976419447963e-06,
      "loss": 1.2053,
      "step": 1695
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.461005793626752e-06,
      "loss": 1.2307,
      "step": 1700
    },
    {
      "epoch": 1.7,
      "eval_loss": 1.2582082748413086,
      "eval_runtime": 152.6347,
      "eval_samples_per_second": 92.718,
      "eval_steps_per_second": 2.902,
      "step": 1700
    },
    {
      "epoch": 1.71,
      "learning_rate": 8.407081044903323e-06,
      "loss": 1.1903,
      "step": 1705
    },
    {
      "epoch": 1.71,
      "learning_rate": 8.353203780763239e-06,
      "loss": 1.2058,
      "step": 1710
    },
    {
      "epoch": 1.72,
      "learning_rate": 8.299375607276564e-06,
      "loss": 1.2081,
      "step": 1715
    },
    {
      "epoch": 1.72,
      "learning_rate": 8.245598129049973e-06,
      "loss": 1.2275,
      "step": 1720
    },
    {
      "epoch": 1.73,
      "learning_rate": 8.191872949178926e-06,
      "loss": 1.2165,
      "step": 1725
    },
    {
      "epoch": 1.73,
      "learning_rate": 8.138201669199878e-06,
      "loss": 1.2105,
      "step": 1730
    },
    {
      "epoch": 1.74,
      "learning_rate": 8.084585889042547e-06,
      "loss": 1.1934,
      "step": 1735
    },
    {
      "epoch": 1.74,
      "learning_rate": 8.031027206982211e-06,
      "loss": 1.2045,
      "step": 1740
    },
    {
      "epoch": 1.75,
      "learning_rate": 7.977527219592057e-06,
      "loss": 1.2253,
      "step": 1745
    },
    {
      "epoch": 1.75,
      "learning_rate": 7.924087521695602e-06,
      "loss": 1.2139,
      "step": 1750
    },
    {
      "epoch": 1.76,
      "learning_rate": 7.87070970631915e-06,
      "loss": 1.2129,
      "step": 1755
    },
    {
      "epoch": 1.76,
      "learning_rate": 7.817395364644294e-06,
      "loss": 1.2117,
      "step": 1760
    },
    {
      "epoch": 1.77,
      "learning_rate": 7.76414608596049e-06,
      "loss": 1.2058,
      "step": 1765
    },
    {
      "epoch": 1.77,
      "learning_rate": 7.710963457617681e-06,
      "loss": 1.2286,
      "step": 1770
    },
    {
      "epoch": 1.78,
      "learning_rate": 7.657849064978976e-06,
      "loss": 1.2063,
      "step": 1775
    },
    {
      "epoch": 1.78,
      "learning_rate": 7.604804491373396e-06,
      "loss": 1.1878,
      "step": 1780
    },
    {
      "epoch": 1.79,
      "learning_rate": 7.551831318048663e-06,
      "loss": 1.1991,
      "step": 1785
    },
    {
      "epoch": 1.79,
      "learning_rate": 7.498931124124077e-06,
      "loss": 1.2007,
      "step": 1790
    },
    {
      "epoch": 1.8,
      "learning_rate": 7.446105486543438e-06,
      "loss": 1.2314,
      "step": 1795
    },
    {
      "epoch": 1.8,
      "learning_rate": 7.393355980028039e-06,
      "loss": 1.2038,
      "step": 1800
    },
    {
      "epoch": 1.8,
      "eval_loss": 1.2567870616912842,
      "eval_runtime": 152.7043,
      "eval_samples_per_second": 92.676,
      "eval_steps_per_second": 2.901,
      "step": 1800
    },
    {
      "epoch": 1.81,
      "learning_rate": 7.340684177029709e-06,
      "loss": 1.2296,
      "step": 1805
    },
    {
      "epoch": 1.81,
      "learning_rate": 7.288091647683966e-06,
      "loss": 1.2335,
      "step": 1810
    },
    {
      "epoch": 1.82,
      "learning_rate": 7.235579959763189e-06,
      "loss": 1.1986,
      "step": 1815
    },
    {
      "epoch": 1.82,
      "learning_rate": 7.183150678629898e-06,
      "loss": 1.1929,
      "step": 1820
    },
    {
      "epoch": 1.83,
      "learning_rate": 7.1308053671900675e-06,
      "loss": 1.2053,
      "step": 1825
    },
    {
      "epoch": 1.83,
      "learning_rate": 7.078545585846578e-06,
      "loss": 1.201,
      "step": 1830
    },
    {
      "epoch": 1.84,
      "learning_rate": 7.026372892452653e-06,
      "loss": 1.2166,
      "step": 1835
    },
    {
      "epoch": 1.84,
      "learning_rate": 6.974288842265455e-06,
      "loss": 1.2185,
      "step": 1840
    },
    {
      "epoch": 1.85,
      "learning_rate": 6.922294987899705e-06,
      "loss": 1.2157,
      "step": 1845
    },
    {
      "epoch": 1.85,
      "learning_rate": 6.870392879281407e-06,
      "loss": 1.2103,
      "step": 1850
    },
    {
      "epoch": 1.86,
      "learning_rate": 6.818584063601646e-06,
      "loss": 1.1871,
      "step": 1855
    },
    {
      "epoch": 1.86,
      "learning_rate": 6.766870085270459e-06,
      "loss": 1.2035,
      "step": 1860
    },
    {
      "epoch": 1.87,
      "learning_rate": 6.715252485870804e-06,
      "loss": 1.2021,
      "step": 1865
    },
    {
      "epoch": 1.87,
      "learning_rate": 6.663732804112604e-06,
      "loss": 1.1799,
      "step": 1870
    },
    {
      "epoch": 1.88,
      "learning_rate": 6.6123125757868764e-06,
      "loss": 1.1871,
      "step": 1875
    },
    {
      "epoch": 1.88,
      "learning_rate": 6.5609933337199515e-06,
      "loss": 1.1941,
      "step": 1880
    },
    {
      "epoch": 1.89,
      "learning_rate": 6.50977660772778e-06,
      "loss": 1.2048,
      "step": 1885
    },
    {
      "epoch": 1.89,
      "learning_rate": 6.458663924570332e-06,
      "loss": 1.2106,
      "step": 1890
    },
    {
      "epoch": 1.9,
      "learning_rate": 6.4076568079060815e-06,
      "loss": 1.2121,
      "step": 1895
    },
    {
      "epoch": 1.9,
      "learning_rate": 6.356756778246587e-06,
      "loss": 1.2014,
      "step": 1900
    },
    {
      "epoch": 1.9,
      "eval_loss": 1.2556641101837158,
      "eval_runtime": 152.825,
      "eval_samples_per_second": 92.603,
      "eval_steps_per_second": 2.899,
      "step": 1900
    },
    {
      "epoch": 1.91,
      "learning_rate": 6.305965352911162e-06,
      "loss": 1.2225,
      "step": 1905
    },
    {
      "epoch": 1.91,
      "learning_rate": 6.255284045981653e-06,
      "loss": 1.1912,
      "step": 1910
    },
    {
      "epoch": 1.92,
      "learning_rate": 6.204714368257303e-06,
      "loss": 1.2343,
      "step": 1915
    },
    {
      "epoch": 1.92,
      "learning_rate": 6.1542578272096975e-06,
      "loss": 1.2197,
      "step": 1920
    },
    {
      "epoch": 1.93,
      "learning_rate": 6.103915926937853e-06,
      "loss": 1.1902,
      "step": 1925
    },
    {
      "epoch": 1.93,
      "learning_rate": 6.053690168123367e-06,
      "loss": 1.2308,
      "step": 1930
    },
    {
      "epoch": 1.94,
      "learning_rate": 6.003582047985684e-06,
      "loss": 1.2306,
      "step": 1935
    },
    {
      "epoch": 1.94,
      "learning_rate": 5.953593060237457e-06,
      "loss": 1.2162,
      "step": 1940
    },
    {
      "epoch": 1.95,
      "learning_rate": 5.903724695040036e-06,
      "loss": 1.1785,
      "step": 1945
    },
    {
      "epoch": 1.95,
      "learning_rate": 5.8539784389590345e-06,
      "loss": 1.2002,
      "step": 1950
    },
    {
      "epoch": 1.96,
      "learning_rate": 5.804355774920017e-06,
      "loss": 1.2138,
      "step": 1955
    },
    {
      "epoch": 1.96,
      "learning_rate": 5.754858182164298e-06,
      "loss": 1.1786,
      "step": 1960
    },
    {
      "epoch": 1.97,
      "learning_rate": 5.705487136204846e-06,
      "loss": 1.223,
      "step": 1965
    },
    {
      "epoch": 1.97,
      "learning_rate": 5.65624410878228e-06,
      "loss": 1.2107,
      "step": 1970
    },
    {
      "epoch": 1.98,
      "learning_rate": 5.60713056782103e-06,
      "loss": 1.1922,
      "step": 1975
    },
    {
      "epoch": 1.98,
      "learning_rate": 5.558147977385558e-06,
      "loss": 1.2078,
      "step": 1980
    },
    {
      "epoch": 1.99,
      "learning_rate": 5.509297797636712e-06,
      "loss": 1.217,
      "step": 1985
    },
    {
      "epoch": 1.99,
      "learning_rate": 5.46058148478821e-06,
      "loss": 1.2227,
      "step": 1990
    },
    {
      "epoch": 2.0,
      "learning_rate": 5.412000491063225e-06,
      "loss": 1.2072,
      "step": 1995
    },
    {
      "epoch": 2.0,
      "learning_rate": 5.363556264651093e-06,
      "loss": 1.188,
      "step": 2000
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.2545795440673828,
      "eval_runtime": 152.4763,
      "eval_samples_per_second": 92.814,
      "eval_steps_per_second": 2.905,
      "step": 2000
    },
    {
      "epoch": 2.01,
      "learning_rate": 5.315250249664151e-06,
      "loss": 1.1591,
      "step": 2005
    },
    {
      "epoch": 2.01,
      "learning_rate": 5.267083886094668e-06,
      "loss": 1.1676,
      "step": 2010
    },
    {
      "epoch": 2.02,
      "learning_rate": 5.219058609771945e-06,
      "loss": 1.1739,
      "step": 2015
    },
    {
      "epoch": 2.02,
      "learning_rate": 5.171175852319503e-06,
      "loss": 1.1684,
      "step": 2020
    },
    {
      "epoch": 2.03,
      "learning_rate": 5.123437041112395e-06,
      "loss": 1.1805,
      "step": 2025
    },
    {
      "epoch": 2.03,
      "learning_rate": 5.075843599234678e-06,
      "loss": 1.178,
      "step": 2030
    },
    {
      "epoch": 2.04,
      "learning_rate": 5.0283969454369685e-06,
      "loss": 1.1644,
      "step": 2035
    },
    {
      "epoch": 2.04,
      "learning_rate": 4.98109849409417e-06,
      "loss": 1.1745,
      "step": 2040
    },
    {
      "epoch": 2.05,
      "learning_rate": 4.933949655163295e-06,
      "loss": 1.1824,
      "step": 2045
    },
    {
      "epoch": 2.05,
      "learning_rate": 4.886951834141446e-06,
      "loss": 1.1768,
      "step": 2050
    },
    {
      "epoch": 2.06,
      "learning_rate": 4.840106432023908e-06,
      "loss": 1.1733,
      "step": 2055
    },
    {
      "epoch": 2.06,
      "learning_rate": 4.7934148452623906e-06,
      "loss": 1.17,
      "step": 2060
    },
    {
      "epoch": 2.07,
      "learning_rate": 4.7468784657234055e-06,
      "loss": 1.1679,
      "step": 2065
    },
    {
      "epoch": 2.07,
      "learning_rate": 4.7004986806467635e-06,
      "loss": 1.183,
      "step": 2070
    },
    {
      "epoch": 2.08,
      "learning_rate": 4.65427687260423e-06,
      "loss": 1.1705,
      "step": 2075
    },
    {
      "epoch": 2.08,
      "learning_rate": 4.608214419458306e-06,
      "loss": 1.1644,
      "step": 2080
    },
    {
      "epoch": 2.09,
      "learning_rate": 4.562312694321156e-06,
      "loss": 1.1739,
      "step": 2085
    },
    {
      "epoch": 2.09,
      "learning_rate": 4.51657306551368e-06,
      "loss": 1.1691,
      "step": 2090
    },
    {
      "epoch": 2.1,
      "learning_rate": 4.470996896524713e-06,
      "loss": 1.1825,
      "step": 2095
    },
    {
      "epoch": 2.1,
      "learning_rate": 4.425585545970392e-06,
      "loss": 1.1473,
      "step": 2100
    },
    {
      "epoch": 2.1,
      "eval_loss": 1.2563046216964722,
      "eval_runtime": 152.6039,
      "eval_samples_per_second": 92.737,
      "eval_steps_per_second": 2.903,
      "step": 2100
    },
    {
      "epoch": 2.11,
      "learning_rate": 4.380340367553649e-06,
      "loss": 1.2004,
      "step": 2105
    },
    {
      "epoch": 2.11,
      "learning_rate": 4.335262710023861e-06,
      "loss": 1.157,
      "step": 2110
    },
    {
      "epoch": 2.12,
      "learning_rate": 4.290353917136639e-06,
      "loss": 1.1748,
      "step": 2115
    },
    {
      "epoch": 2.12,
      "learning_rate": 4.245615327613779e-06,
      "loss": 1.1689,
      "step": 2120
    },
    {
      "epoch": 2.13,
      "learning_rate": 4.201048275103339e-06,
      "loss": 1.1691,
      "step": 2125
    },
    {
      "epoch": 2.13,
      "learning_rate": 4.156654088139907e-06,
      "loss": 1.1869,
      "step": 2130
    },
    {
      "epoch": 2.14,
      "learning_rate": 4.112434090104978e-06,
      "loss": 1.1709,
      "step": 2135
    },
    {
      "epoch": 2.14,
      "learning_rate": 4.068389599187514e-06,
      "loss": 1.175,
      "step": 2140
    },
    {
      "epoch": 2.15,
      "learning_rate": 4.024521928344646e-06,
      "loss": 1.1444,
      "step": 2145
    },
    {
      "epoch": 2.15,
      "learning_rate": 3.980832385262532e-06,
      "loss": 1.1824,
      "step": 2150
    },
    {
      "epoch": 2.16,
      "learning_rate": 3.937322272317383e-06,
      "loss": 1.1718,
      "step": 2155
    },
    {
      "epoch": 2.16,
      "learning_rate": 3.893992886536637e-06,
      "loss": 1.1699,
      "step": 2160
    },
    {
      "epoch": 2.17,
      "learning_rate": 3.850845519560279e-06,
      "loss": 1.1815,
      "step": 2165
    },
    {
      "epoch": 2.17,
      "learning_rate": 3.807881457602367e-06,
      "loss": 1.1679,
      "step": 2170
    },
    {
      "epoch": 2.18,
      "learning_rate": 3.7651019814126656e-06,
      "loss": 1.2018,
      "step": 2175
    },
    {
      "epoch": 2.18,
      "learning_rate": 3.722508366238481e-06,
      "loss": 1.1885,
      "step": 2180
    },
    {
      "epoch": 2.19,
      "learning_rate": 3.6801018817866375e-06,
      "loss": 1.1572,
      "step": 2185
    },
    {
      "epoch": 2.19,
      "learning_rate": 3.6378837921856324e-06,
      "loss": 1.1708,
      "step": 2190
    },
    {
      "epoch": 2.2,
      "learning_rate": 3.5958553559479524e-06,
      "loss": 1.1669,
      "step": 2195
    },
    {
      "epoch": 2.2,
      "learning_rate": 3.5540178259325565e-06,
      "loss": 1.1872,
      "step": 2200
    },
    {
      "epoch": 2.2,
      "eval_loss": 1.2559261322021484,
      "eval_runtime": 152.7815,
      "eval_samples_per_second": 92.629,
      "eval_steps_per_second": 2.9,
      "step": 2200
    },
    {
      "epoch": 2.21,
      "learning_rate": 3.5123724493075318e-06,
      "loss": 1.1767,
      "step": 2205
    },
    {
      "epoch": 2.21,
      "learning_rate": 3.4709204675129025e-06,
      "loss": 1.2005,
      "step": 2210
    },
    {
      "epoch": 2.22,
      "learning_rate": 3.429663116223644e-06,
      "loss": 1.1564,
      "step": 2215
    },
    {
      "epoch": 2.22,
      "learning_rate": 3.388601625312833e-06,
      "loss": 1.2063,
      "step": 2220
    },
    {
      "epoch": 2.23,
      "learning_rate": 3.347737218814987e-06,
      "loss": 1.1659,
      "step": 2225
    },
    {
      "epoch": 2.23,
      "learning_rate": 3.307071148895835e-06,
| "loss": 1.1777, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "learning_rate": 3.2666045257847332e-06, | |
| "loss": 1.1724, | |
| "step": 2235 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "learning_rate": 3.22633865780106e-06, | |
| "loss": 1.1831, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "learning_rate": 3.1862747112557313e-06, | |
| "loss": 1.171, | |
| "step": 2245 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "learning_rate": 3.146413880446668e-06, | |
| "loss": 1.183, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 3.106757353616966e-06, | |
| "loss": 1.1724, | |
| "step": 2255 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 3.067306312919456e-06, | |
| "loss": 1.1855, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "learning_rate": 3.028061934381471e-06, | |
| "loss": 1.169, | |
| "step": 2265 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "learning_rate": 2.9890253878697885e-06, | |
| "loss": 1.2027, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "learning_rate": 2.950197837055755e-06, | |
| "loss": 1.1835, | |
| "step": 2275 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "learning_rate": 2.911580439380599e-06, | |
| "loss": 1.172, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "learning_rate": 2.8731743460209325e-06, | |
| "loss": 1.1915, | |
| "step": 2285 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "learning_rate": 2.834980701854417e-06, | |
| "loss": 1.1736, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "learning_rate": 2.7970006454256605e-06, | |
| "loss": 1.1861, | |
| "step": 2295 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "learning_rate": 2.7592353089122582e-06, | |
| "loss": 1.2086, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "eval_loss": 1.2553389072418213, | |
| "eval_runtime": 152.643, | |
| "eval_samples_per_second": 92.713, | |
| "eval_steps_per_second": 2.902, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "learning_rate": 2.7216858180910532e-06, | |
| "loss": 1.1664, | |
| "step": 2305 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "learning_rate": 2.6843532923045702e-06, | |
| "loss": 1.1633, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "learning_rate": 2.647238844427653e-06, | |
| "loss": 1.1584, | |
| "step": 2315 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "learning_rate": 2.610343580834287e-06, | |
| "loss": 1.1762, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "learning_rate": 2.5736686013646226e-06, | |
| "loss": 1.1907, | |
| "step": 2325 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "learning_rate": 2.53721499929218e-06, | |
| "loss": 1.1613, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "learning_rate": 2.5009838612912697e-06, | |
| "loss": 1.153, | |
| "step": 2335 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "learning_rate": 2.464976267404593e-06, | |
| "loss": 1.1823, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "learning_rate": 2.4291932910110473e-06, | |
| "loss": 1.164, | |
| "step": 2345 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "learning_rate": 2.3936359987937353e-06, | |
| "loss": 1.1889, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "learning_rate": 2.3583054507081526e-06, | |
| "loss": 1.1655, | |
| "step": 2355 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "learning_rate": 2.3232026999506062e-06, | |
| "loss": 1.1608, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "learning_rate": 2.2883287929268027e-06, | |
| "loss": 1.1638, | |
| "step": 2365 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "learning_rate": 2.2536847692206722e-06, | |
| "loss": 1.1982, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "learning_rate": 2.21927166156337e-06, | |
| "loss": 1.1586, | |
| "step": 2375 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "learning_rate": 2.185090495802491e-06, | |
| "loss": 1.1639, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "learning_rate": 2.1511422908714897e-06, | |
| "loss": 1.1899, | |
| "step": 2385 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "learning_rate": 2.1174280587593076e-06, | |
| "loss": 1.1679, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "learning_rate": 2.083948804480204e-06, | |
| "loss": 1.1791, | |
| "step": 2395 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "learning_rate": 2.0507055260438014e-06, | |
| "loss": 1.1896, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "eval_loss": 1.25504469871521, | |
| "eval_runtime": 152.7532, | |
| "eval_samples_per_second": 92.646, | |
| "eval_steps_per_second": 2.9, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "learning_rate": 2.017699214425323e-06, | |
| "loss": 1.1863, | |
| "step": 2405 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "learning_rate": 1.9849308535360688e-06, | |
| "loss": 1.1658, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "learning_rate": 1.9524014201940732e-06, | |
| "loss": 1.1743, | |
| "step": 2415 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "learning_rate": 1.9201118840949905e-06, | |
| "loss": 1.1695, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 1.8880632077831906e-06, | |
| "loss": 1.178, | |
| "step": 2425 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 1.8562563466230577e-06, | |
| "loss": 1.1591, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 1.824692248770521e-06, | |
| "loss": 1.1913, | |
| "step": 2435 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 1.7933718551447877e-06, | |
| "loss": 1.1801, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "learning_rate": 1.7622960994002836e-06, | |
| "loss": 1.1923, | |
| "step": 2445 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "learning_rate": 1.7314659078988383e-06, | |
| "loss": 1.1584, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 1.700882199682059e-06, | |
| "loss": 1.174, | |
| "step": 2455 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 1.6705458864439405e-06, | |
| "loss": 1.197, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "learning_rate": 1.6404578725036802e-06, | |
| "loss": 1.1716, | |
| "step": 2465 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "learning_rate": 1.6106190547787282e-06, | |
| "loss": 1.1776, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 1.5810303227580448e-06, | |
| "loss": 1.1546, | |
| "step": 2475 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 1.5516925584755938e-06, | |
| "loss": 1.1459, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 1.5226066364840298e-06, | |
| "loss": 1.1676, | |
| "step": 2485 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 1.4937734238286539e-06, | |
| "loss": 1.1767, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 1.4651937800215465e-06, | |
| "loss": 1.1464, | |
| "step": 2495 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 1.436868557015959e-06, | |
| "loss": 1.1733, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "eval_loss": 1.2547988891601562, | |
| "eval_runtime": 152.7044, | |
| "eval_samples_per_second": 92.676, | |
| "eval_steps_per_second": 2.901, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 1.4087985991809072e-06, | |
| "loss": 1.1714, | |
| "step": 2505 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 1.380984743276006e-06, | |
| "loss": 1.1802, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "learning_rate": 1.353427818426527e-06, | |
| "loss": 1.174, | |
| "step": 2515 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "learning_rate": 1.32612864609868e-06, | |
| "loss": 1.1993, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "learning_rate": 1.2990880400751204e-06, | |
| "loss": 1.1689, | |
| "step": 2525 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "learning_rate": 1.272306806430702e-06, | |
| "loss": 1.1681, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "learning_rate": 1.245785743508441e-06, | |
| "loss": 1.1841, | |
| "step": 2535 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "learning_rate": 1.2195256418957146e-06, | |
| "loss": 1.1767, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "learning_rate": 1.1935272844007029e-06, | |
| "loss": 1.1753, | |
| "step": 2545 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "learning_rate": 1.167791446029043e-06, | |
| "loss": 1.1547, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "learning_rate": 1.1423188939607356e-06, | |
| "loss": 1.2079, | |
| "step": 2555 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "learning_rate": 1.1171103875272693e-06, | |
| "loss": 1.1776, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "learning_rate": 1.0921666781889861e-06, | |
| "loss": 1.192, | |
| "step": 2565 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "learning_rate": 1.067488509512683e-06, | |
| "loss": 1.1615, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 1.0430766171494444e-06, | |
| "loss": 1.1664, | |
| "step": 2575 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 1.0189317288127154e-06, | |
| "loss": 1.1749, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "learning_rate": 9.950545642566045e-07, | |
| "loss": 1.1912, | |
| "step": 2585 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "learning_rate": 9.714458352544276e-07, | |
| "loss": 1.1704, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 9.481062455774981e-07, | |
| "loss": 1.1674, | |
| "step": 2595 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 9.25036490974136e-07, | |
| "loss": 1.1665, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "eval_loss": 1.2544310092926025, | |
| "eval_runtime": 152.8108, | |
| "eval_samples_per_second": 92.611, | |
| "eval_steps_per_second": 2.899, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 9.022372591489381e-07, | |
| "loss": 1.1531, | |
| "step": 2605 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 8.797092297422715e-07, | |
| "loss": 1.191, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 8.574530743100151e-07, | |
| "loss": 1.1529, | |
| "step": 2615 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 8.354694563035426e-07, | |
| "loss": 1.1724, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "learning_rate": 8.137590310499433e-07, | |
| "loss": 1.1698, | |
| "step": 2625 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "learning_rate": 7.923224457324852e-07, | |
| "loss": 1.1841, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 7.711603393713297e-07, | |
| "loss": 1.1679, | |
| "step": 2635 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 7.502733428044684e-07, | |
| "loss": 1.185, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 7.296620786689356e-07, | |
| "loss": 1.1784, | |
| "step": 2645 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 7.093271613822373e-07, | |
| "loss": 1.1703, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 6.89269197124034e-07, | |
| "loss": 1.1825, | |
| "step": 2655 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 6.694887838180764e-07, | |
| "loss": 1.1789, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 6.499865111143788e-07, | |
| "loss": 1.1693, | |
| "step": 2665 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 6.307629603716425e-07, | |
| "loss": 1.1682, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 6.11818704639926e-07, | |
| "loss": 1.1778, | |
| "step": 2675 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 5.931543086435543e-07, | |
| "loss": 1.1951, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "learning_rate": 5.747703287643003e-07, | |
| "loss": 1.1818, | |
| "step": 2685 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "learning_rate": 5.566673130247879e-07, | |
| "loss": 1.1722, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 5.388458010721575e-07, | |
| "loss": 1.1669, | |
| "step": 2695 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 5.213063241619798e-07, | |
| "loss": 1.1499, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "eval_loss": 1.254270076751709, | |
| "eval_runtime": 152.9074, | |
| "eval_samples_per_second": 92.553, | |
| "eval_steps_per_second": 2.897, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 5.040494051424205e-07, | |
| "loss": 1.172, | |
| "step": 2705 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 4.870755584386544e-07, | |
| "loss": 1.1771, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 4.703852900375272e-07, | |
| "loss": 1.1574, | |
| "step": 2715 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 4.539790974724723e-07, | |
| "loss": 1.178, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 4.378574698086868e-07, | |
| "loss": 1.1696, | |
| "step": 2725 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 4.2202088762854255e-07, | |
| "loss": 1.1794, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 4.064698230172681e-07, | |
| "loss": 1.17, | |
| "step": 2735 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 3.912047395488705e-07, | |
| "loss": 1.1873, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 3.762260922723182e-07, | |
| "loss": 1.1786, | |
| "step": 2745 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 3.615343276979777e-07, | |
| "loss": 1.1958, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 3.471298837843007e-07, | |
| "loss": 1.1831, | |
| "step": 2755 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 3.3301318992476904e-07, | |
| "loss": 1.1855, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 3.191846669350973e-07, | |
| "loss": 1.1659, | |
| "step": 2765 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 3.056447270406826e-07, | |
| "loss": 1.1666, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 2.9239377386432345e-07, | |
| "loss": 1.172, | |
| "step": 2775 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 2.7943220241418376e-07, | |
| "loss": 1.1759, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 2.667603990720169e-07, | |
| "loss": 1.1766, | |
| "step": 2785 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 2.5437874158164897e-07, | |
| "loss": 1.1744, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 2.422875990377205e-07, | |
| "loss": 1.2014, | |
| "step": 2795 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 2.3048733187468032e-07, | |
| "loss": 1.1779, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "eval_loss": 1.2542082071304321, | |
| "eval_runtime": 152.7562, | |
| "eval_samples_per_second": 92.644, | |
| "eval_steps_per_second": 2.9, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 2.1897829185604235e-07, | |
| "loss": 1.1756, | |
| "step": 2805 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 2.0776082206389936e-07, | |
| "loss": 1.1919, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 1.9683525688869776e-07, | |
| "loss": 1.1807, | |
| "step": 2815 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 1.862019220192679e-07, | |
| "loss": 1.1878, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 1.7586113443311404e-07, | |
| "loss": 1.1695, | |
| "step": 2825 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 1.658132023869663e-07, | |
| "loss": 1.1949, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 1.5605842540759476e-07, | |
| "loss": 1.1779, | |
| "step": 2835 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 1.4659709428287538e-07, | |
| "loss": 1.177, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 1.3742949105312375e-07, | |
| "loss": 1.1727, | |
| "step": 2845 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 1.2855588900269057e-07, | |
| "loss": 1.1921, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 1.199765526518082e-07, | |
| "loss": 1.1768, | |
| "step": 2855 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 1.1169173774871478e-07, | |
| "loss": 1.1775, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 1.0370169126202145e-07, | |
| "loss": 1.1533, | |
| "step": 2865 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 9.600665137335374e-08, | |
| "loss": 1.202, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 8.860684747025727e-08, | |
| "loss": 1.1789, | |
| "step": 2875 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 8.150250013934547e-08, | |
| "loss": 1.1725, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 7.469382115974034e-08, | |
| "loss": 1.1604, | |
| "step": 2885 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 6.818101349674756e-08, | |
| "loss": 1.1795, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 6.196427129581129e-08, | |
| "loss": 1.1658, | |
| "step": 2895 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 5.60437798767266e-08, | |
| "loss": 1.1746, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "eval_loss": 1.254175066947937, | |
| "eval_runtime": 152.7765, | |
| "eval_samples_per_second": 92.632, | |
| "eval_steps_per_second": 2.9, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 5.041971572811277e-08, | |
| "loss": 1.1694, | |
| "step": 2905 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 4.509224650215527e-08, | |
| "loss": 1.192, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 4.0061531009605305e-08, | |
| "loss": 1.1549, | |
| "step": 2915 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 3.5327719215046965e-08, | |
| "loss": 1.1779, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 3.089095223242744e-08, | |
| "loss": 1.1734, | |
| "step": 2925 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 2.6751362320849293e-08, | |
| "loss": 1.161, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 2.2909072880629156e-08, | |
| "loss": 1.1553, | |
| "step": 2935 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 1.9364198449616233e-08, | |
| "loss": 1.1885, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 1.6116844699781697e-08, | |
| "loss": 1.1814, | |
| "step": 2945 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 1.3167108434066766e-08, | |
| "loss": 1.1724, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 1.0515077583498346e-08, | |
| "loss": 1.1942, | |
| "step": 2955 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 8.160831204563347e-09, | |
| "loss": 1.207, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 6.1044394768594585e-09, | |
| "loss": 1.1665, | |
| "step": 2965 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 4.345963700995714e-09, | |
| "loss": 1.1608, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 2.8854562967706167e-09, | |
| "loss": 1.182, | |
| "step": 2975 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 1.7229608016067213e-09, | |
| "loss": 1.1413, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 8.585118692538974e-10, | |
| "loss": 1.1668, | |
| "step": 2985 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 2.9213526875349063e-10, | |
| "loss": 1.1881, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "learning_rate": 2.384788367670865e-11, | |
| "loss": 1.1883, | |
| "step": 2995 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "step": 2997, | |
| "total_flos": 6.191263180086313e+18, | |
| "train_loss": 1.2270306770030681, | |
| "train_runtime": 19334.7166, | |
| "train_samples_per_second": 19.844, | |
| "train_steps_per_second": 0.155 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 2997, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 100, | |
| "total_flos": 6.191263180086313e+18, | |
| "train_batch_size": 16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
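
Since the file above is the standard `trainer_state.json` that the Hugging Face `Trainer` writes into each checkpoint directory, a minimal sketch of reading it back may be useful. It relies only on key names visible in the log itself (`log_history`, `loss`, `eval_loss`, `best_metric`, `best_model_checkpoint`); the file path is an assumption based on the usual checkpoint layout, so adjust it to wherever the state file actually lives.

```python
import json

# Assumption: the JSON above is saved as trainer_state.json inside the best
# checkpoint directory (the usual Trainer layout); adjust the path as needed.
STATE_PATH = "data/phi-1_5_sft/checkpoint-2900/trainer_state.json"

with open(STATE_PATH) as f:
    state = json.load(f)

# Training records carry a "loss" key; evaluation records carry "eval_loss".
# The final summary entry (train_loss, train_runtime, ...) matches neither
# filter, so it is excluded from both lists.
train_log = [e for e in state["log_history"] if "loss" in e]
eval_log = [e for e in state["log_history"] if "eval_loss" in e]

# The best checkpoint is the eval record whose loss equals best_metric.
best = min(eval_log, key=lambda e: e["eval_loss"])
print(f"{len(eval_log)} evals; best eval_loss {best['eval_loss']:.4f} "
      f"at step {best['step']} (best_metric {state['best_metric']:.4f})")
print("best checkpoint:", state["best_model_checkpoint"])
```

On this particular run, the sketch should report the minimum eval loss at step 2900, agreeing with the `best_metric` and `best_model_checkpoint` fields recorded above; the same two lists are a convenient starting point for plotting the train/eval loss curves.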