{
  "best_metric": 1.421057105064392,
  "best_model_checkpoint": "/home/s1970716/models/bart_base_qgen/checkpoint-1369",
  "epoch": 9.992695398100803,
  "global_step": 1710,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 1.994152046783626e-05,
      "loss": 3.2937,
      "step": 5
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.9883040935672515e-05,
      "loss": 2.4156,
      "step": 10
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9824561403508773e-05,
      "loss": 2.199,
      "step": 15
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.976608187134503e-05,
      "loss": 2.0742,
      "step": 20
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.970760233918129e-05,
      "loss": 2.0309,
      "step": 25
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9649122807017544e-05,
      "loss": 1.9927,
      "step": 30
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9590643274853802e-05,
      "loss": 1.9811,
      "step": 35
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.953216374269006e-05,
      "loss": 1.9124,
      "step": 40
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9473684210526318e-05,
      "loss": 1.8912,
      "step": 45
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9415204678362573e-05,
      "loss": 1.92,
      "step": 50
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.935672514619883e-05,
      "loss": 1.8673,
      "step": 55
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.929824561403509e-05,
      "loss": 1.8451,
      "step": 60
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9239766081871347e-05,
      "loss": 1.7833,
      "step": 65
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9181286549707602e-05,
      "loss": 1.7887,
      "step": 70
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.912280701754386e-05,
      "loss": 1.8294,
      "step": 75
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.9064327485380118e-05,
      "loss": 1.8207,
      "step": 80
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.9005847953216376e-05,
      "loss": 1.8147,
      "step": 85
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.894736842105263e-05,
      "loss": 1.8262,
      "step": 90
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.888888888888889e-05,
      "loss": 1.8197,
      "step": 95
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.8830409356725147e-05,
      "loss": 1.7758,
      "step": 100
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.8771929824561405e-05,
      "loss": 1.7745,
      "step": 105
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.871345029239766e-05,
      "loss": 1.7955,
      "step": 110
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.8654970760233918e-05,
      "loss": 1.7856,
      "step": 115
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.8596491228070176e-05,
      "loss": 1.7584,
      "step": 120
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.8538011695906434e-05,
      "loss": 1.7864,
      "step": 125
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.847953216374269e-05,
      "loss": 1.7483,
      "step": 130
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.8421052631578947e-05,
      "loss": 1.7431,
      "step": 135
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.8362573099415205e-05,
      "loss": 1.7358,
      "step": 140
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.8304093567251464e-05,
      "loss": 1.7497,
      "step": 145
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.824561403508772e-05,
      "loss": 1.7249,
      "step": 150
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.8187134502923976e-05,
      "loss": 1.7271,
      "step": 155
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.8128654970760235e-05,
      "loss": 1.6804,
      "step": 160
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.8070175438596493e-05,
      "loss": 1.7269,
      "step": 165
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.8011695906432747e-05,
      "loss": 1.7237,
      "step": 170
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.5064257383346558,
      "eval_runtime": 54.6397,
      "eval_samples_per_second": 96.724,
      "eval_steps_per_second": 3.038,
      "step": 171
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.7953216374269006e-05,
      "loss": 1.6854,
      "step": 175
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.7894736842105264e-05,
      "loss": 1.663,
      "step": 180
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.7836257309941522e-05,
      "loss": 1.6874,
      "step": 185
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.7777777777777777e-05,
      "loss": 1.6968,
      "step": 190
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.7719298245614035e-05,
      "loss": 1.672,
      "step": 195
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.7660818713450293e-05,
      "loss": 1.6925,
      "step": 200
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.760233918128655e-05,
      "loss": 1.6568,
      "step": 205
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.754385964912281e-05,
      "loss": 1.6246,
      "step": 210
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.7485380116959064e-05,
      "loss": 1.6766,
      "step": 215
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.7426900584795322e-05,
      "loss": 1.6567,
      "step": 220
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.736842105263158e-05,
      "loss": 1.6632,
      "step": 225
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.7309941520467838e-05,
      "loss": 1.6726,
      "step": 230
    },
    {
      "epoch": 1.37,
      "learning_rate": 1.7251461988304093e-05,
      "loss": 1.6241,
      "step": 235
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.719298245614035e-05,
      "loss": 1.6584,
      "step": 240
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.713450292397661e-05,
      "loss": 1.6508,
      "step": 245
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.7076023391812867e-05,
      "loss": 1.6397,
      "step": 250
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.7017543859649125e-05,
      "loss": 1.6378,
      "step": 255
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.695906432748538e-05,
      "loss": 1.6827,
      "step": 260
    },
    {
      "epoch": 1.55,
      "learning_rate": 1.690058479532164e-05,
      "loss": 1.658,
      "step": 265
    },
    {
      "epoch": 1.58,
      "learning_rate": 1.6842105263157896e-05,
      "loss": 1.6236,
      "step": 270
    },
    {
      "epoch": 1.61,
      "learning_rate": 1.6783625730994155e-05,
      "loss": 1.6602,
      "step": 275
    },
    {
      "epoch": 1.64,
      "learning_rate": 1.672514619883041e-05,
      "loss": 1.6419,
      "step": 280
    },
    {
      "epoch": 1.67,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 1.6307,
      "step": 285
    },
    {
      "epoch": 1.69,
      "learning_rate": 1.6608187134502926e-05,
      "loss": 1.6179,
      "step": 290
    },
    {
      "epoch": 1.72,
      "learning_rate": 1.6549707602339184e-05,
      "loss": 1.6396,
      "step": 295
    },
    {
      "epoch": 1.75,
      "learning_rate": 1.649122807017544e-05,
      "loss": 1.6342,
      "step": 300
    },
    {
      "epoch": 1.78,
      "learning_rate": 1.6432748538011697e-05,
      "loss": 1.6414,
      "step": 305
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.6374269005847955e-05,
      "loss": 1.6393,
      "step": 310
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.6315789473684213e-05,
      "loss": 1.6478,
      "step": 315
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.625730994152047e-05,
      "loss": 1.6384,
      "step": 320
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.6198830409356726e-05,
      "loss": 1.61,
      "step": 325
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.6140350877192984e-05,
      "loss": 1.6028,
      "step": 330
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.6081871345029242e-05,
      "loss": 1.5761,
      "step": 335
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.60233918128655e-05,
      "loss": 1.5995,
      "step": 340
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.4697957038879395,
      "eval_runtime": 54.6066,
      "eval_samples_per_second": 96.783,
      "eval_steps_per_second": 3.04,
      "step": 342
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.5964912280701755e-05,
      "loss": 1.6004,
      "step": 345
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.5906432748538013e-05,
      "loss": 1.5699,
      "step": 350
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.584795321637427e-05,
      "loss": 1.5738,
      "step": 355
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.578947368421053e-05,
      "loss": 1.5974,
      "step": 360
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.5730994152046787e-05,
      "loss": 1.5888,
      "step": 365
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.5672514619883042e-05,
      "loss": 1.5881,
      "step": 370
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.56140350877193e-05,
      "loss": 1.5908,
      "step": 375
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.555555555555556e-05,
      "loss": 1.5866,
      "step": 380
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.5497076023391816e-05,
      "loss": 1.5688,
      "step": 385
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.543859649122807e-05,
      "loss": 1.5718,
      "step": 390
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.538011695906433e-05,
      "loss": 1.5862,
      "step": 395
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.5321637426900587e-05,
      "loss": 1.5568,
      "step": 400
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.5263157894736846e-05,
      "loss": 1.5796,
      "step": 405
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.52046783625731e-05,
      "loss": 1.5615,
      "step": 410
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.5146198830409358e-05,
      "loss": 1.5674,
      "step": 415
    },
    {
      "epoch": 2.45,
      "learning_rate": 1.5087719298245615e-05,
      "loss": 1.5974,
      "step": 420
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.5029239766081873e-05,
      "loss": 1.552,
      "step": 425
    },
    {
      "epoch": 2.51,
      "learning_rate": 1.497076023391813e-05,
      "loss": 1.5843,
      "step": 430
    },
    {
      "epoch": 2.54,
      "learning_rate": 1.4912280701754388e-05,
      "loss": 1.5495,
      "step": 435
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.4853801169590644e-05,
      "loss": 1.576,
      "step": 440
    },
    {
      "epoch": 2.6,
      "learning_rate": 1.4795321637426902e-05,
      "loss": 1.5556,
      "step": 445
    },
    {
      "epoch": 2.63,
      "learning_rate": 1.4736842105263159e-05,
      "loss": 1.5451,
      "step": 450
    },
    {
      "epoch": 2.66,
      "learning_rate": 1.4678362573099417e-05,
      "loss": 1.5697,
      "step": 455
    },
    {
      "epoch": 2.69,
      "learning_rate": 1.4619883040935675e-05,
      "loss": 1.5698,
      "step": 460
    },
    {
      "epoch": 2.72,
      "learning_rate": 1.4561403508771931e-05,
      "loss": 1.571,
      "step": 465
    },
    {
      "epoch": 2.75,
      "learning_rate": 1.4502923976608188e-05,
      "loss": 1.5633,
      "step": 470
    },
    {
      "epoch": 2.78,
      "learning_rate": 1.4444444444444446e-05,
      "loss": 1.5628,
      "step": 475
    },
    {
      "epoch": 2.8,
      "learning_rate": 1.4385964912280704e-05,
      "loss": 1.5627,
      "step": 480
    },
    {
      "epoch": 2.83,
      "learning_rate": 1.432748538011696e-05,
      "loss": 1.5465,
      "step": 485
    },
    {
      "epoch": 2.86,
      "learning_rate": 1.4269005847953217e-05,
      "loss": 1.5661,
      "step": 490
    },
    {
      "epoch": 2.89,
      "learning_rate": 1.4210526315789475e-05,
      "loss": 1.5996,
      "step": 495
    },
    {
      "epoch": 2.92,
      "learning_rate": 1.4152046783625733e-05,
      "loss": 1.5689,
      "step": 500
    },
    {
      "epoch": 2.95,
      "learning_rate": 1.409356725146199e-05,
      "loss": 1.5207,
      "step": 505
    },
    {
      "epoch": 2.98,
      "learning_rate": 1.4035087719298246e-05,
      "loss": 1.5289,
      "step": 510
    },
    {
      "epoch": 3.0,
      "eval_loss": 1.4481685161590576,
      "eval_runtime": 54.5707,
      "eval_samples_per_second": 96.847,
      "eval_steps_per_second": 3.042,
      "step": 513
    },
    {
      "epoch": 3.01,
      "learning_rate": 1.3976608187134504e-05,
      "loss": 1.5335,
      "step": 515
    },
    {
      "epoch": 3.04,
      "learning_rate": 1.3918128654970762e-05,
      "loss": 1.5218,
      "step": 520
    },
    {
      "epoch": 3.07,
      "learning_rate": 1.385964912280702e-05,
      "loss": 1.5529,
      "step": 525
    },
    {
      "epoch": 3.1,
      "learning_rate": 1.3801169590643275e-05,
      "loss": 1.5287,
      "step": 530
    },
    {
      "epoch": 3.13,
      "learning_rate": 1.3742690058479533e-05,
      "loss": 1.5382,
      "step": 535
    },
    {
      "epoch": 3.16,
      "learning_rate": 1.3684210526315791e-05,
      "loss": 1.5375,
      "step": 540
    },
    {
      "epoch": 3.18,
      "learning_rate": 1.362573099415205e-05,
      "loss": 1.5205,
      "step": 545
    },
    {
      "epoch": 3.21,
      "learning_rate": 1.3567251461988304e-05,
      "loss": 1.5213,
      "step": 550
    },
    {
      "epoch": 3.24,
      "learning_rate": 1.3508771929824562e-05,
      "loss": 1.5088,
      "step": 555
    },
    {
      "epoch": 3.27,
      "learning_rate": 1.345029239766082e-05,
      "loss": 1.4972,
      "step": 560
    },
    {
      "epoch": 3.3,
      "learning_rate": 1.3391812865497079e-05,
      "loss": 1.4984,
      "step": 565
    },
    {
      "epoch": 3.33,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 1.5149,
      "step": 570
    },
    {
      "epoch": 3.36,
      "learning_rate": 1.3274853801169591e-05,
      "loss": 1.4956,
      "step": 575
    },
    {
      "epoch": 3.39,
      "learning_rate": 1.321637426900585e-05,
      "loss": 1.5321,
      "step": 580
    },
    {
      "epoch": 3.42,
      "learning_rate": 1.3157894736842108e-05,
      "loss": 1.5237,
      "step": 585
    },
    {
      "epoch": 3.45,
      "learning_rate": 1.3099415204678362e-05,
      "loss": 1.5124,
      "step": 590
    },
    {
      "epoch": 3.48,
      "learning_rate": 1.304093567251462e-05,
      "loss": 1.5087,
      "step": 595
    },
    {
      "epoch": 3.51,
      "learning_rate": 1.2982456140350879e-05,
      "loss": 1.5042,
      "step": 600
    },
    {
      "epoch": 3.54,
      "learning_rate": 1.2923976608187137e-05,
      "loss": 1.5324,
      "step": 605
    },
    {
      "epoch": 3.56,
      "learning_rate": 1.2865497076023392e-05,
      "loss": 1.5298,
      "step": 610
    },
    {
      "epoch": 3.59,
      "learning_rate": 1.280701754385965e-05,
      "loss": 1.5311,
      "step": 615
    },
    {
      "epoch": 3.62,
      "learning_rate": 1.2748538011695908e-05,
      "loss": 1.5229,
      "step": 620
    },
    {
      "epoch": 3.65,
      "learning_rate": 1.2690058479532166e-05,
      "loss": 1.4949,
      "step": 625
    },
    {
      "epoch": 3.68,
      "learning_rate": 1.263157894736842e-05,
      "loss": 1.5191,
      "step": 630
    },
    {
      "epoch": 3.71,
      "learning_rate": 1.2573099415204679e-05,
      "loss": 1.4942,
      "step": 635
    },
    {
      "epoch": 3.74,
      "learning_rate": 1.2514619883040937e-05,
      "loss": 1.4864,
      "step": 640
    },
    {
      "epoch": 3.77,
      "learning_rate": 1.2456140350877195e-05,
      "loss": 1.4959,
      "step": 645
    },
    {
      "epoch": 3.8,
      "learning_rate": 1.239766081871345e-05,
      "loss": 1.504,
      "step": 650
    },
    {
      "epoch": 3.83,
      "learning_rate": 1.2339181286549708e-05,
      "loss": 1.5097,
      "step": 655
    },
    {
      "epoch": 3.86,
      "learning_rate": 1.2280701754385966e-05,
      "loss": 1.5135,
      "step": 660
    },
    {
      "epoch": 3.89,
      "learning_rate": 1.2222222222222224e-05,
      "loss": 1.4982,
      "step": 665
    },
    {
      "epoch": 3.92,
      "learning_rate": 1.216374269005848e-05,
      "loss": 1.5007,
      "step": 670
    },
    {
      "epoch": 3.94,
      "learning_rate": 1.2105263157894737e-05,
      "loss": 1.5467,
      "step": 675
    },
    {
      "epoch": 3.97,
      "learning_rate": 1.2046783625730995e-05,
      "loss": 1.5082,
      "step": 680
    },
    {
      "epoch": 4.0,
      "eval_loss": 1.4363151788711548,
      "eval_runtime": 54.5283,
      "eval_samples_per_second": 96.922,
      "eval_steps_per_second": 3.044,
      "step": 684
    },
    {
      "epoch": 4.0,
      "learning_rate": 1.1988304093567253e-05,
      "loss": 1.5229,
      "step": 685
    },
    {
      "epoch": 4.03,
      "learning_rate": 1.192982456140351e-05,
      "loss": 1.458,
      "step": 690
    },
    {
      "epoch": 4.06,
      "learning_rate": 1.1871345029239766e-05,
      "loss": 1.4808,
      "step": 695
    },
    {
      "epoch": 4.09,
      "learning_rate": 1.1812865497076024e-05,
      "loss": 1.5026,
      "step": 700
    },
    {
      "epoch": 4.12,
      "learning_rate": 1.1754385964912282e-05,
      "loss": 1.4988,
      "step": 705
    },
    {
      "epoch": 4.15,
      "learning_rate": 1.1695906432748539e-05,
      "loss": 1.4884,
      "step": 710
    },
    {
      "epoch": 4.18,
      "learning_rate": 1.1637426900584797e-05,
      "loss": 1.4833,
      "step": 715
    },
    {
      "epoch": 4.21,
      "learning_rate": 1.1578947368421053e-05,
      "loss": 1.4669,
      "step": 720
    },
    {
      "epoch": 4.24,
      "learning_rate": 1.1520467836257312e-05,
      "loss": 1.4759,
      "step": 725
    },
    {
      "epoch": 4.27,
      "learning_rate": 1.1461988304093568e-05,
      "loss": 1.4444,
      "step": 730
    },
    {
      "epoch": 4.3,
      "learning_rate": 1.1403508771929826e-05,
      "loss": 1.4659,
      "step": 735
    },
    {
      "epoch": 4.32,
      "learning_rate": 1.1345029239766083e-05,
      "loss": 1.4745,
      "step": 740
    },
    {
      "epoch": 4.35,
      "learning_rate": 1.128654970760234e-05,
      "loss": 1.4783,
      "step": 745
    },
    {
      "epoch": 4.38,
      "learning_rate": 1.1228070175438597e-05,
      "loss": 1.4604,
      "step": 750
    },
    {
      "epoch": 4.41,
      "learning_rate": 1.1169590643274855e-05,
      "loss": 1.467,
      "step": 755
    },
    {
      "epoch": 4.44,
      "learning_rate": 1.1111111111111113e-05,
      "loss": 1.486,
      "step": 760
    },
    {
      "epoch": 4.47,
      "learning_rate": 1.105263157894737e-05,
      "loss": 1.4789,
      "step": 765
    },
    {
      "epoch": 4.5,
      "learning_rate": 1.0994152046783626e-05,
      "loss": 1.4938,
      "step": 770
    },
    {
      "epoch": 4.53,
      "learning_rate": 1.0935672514619884e-05,
      "loss": 1.4702,
      "step": 775
    },
    {
      "epoch": 4.56,
      "learning_rate": 1.0877192982456142e-05,
      "loss": 1.4938,
      "step": 780
    },
    {
      "epoch": 4.59,
      "learning_rate": 1.0818713450292399e-05,
      "loss": 1.4807,
      "step": 785
    },
    {
      "epoch": 4.62,
      "learning_rate": 1.0760233918128655e-05,
      "loss": 1.487,
      "step": 790
    },
    {
      "epoch": 4.65,
      "learning_rate": 1.0701754385964913e-05,
      "loss": 1.4808,
      "step": 795
    },
    {
      "epoch": 4.67,
      "learning_rate": 1.0643274853801172e-05,
      "loss": 1.4707,
      "step": 800
    },
    {
      "epoch": 4.7,
      "learning_rate": 1.0584795321637428e-05,
      "loss": 1.4748,
      "step": 805
    },
    {
      "epoch": 4.73,
      "learning_rate": 1.0526315789473684e-05,
      "loss": 1.4728,
      "step": 810
    },
    {
      "epoch": 4.76,
      "learning_rate": 1.0467836257309943e-05,
      "loss": 1.4848,
      "step": 815
    },
    {
      "epoch": 4.79,
      "learning_rate": 1.04093567251462e-05,
      "loss": 1.4664,
      "step": 820
    },
    {
      "epoch": 4.82,
      "learning_rate": 1.0350877192982459e-05,
      "loss": 1.4639,
      "step": 825
    },
    {
      "epoch": 4.85,
      "learning_rate": 1.0292397660818714e-05,
      "loss": 1.5213,
      "step": 830
    },
    {
      "epoch": 4.88,
      "learning_rate": 1.0233918128654972e-05,
      "loss": 1.4455,
      "step": 835
    },
    {
      "epoch": 4.91,
      "learning_rate": 1.017543859649123e-05,
      "loss": 1.5129,
      "step": 840
    },
    {
      "epoch": 4.94,
      "learning_rate": 1.0116959064327488e-05,
      "loss": 1.4527,
      "step": 845
    },
    {
      "epoch": 4.97,
      "learning_rate": 1.0058479532163743e-05,
      "loss": 1.4543,
      "step": 850
    },
    {
      "epoch": 5.0,
      "learning_rate": 1e-05,
      "loss": 1.4782,
      "step": 855
    },
    {
      "epoch": 5.0,
      "eval_loss": 1.4285916090011597,
      "eval_runtime": 54.3492,
      "eval_samples_per_second": 97.242,
      "eval_steps_per_second": 3.054,
      "step": 855
    },
    {
      "epoch": 5.03,
      "learning_rate": 9.941520467836257e-06,
      "loss": 1.4358,
      "step": 860
    },
    {
      "epoch": 5.05,
      "learning_rate": 9.883040935672515e-06,
      "loss": 1.4702,
      "step": 865
    },
    {
      "epoch": 5.08,
      "learning_rate": 9.824561403508772e-06,
      "loss": 1.4468,
      "step": 870
    },
    {
      "epoch": 5.11,
      "learning_rate": 9.76608187134503e-06,
      "loss": 1.4365,
      "step": 875
    },
    {
      "epoch": 5.14,
      "learning_rate": 9.707602339181286e-06,
      "loss": 1.462,
      "step": 880
    },
    {
      "epoch": 5.17,
      "learning_rate": 9.649122807017545e-06,
      "loss": 1.4665,
      "step": 885
    },
    {
      "epoch": 5.2,
      "learning_rate": 9.590643274853801e-06,
      "loss": 1.4579,
      "step": 890
    },
    {
      "epoch": 5.23,
      "learning_rate": 9.532163742690059e-06,
      "loss": 1.445,
      "step": 895
    },
    {
      "epoch": 5.26,
      "learning_rate": 9.473684210526315e-06,
      "loss": 1.4563,
      "step": 900
    },
    {
      "epoch": 5.29,
      "learning_rate": 9.415204678362574e-06,
      "loss": 1.4286,
      "step": 905
    },
    {
      "epoch": 5.32,
      "learning_rate": 9.35672514619883e-06,
      "loss": 1.4753,
      "step": 910
    },
    {
      "epoch": 5.35,
      "learning_rate": 9.298245614035088e-06,
      "loss": 1.4628,
      "step": 915
    },
    {
      "epoch": 5.38,
      "learning_rate": 9.239766081871345e-06,
      "loss": 1.4685,
      "step": 920
    },
    {
      "epoch": 5.41,
      "learning_rate": 9.181286549707603e-06,
      "loss": 1.4559,
      "step": 925
    },
    {
      "epoch": 5.43,
      "learning_rate": 9.12280701754386e-06,
      "loss": 1.435,
      "step": 930
    },
    {
      "epoch": 5.46,
      "learning_rate": 9.064327485380117e-06,
      "loss": 1.4272,
      "step": 935
    },
    {
      "epoch": 5.49,
      "learning_rate": 9.005847953216374e-06,
      "loss": 1.4592,
      "step": 940
    },
    {
      "epoch": 5.52,
      "learning_rate": 8.947368421052632e-06,
      "loss": 1.4264,
      "step": 945
    },
    {
      "epoch": 5.55,
      "learning_rate": 8.888888888888888e-06,
      "loss": 1.4329,
      "step": 950
    },
    {
      "epoch": 5.58,
      "learning_rate": 8.830409356725146e-06,
      "loss": 1.4368,
      "step": 955
    },
    {
      "epoch": 5.61,
      "learning_rate": 8.771929824561405e-06,
      "loss": 1.4481,
      "step": 960
    },
    {
      "epoch": 5.64,
      "learning_rate": 8.713450292397661e-06,
      "loss": 1.458,
      "step": 965
    },
    {
      "epoch": 5.67,
      "learning_rate": 8.654970760233919e-06,
      "loss": 1.4302,
      "step": 970
    },
    {
      "epoch": 5.7,
      "learning_rate": 8.596491228070176e-06,
      "loss": 1.4269,
      "step": 975
    },
    {
      "epoch": 5.73,
      "learning_rate": 8.538011695906434e-06,
      "loss": 1.4743,
      "step": 980
    },
    {
      "epoch": 5.76,
      "learning_rate": 8.47953216374269e-06,
      "loss": 1.4397,
      "step": 985
    },
    {
      "epoch": 5.79,
      "learning_rate": 8.421052631578948e-06,
      "loss": 1.4537,
      "step": 990
    },
    {
      "epoch": 5.81,
      "learning_rate": 8.362573099415205e-06,
      "loss": 1.4483,
      "step": 995
    },
    {
      "epoch": 5.84,
      "learning_rate": 8.304093567251463e-06,
      "loss": 1.479,
      "step": 1000
    },
    {
      "epoch": 5.87,
      "learning_rate": 8.24561403508772e-06,
      "loss": 1.4238,
      "step": 1005
    },
    {
      "epoch": 5.9,
      "learning_rate": 8.187134502923977e-06,
      "loss": 1.4544,
      "step": 1010
    },
    {
      "epoch": 5.93,
      "learning_rate": 8.128654970760235e-06,
      "loss": 1.4557,
      "step": 1015
    },
    {
      "epoch": 5.96,
      "learning_rate": 8.070175438596492e-06,
      "loss": 1.4514,
      "step": 1020
    },
    {
      "epoch": 5.99,
      "learning_rate": 8.01169590643275e-06,
      "loss": 1.4084,
      "step": 1025
    },
    {
      "epoch": 6.0,
      "eval_loss": 1.4264638423919678,
      "eval_runtime": 54.613,
      "eval_samples_per_second": 96.772,
      "eval_steps_per_second": 3.04,
      "step": 1026
    },
    {
      "epoch": 6.02,
      "learning_rate": 7.953216374269006e-06,
      "loss": 1.4231,
      "step": 1030
    },
    {
      "epoch": 6.05,
      "learning_rate": 7.894736842105265e-06,
      "loss": 1.4327,
      "step": 1035
    },
    {
      "epoch": 6.08,
      "learning_rate": 7.836257309941521e-06,
      "loss": 1.4188,
      "step": 1040
    },
    {
      "epoch": 6.11,
      "learning_rate": 7.77777777777778e-06,
      "loss": 1.4453,
      "step": 1045
    },
    {
      "epoch": 6.14,
      "learning_rate": 7.719298245614036e-06,
      "loss": 1.4,
      "step": 1050
    },
    {
      "epoch": 6.17,
      "learning_rate": 7.660818713450294e-06,
      "loss": 1.3889,
      "step": 1055
    },
    {
      "epoch": 6.19,
      "learning_rate": 7.60233918128655e-06,
      "loss": 1.4013,
      "step": 1060
    },
    {
      "epoch": 6.22,
      "learning_rate": 7.5438596491228074e-06,
      "loss": 1.4102,
      "step": 1065
    },
    {
      "epoch": 6.25,
      "learning_rate": 7.485380116959065e-06,
      "loss": 1.3953,
      "step": 1070
    },
    {
      "epoch": 6.28,
      "learning_rate": 7.426900584795322e-06,
      "loss": 1.4428,
      "step": 1075
    },
    {
      "epoch": 6.31,
      "learning_rate": 7.368421052631579e-06,
      "loss": 1.4446,
      "step": 1080
    },
    {
      "epoch": 6.34,
      "learning_rate": 7.309941520467837e-06,
      "loss": 1.4221,
      "step": 1085
    },
    {
      "epoch": 6.37,
      "learning_rate": 7.251461988304094e-06,
      "loss": 1.437,
      "step": 1090
    },
    {
      "epoch": 6.4,
      "learning_rate": 7.192982456140352e-06,
      "loss": 1.4144,
      "step": 1095
    },
    {
      "epoch": 6.43,
      "learning_rate": 7.134502923976608e-06,
      "loss": 1.4665,
      "step": 1100
    },
    {
      "epoch": 6.46,
      "learning_rate": 7.0760233918128665e-06,
      "loss": 1.4175,
      "step": 1105
    },
    {
      "epoch": 6.49,
      "learning_rate": 7.017543859649123e-06,
      "loss": 1.4346,
      "step": 1110
    },
    {
      "epoch": 6.52,
      "learning_rate": 6.959064327485381e-06,
      "loss": 1.422,
      "step": 1115
    },
    {
      "epoch": 6.54,
      "learning_rate": 6.9005847953216375e-06,
      "loss": 1.4322,
      "step": 1120
    },
    {
      "epoch": 6.57,
      "learning_rate": 6.842105263157896e-06,
      "loss": 1.4421,
      "step": 1125
    },
    {
      "epoch": 6.6,
      "learning_rate": 6.783625730994152e-06,
      "loss": 1.4346,
      "step": 1130
    },
    {
      "epoch": 6.63,
      "learning_rate": 6.72514619883041e-06,
      "loss": 1.4141,
      "step": 1135
    },
    {
      "epoch": 6.66,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.4387,
      "step": 1140
    },
    {
      "epoch": 6.69,
      "learning_rate": 6.608187134502925e-06,
      "loss": 1.422,
      "step": 1145
    },
    {
      "epoch": 6.72,
      "learning_rate": 6.549707602339181e-06,
      "loss": 1.4172,
      "step": 1150
    },
    {
      "epoch": 6.75,
      "learning_rate": 6.491228070175439e-06,
      "loss": 1.4387,
      "step": 1155
    },
    {
      "epoch": 6.78,
      "learning_rate": 6.432748538011696e-06,
      "loss": 1.4121,
      "step": 1160
    },
    {
      "epoch": 6.81,
      "learning_rate": 6.374269005847954e-06,
      "loss": 1.4065,
      "step": 1165
    },
    {
      "epoch": 6.84,
      "learning_rate": 6.31578947368421e-06,
      "loss": 1.4487,
      "step": 1170
    },
    {
      "epoch": 6.87,
      "learning_rate": 6.2573099415204685e-06,
      "loss": 1.4362,
      "step": 1175
    },
    {
      "epoch": 6.9,
      "learning_rate": 6.198830409356725e-06,
      "loss": 1.4351,
      "step": 1180
    },
    {
      "epoch": 6.92,
      "learning_rate": 6.140350877192983e-06,
      "loss": 1.4378,
      "step": 1185
    },
    {
      "epoch": 6.95,
      "learning_rate": 6.08187134502924e-06,
      "loss": 1.3944,
      "step": 1190
    },
    {
      "epoch": 6.98,
      "learning_rate": 6.023391812865498e-06,
      "loss": 1.4229,
      "step": 1195
    },
    {
      "epoch": 6.99,
      "eval_loss": 1.4238632917404175,
      "eval_runtime": 54.6138,
      "eval_samples_per_second": 96.77,
      "eval_steps_per_second": 3.04,
      "step": 1197
    },
    {
      "epoch": 7.01,
      "learning_rate": 5.964912280701755e-06,
      "loss": 1.4347,
      "step": 1200
    },
    {
      "epoch": 7.04,
      "learning_rate": 5.906432748538012e-06,
      "loss": 1.3752,
      "step": 1205
    },
    {
      "epoch": 7.07,
      "learning_rate": 5.847953216374269e-06,
      "loss": 1.4408,
      "step": 1210
    },
    {
      "epoch": 7.1,
      "learning_rate": 5.789473684210527e-06,
      "loss": 1.3978,
      "step": 1215
    },
    {
      "epoch": 7.13,
      "learning_rate": 5.730994152046784e-06,
      "loss": 1.3911,
      "step": 1220
    },
    {
      "epoch": 7.16,
      "learning_rate": 5.672514619883041e-06,
      "loss": 1.4236,
      "step": 1225
    },
    {
      "epoch": 7.19,
      "learning_rate": 5.6140350877192985e-06,
      "loss": 1.396,
      "step": 1230
    },
    {
      "epoch": 7.22,
      "learning_rate": 5.555555555555557e-06,
      "loss": 1.3968,
      "step": 1235
    },
    {
      "epoch": 7.25,
      "learning_rate": 5.497076023391813e-06,
      "loss": 1.411,
      "step": 1240
    },
    {
      "epoch": 7.28,
      "learning_rate": 5.438596491228071e-06,
      "loss": 1.4243,
      "step": 1245
    },
    {
      "epoch": 7.3,
      "learning_rate": 5.380116959064328e-06,
      "loss": 1.3972,
      "step": 1250
    },
    {
      "epoch": 7.33,
      "learning_rate": 5.321637426900586e-06,
      "loss": 1.4079,
      "step": 1255
    },
    {
      "epoch": 7.36,
      "learning_rate": 5.263157894736842e-06,
      "loss": 1.4157,
      "step": 1260
    },
    {
      "epoch": 7.39,
      "learning_rate": 5.2046783625731e-06,
      "loss": 1.3847,
      "step": 1265
    },
    {
      "epoch": 7.42,
      "learning_rate": 5.146198830409357e-06,
      "loss": 1.4167,
      "step": 1270
    },
    {
      "epoch": 7.45,
      "learning_rate": 5.087719298245615e-06,
      "loss": 1.4167,
      "step": 1275
    },
    {
      "epoch": 7.48,
      "learning_rate": 5.029239766081871e-06,
      "loss": 1.3941,
      "step": 1280
    },
    {
      "epoch": 7.51,
      "learning_rate": 4.970760233918129e-06,
      "loss": 1.4131,
      "step": 1285
    },
    {
      "epoch": 7.54,
      "learning_rate": 4.912280701754386e-06,
      "loss": 1.3771,
      "step": 1290
    },
    {
      "epoch": 7.57,
      "learning_rate": 4.853801169590643e-06,
      "loss": 1.395,
      "step": 1295
    },
    {
      "epoch": 7.6,
      "learning_rate": 4.7953216374269005e-06,
      "loss": 1.4258,
      "step": 1300
    },
    {
      "epoch": 7.63,
      "learning_rate": 4.736842105263158e-06,
      "loss": 1.3951,
      "step": 1305
    },
    {
      "epoch": 7.66,
      "learning_rate": 4.678362573099415e-06,
      "loss": 1.4218,
      "step": 1310
    },
    {
      "epoch": 7.68,
      "learning_rate": 4.619883040935672e-06,
      "loss": 1.4144,
      "step": 1315
    },
    {
      "epoch": 7.71,
      "learning_rate": 4.56140350877193e-06,
      "loss": 1.4083,
      "step": 1320
    },
    {
      "epoch": 7.74,
      "learning_rate": 4.502923976608187e-06,
      "loss": 1.4303,
      "step": 1325
    },
    {
      "epoch": 7.77,
      "learning_rate": 4.444444444444444e-06,
      "loss": 1.3925,
      "step": 1330
    },
    {
      "epoch": 7.8,
      "learning_rate": 4.385964912280702e-06,
      "loss": 1.3988,
      "step": 1335
    },
    {
      "epoch": 7.83,
      "learning_rate": 4.3274853801169596e-06,
      "loss": 1.4123,
      "step": 1340
    },
    {
      "epoch": 7.86,
      "learning_rate": 4.269005847953217e-06,
      "loss": 1.4339,
      "step": 1345
    },
    {
      "epoch": 7.89,
      "learning_rate": 4.210526315789474e-06,
      "loss": 1.4151,
      "step": 1350
    },
    {
      "epoch": 7.92,
      "learning_rate": 4.152046783625731e-06,
      "loss": 1.4058,
      "step": 1355
    },
    {
      "epoch": 7.95,
      "learning_rate": 4.093567251461989e-06,
      "loss": 1.4106,
      "step": 1360
    },
    {
      "epoch": 7.98,
      "learning_rate": 4.035087719298246e-06,
      "loss": 1.4,
      "step": 1365
    },
    {
      "epoch": 8.0,
      "eval_loss": 1.421057105064392,
      "eval_runtime": 54.4659,
      "eval_samples_per_second": 97.033,
      "eval_steps_per_second": 3.048,
      "step": 1369
    },
    {
      "epoch": 8.01,
      "learning_rate": 3.976608187134503e-06,
      "loss": 1.444,
      "step": 1370
    },
    {
      "epoch": 8.04,
      "learning_rate": 3.9181286549707605e-06,
      "loss": 1.3986,
      "step": 1375
    },
    {
      "epoch": 8.06,
      "learning_rate": 3.859649122807018e-06,
      "loss": 1.3945,
      "step": 1380
    },
    {
      "epoch": 8.09,
      "learning_rate": 3.801169590643275e-06,
      "loss": 1.4086,
      "step": 1385
    },
    {
      "epoch": 8.12,
      "learning_rate": 3.7426900584795324e-06,
      "loss": 1.4037,
      "step": 1390
    },
    {
      "epoch": 8.15,
      "learning_rate": 3.6842105263157896e-06,
      "loss": 1.4306,
      "step": 1395
    },
    {
      "epoch": 8.18,
      "learning_rate": 3.625730994152047e-06,
      "loss": 1.4049,
      "step": 1400
    },
    {
      "epoch": 8.21,
      "learning_rate": 3.567251461988304e-06,
      "loss": 1.3857,
      "step": 1405
    },
    {
      "epoch": 8.24,
      "learning_rate": 3.5087719298245615e-06,
      "loss": 1.3903,
      "step": 1410
    },
    {
      "epoch": 8.27,
      "learning_rate": 3.4502923976608188e-06,
      "loss": 1.4029,
      "step": 1415
    },
    {
      "epoch": 8.3,
      "learning_rate": 3.391812865497076e-06,
      "loss": 1.3956,
      "step": 1420
    },
    {
      "epoch": 8.33,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.3745,
      "step": 1425
    },
    {
      "epoch": 8.36,
      "learning_rate": 3.2748538011695906e-06,
      "loss": 1.394,
      "step": 1430
    },
    {
      "epoch": 8.39,
      "learning_rate": 3.216374269005848e-06,
      "loss": 1.3947,
      "step": 1435
    },
    {
      "epoch": 8.41,
      "learning_rate": 3.157894736842105e-06,
      "loss": 1.396,
      "step": 1440
    },
    {
      "epoch": 8.44,
      "learning_rate": 3.0994152046783624e-06,
      "loss": 1.4008,
      "step": 1445
    },
    {
      "epoch": 8.47,
      "learning_rate": 3.04093567251462e-06,
      "loss": 1.3946,
      "step": 1450
    },
    {
      "epoch": 8.5,
      "learning_rate": 2.9824561403508774e-06,
      "loss": 1.3897,
      "step": 1455
    },
    {
      "epoch": 8.53,
      "learning_rate": 2.9239766081871347e-06,
      "loss": 1.4241,
      "step": 1460
    },
    {
      "epoch": 8.56,
      "learning_rate": 2.865497076023392e-06,
      "loss": 1.3892,
      "step": 1465
    },
    {
      "epoch": 8.59,
      "learning_rate": 2.8070175438596493e-06,
      "loss": 1.3806,
      "step": 1470
    },
    {
      "epoch": 8.62,
      "learning_rate": 2.7485380116959066e-06,
      "loss": 1.3899,
      "step": 1475
    },
    {
      "epoch": 8.65,
      "learning_rate": 2.690058479532164e-06,
      "loss": 1.3768,
      "step": 1480
    },
    {
      "epoch": 8.68,
      "learning_rate": 2.631578947368421e-06,
      "loss": 1.4009,
      "step": 1485
    },
    {
      "epoch": 8.71,
      "learning_rate": 2.5730994152046784e-06,
      "loss": 1.3709,
      "step": 1490
    },
    {
      "epoch": 8.74,
      "learning_rate": 2.5146198830409357e-06,
      "loss": 1.4098,
      "step": 1495
    },
    {
      "epoch": 8.77,
      "learning_rate": 2.456140350877193e-06,
      "loss": 1.407,
      "step": 1500
    },
    {
      "epoch": 8.79,
      "learning_rate": 2.3976608187134502e-06,
      "loss": 1.3903,
      "step": 1505
    },
    {
      "epoch": 8.82,
      "learning_rate": 2.3391812865497075e-06,
      "loss": 1.3892,
      "step": 1510
    },
    {
      "epoch": 8.85,
      "learning_rate": 2.280701754385965e-06,
      "loss": 1.3618,
      "step": 1515
    },
    {
      "epoch": 8.88,
      "learning_rate": 2.222222222222222e-06,
      "loss": 1.4114,
      "step": 1520
    },
    {
      "epoch": 8.91,
      "learning_rate": 2.1637426900584798e-06,
      "loss": 1.3862,
      "step": 1525
    },
    {
      "epoch": 8.94,
      "learning_rate": 2.105263157894737e-06,
      "loss": 1.3997,
      "step": 1530
    },
    {
      "epoch": 8.97,
      "learning_rate": 2.0467836257309943e-06,
      "loss": 1.4109,
      "step": 1535
    },
    {
      "epoch": 9.0,
      "learning_rate": 1.9883040935672516e-06,
      "loss": 1.3865,
      "step": 1540
    },
    {
      "epoch": 9.0,
      "eval_loss": 1.4214701652526855,
      "eval_runtime": 54.5568,
      "eval_samples_per_second": 96.871,
      "eval_steps_per_second": 3.043,
      "step": 1540
    },
    {
      "epoch": 9.03,
      "learning_rate": 1.929824561403509e-06,
      "loss": 1.3806,
      "step": 1545
    },
    {
      "epoch": 9.06,
      "learning_rate": 1.8713450292397662e-06,
      "loss": 1.3961,
      "step": 1550
    },
    {
      "epoch": 9.09,
      "learning_rate": 1.8128654970760235e-06,
      "loss": 1.3759,
      "step": 1555
    },
    {
      "epoch": 9.12,
      "learning_rate": 1.7543859649122807e-06,
      "loss": 1.3744,
      "step": 1560
    },
    {
      "epoch": 9.15,
      "learning_rate": 1.695906432748538e-06,
      "loss": 1.401,
      "step": 1565
    },
    {
      "epoch": 9.17,
      "learning_rate": 1.6374269005847953e-06,
      "loss": 1.3915,
      "step": 1570
    },
    {
      "epoch": 9.2,
      "learning_rate": 1.5789473684210526e-06,
      "loss": 1.4001,
      "step": 1575
    },
    {
      "epoch": 9.23,
      "learning_rate": 1.52046783625731e-06,
      "loss": 1.3885,
      "step": 1580
    },
    {
      "epoch": 9.26,
      "learning_rate": 1.4619883040935674e-06,
      "loss": 1.3917,
      "step": 1585
    },
    {
      "epoch": 9.29,
      "learning_rate": 1.4035087719298246e-06,
      "loss": 1.4041,
      "step": 1590
    },
    {
      "epoch": 9.32,
      "learning_rate": 1.345029239766082e-06,
      "loss": 1.397,
      "step": 1595
    },
    {
      "epoch": 9.35,
      "learning_rate": 1.2865497076023392e-06,
      "loss": 1.3874,
      "step": 1600
    },
    {
      "epoch": 9.38,
      "learning_rate": 1.2280701754385965e-06,
      "loss": 1.3742,
      "step": 1605
    },
    {
      "epoch": 9.41,
      "learning_rate": 1.1695906432748538e-06,
      "loss": 1.3782,
      "step": 1610
    },
    {
      "epoch": 9.44,
      "learning_rate": 1.111111111111111e-06,
      "loss": 1.3787,
      "step": 1615
    },
    {
      "epoch": 9.47,
      "learning_rate": 1.0526315789473685e-06,
      "loss": 1.3616,
      "step": 1620
    },
    {
      "epoch": 9.5,
      "learning_rate": 9.941520467836258e-07,
      "loss": 1.3762,
      "step": 1625
    },
    {
      "epoch": 9.53,
      "learning_rate": 9.356725146198831e-07,
      "loss": 1.3793,
      "step": 1630
    },
    {
      "epoch": 9.55,
      "learning_rate": 8.771929824561404e-07,
      "loss": 1.3743,
      "step": 1635
    },
    {
      "epoch": 9.58,
      "learning_rate": 8.187134502923977e-07,
      "loss": 1.411,
      "step": 1640
    },
    {
      "epoch": 9.61,
      "learning_rate": 7.60233918128655e-07,
      "loss": 1.3924,
      "step": 1645
    },
    {
      "epoch": 9.64,
      "learning_rate": 7.017543859649123e-07,
      "loss": 1.3624,
      "step": 1650
    },
    {
      "epoch": 9.67,
      "learning_rate": 6.432748538011696e-07,
      "loss": 1.3848,
      "step": 1655
    },
    {
      "epoch": 9.7,
      "learning_rate": 5.847953216374269e-07,
      "loss": 1.3912,
      "step": 1660
    },
    {
      "epoch": 9.73,
      "learning_rate": 5.263157894736843e-07,
      "loss": 1.4152,
      "step": 1665
    },
    {
      "epoch": 9.76,
      "learning_rate": 4.6783625730994155e-07,
      "loss": 1.3942,
      "step": 1670
    },
    {
      "epoch": 9.79,
      "learning_rate": 4.093567251461988e-07,
      "loss": 1.3855,
      "step": 1675
    },
    {
      "epoch": 9.82,
      "learning_rate": 3.5087719298245616e-07,
      "loss": 1.3804,
      "step": 1680
    },
    {
      "epoch": 9.85,
      "learning_rate": 2.9239766081871344e-07,
      "loss": 1.3904,
      "step": 1685
    },
    {
      "epoch": 9.88,
      "learning_rate": 2.3391812865497077e-07,
      "loss": 1.3886,
      "step": 1690
    },
    {
      "epoch": 9.91,
      "learning_rate": 1.7543859649122808e-07,
      "loss": 1.4217,
      "step": 1695
    },
    {
      "epoch": 9.93,
      "learning_rate": 1.1695906432748539e-07,
      "loss": 1.3938,
      "step": 1700
    },
    {
      "epoch": 9.96,
      "learning_rate": 5.847953216374269e-08,
      "loss": 1.3799,
      "step": 1705
    },
    {
      "epoch": 9.99,
      "learning_rate": 0.0,
      "loss": 1.3871,
      "step": 1710
    },
    {
      "epoch": 9.99,
      "eval_loss": 1.4218624830245972,
      "eval_runtime": 54.6025,
      "eval_samples_per_second": 96.791,
      "eval_steps_per_second": 3.04,
      "step": 1710
    },
    {
      "epoch": 9.99,
      "step": 1710,
      "total_flos": 1.862028915941376e+17,
      "train_loss": 1.5164859166619373,
      "train_runtime": 422176.3551,
      "train_samples_per_second": 2.075,
      "train_steps_per_second": 0.004
    }
  ],
  "max_steps": 1710,
  "num_train_epochs": 10,
  "total_flos": 1.862028915941376e+17,
  "trial_name": null,
  "trial_params": null
}