{
  "best_metric": 0.3943726122379303,
  "best_model_checkpoint": "output/eminem/checkpoint-2400",
  "epoch": 5.0,
  "global_step": 2400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.01, "learning_rate": 7.104344779818036e-05, "loss": 0.7686, "step": 5},
    {"epoch": 0.02, "learning_rate": 7.326215453099495e-05, "loss": 0.6054, "step": 10},
    {"epoch": 0.03, "learning_rate": 7.54759713075063e-05, "loss": 1.0539, "step": 15},
    {"epoch": 0.04, "learning_rate": 7.768257613962603e-05, "loss": 0.7369, "step": 20},
    {"epoch": 0.05, "learning_rate": 7.987965460359759e-05, "loss": 0.7577, "step": 25},
    {"epoch": 0.06, "learning_rate": 8.206490226751101e-05, "loss": 0.5693, "step": 30},
    {"epoch": 0.07, "learning_rate": 8.423602710832802e-05, "loss": 0.5772, "step": 35},
    {"epoch": 0.08, "learning_rate": 8.63907519158997e-05, "loss": 1.6227, "step": 40},
    {"epoch": 0.09, "learning_rate": 8.852681668145864e-05, "loss": 0.7695, "step": 45},
    {"epoch": 0.1, "learning_rate": 9.064198096803333e-05, "loss": 0.6463, "step": 50},
    {"epoch": 0.11, "learning_rate": 9.27340262603724e-05, "loss": 0.7689, "step": 55},
    {"epoch": 0.12, "learning_rate": 9.480075829185323e-05, "loss": 1.0531, "step": 60},
    {"epoch": 0.13, "learning_rate": 9.684000934597923e-05, "loss": 0.6097, "step": 65},
    {"epoch": 0.14, "learning_rate": 9.884964052999149e-05, "loss": 0.7093, "step": 70},
    {"epoch": 0.15, "learning_rate": 0.00010082754401829355, "loss": 0.5035, "step": 75},
    {"epoch": 0.16, "learning_rate": 0.00010277164526325172, "loss": 1.0989, "step": 80},
    {"epoch": 0.18, "learning_rate": 0.00010467990517112986, "loss": 1.0782, "step": 85},
    {"epoch": 0.19, "learning_rate": 0.00010655032224079468, "loss": 0.8885, "step": 90},
    {"epoch": 0.2, "learning_rate": 0.00010838093466302798, "loss": 0.9381, "step": 95},
    {"epoch": 0.21, "learning_rate": 0.00011016982237818681, "loss": 1.0053, "step": 100},
    {"epoch": 0.22, "learning_rate": 0.00011191510909009365, "loss": 0.6927, "step": 105},
    {"epoch": 0.23, "learning_rate": 0.00011361496423399298, "loss": 0.9509, "step": 110},
    {"epoch": 0.24, "learning_rate": 0.0001152676048965721, "loss": 0.6696, "step": 115},
    {"epoch": 0.25, "learning_rate": 0.0001168712976859924, "loss": 0.9941, "step": 120},
    {"epoch": 0.26, "learning_rate": 0.00011842436054997332, "loss": 0.5395, "step": 125},
    {"epoch": 0.27, "learning_rate": 0.00011992516454003592, "loss": 0.4659, "step": 130},
    {"epoch": 0.28, "learning_rate": 0.00012137213552004078, "loss": 0.8814, "step": 135},
    {"epoch": 0.29, "learning_rate": 0.00012276375581724122, "loss": 0.7903, "step": 140},
    {"epoch": 0.3, "learning_rate": 0.00012409856581412063, "loss": 0.8096, "step": 145},
    {"epoch": 0.31, "learning_rate": 0.00012537516547931506, "loss": 0.7711, "step": 150},
    {"epoch": 0.32, "learning_rate": 0.0001265922158360627, "loss": 0.7044, "step": 155},
    {"epoch": 0.33, "learning_rate": 0.00012774844036660105, "loss": 0.9893, "step": 160},
    {"epoch": 0.34, "learning_rate": 0.00012884262635106593, "loss": 0.9534, "step": 165},
    {"epoch": 0.35, "learning_rate": 0.00012987362613945392, "loss": 0.9657, "step": 170},
    {"epoch": 0.36, "learning_rate": 0.0001308403583553606, "loss": 0.8557, "step": 175},
    {"epoch": 0.37, "learning_rate": 0.00013174180903018608, "loss": 0.8623, "step": 180},
    {"epoch": 0.38, "learning_rate": 0.00013257703266665794, "loss": 0.9682, "step": 185},
    {"epoch": 0.39, "learning_rate": 0.0001333451532305167, "loss": 0.8295, "step": 190},
    {"epoch": 0.4, "learning_rate": 0.00013404536506936147, "loss": 0.6588, "step": 195},
    {"epoch": 0.41, "learning_rate": 0.0001346769337576652, "loss": 0.7481, "step": 200},
    {"epoch": 0.42, "learning_rate": 0.0001352391968670901, "loss": 0.8978, "step": 205},
    {"epoch": 0.43, "learning_rate": 0.00013573156466127666, "loss": 1.1897, "step": 210},
    {"epoch": 0.44, "learning_rate": 0.0001361535207144, "loss": 1.238, "step": 215},
    {"epoch": 0.45, "learning_rate": 0.00013650462245282926, "loss": 1.1457, "step": 220},
    {"epoch": 0.46, "learning_rate": 0.00013678450161932478, "loss": 0.8702, "step": 225},
    {"epoch": 0.47, "learning_rate": 0.00013699286465928987, "loss": 1.071, "step": 230},
    {"epoch": 0.48, "learning_rate": 0.00013712949302866842, "loss": 0.8298, "step": 235},
    {"epoch": 0.49, "learning_rate": 0.00013719424342316803, "loss": 0.9333, "step": 240},
    {"epoch": 0.51, "learning_rate": 0.00013718704792856656, "loss": 0.6711, "step": 245},
    {"epoch": 0.52, "learning_rate": 0.000137107914091944, "loss": 0.786, "step": 250},
    {"epoch": 0.53, "learning_rate": 0.00013695692491376736, "loss": 0.7021, "step": 255},
    {"epoch": 0.54, "learning_rate": 0.00013673423876083446, "loss": 1.0258, "step": 260},
    {"epoch": 0.55, "learning_rate": 0.0001364400892001687, "loss": 1.2098, "step": 265},
    {"epoch": 0.56, "learning_rate": 0.00013607478475403945, "loss": 1.0657, "step": 270},
    {"epoch": 0.57, "learning_rate": 0.00013563870857636603, "loss": 0.664, "step": 275},
    {"epoch": 0.58, "learning_rate": 0.00013513231805083994, "loss": 0.8538, "step": 280},
    {"epoch": 0.59, "learning_rate": 0.00013455614431119578, "loss": 0.8278, "step": 285},
    {"epoch": 0.6, "learning_rate": 0.00013391079168412009, "loss": 0.6271, "step": 290},
    {"epoch": 0.61, "learning_rate": 0.00013319693705540213, "loss": 0.9119, "step": 295},
    {"epoch": 0.62, "learning_rate": 0.00013241532915997056, "loss": 0.5942, "step": 300},
    {"epoch": 0.63, "learning_rate": 0.0001315667877965762, "loss": 0.8013, "step": 305},
    {"epoch": 0.64, "learning_rate": 0.00013065220296793026, "loss": 0.6856, "step": 310},
    {"epoch": 0.65, "learning_rate": 0.00012967253394722351, "loss": 0.8229, "step": 315},
    {"epoch": 0.66, "learning_rate": 0.0001286288082719761, "loss": 0.9531, "step": 320},
    {"epoch": 0.67, "learning_rate": 0.00012752212066629144, "loss": 1.3199, "step": 325},
    {"epoch": 0.68, "learning_rate": 0.00012635363189264417, "loss": 1.1022, "step": 330},
    {"epoch": 0.69, "learning_rate": 0.00012512456753439896, "loss": 0.9544, "step": 335},
    {"epoch": 0.7, "learning_rate": 0.00012383621671034582, "loss": 0.9981, "step": 340},
    {"epoch": 0.71, "learning_rate": 0.00012248993072259197, "loss": 0.8137, "step": 345},
    {"epoch": 0.72, "learning_rate": 0.00012108712163922375, "loss": 0.9984, "step": 350},
    {"epoch": 0.73, "learning_rate": 0.00011962926081325661, "loss": 1.0316, "step": 355},
    {"epoch": 0.74, "learning_rate": 0.0001181178773393795, "loss": 0.8207, "step": 360},
    {"epoch": 0.75, "learning_rate": 0.00011655455645015076, "loss": 0.6663, "step": 365},
    {"epoch": 0.76, "learning_rate": 0.00011494093785329717, "loss": 0.8862, "step": 370},
    {"epoch": 0.77, "learning_rate": 0.00011327871401190472, "loss": 0.7749, "step": 375},
    {"epoch": 0.78, "learning_rate": 0.00011156962836924195, "loss": 1.0212, "step": 380},
    {"epoch": 0.79, "learning_rate": 0.0001098154735201431, "loss": 0.9901, "step": 385},
    {"epoch": 0.8, "learning_rate": 0.00010801808933080577, "loss": 0.6503, "step": 390},
    {"epoch": 0.81, "learning_rate": 0.0001061793610090449, "loss": 0.9526, "step": 395},
    {"epoch": 0.82, "learning_rate": 0.00010430121712695815, "loss": 0.9911, "step": 400},
    {"epoch": 0.84, "learning_rate": 0.00010238562759812645, "loss": 0.8466, "step": 405},
    {"epoch": 0.85, "learning_rate": 0.00010043460161143304, "loss": 0.8538, "step": 410},
    {"epoch": 0.86, "learning_rate": 9.845018552372211e-05, "loss": 1.0898, "step": 415},
    {"epoch": 0.87, "learning_rate": 9.643446071344141e-05, "loss": 0.6029, "step": 420},
    {"epoch": 0.88, "learning_rate": 9.438954139756169e-05, "loss": 0.6565, "step": 425},
    {"epoch": 0.89, "learning_rate": 9.231757241406026e-05, "loss": 0.9035, "step": 430},
    {"epoch": 0.9, "learning_rate": 9.022072697227865e-05, "loss": 0.6143, "step": 435},
    {"epoch": 0.91, "learning_rate": 8.810120437353156e-05, "loss": 0.7718, "step": 440},
    {"epoch": 0.92, "learning_rate": 8.596122770434176e-05, "loss": 0.9339, "step": 445},
    {"epoch": 0.93, "learning_rate": 8.380304150471618e-05, "loss": 0.6168, "step": 450},
    {"epoch": 0.94, "learning_rate": 8.162890941395579e-05, "loss": 0.917, "step": 455},
    {"epoch": 0.95, "learning_rate": 7.944111179639565e-05, "loss": 0.9396, "step": 460},
    {"epoch": 0.96, "learning_rate": 7.724194334962525e-05, "loss": 0.8879, "step": 465},
    {"epoch": 0.97, "learning_rate": 7.50337106976521e-05, "loss": 0.7428, "step": 470},
    {"epoch": 0.98, "learning_rate": 7.281872997159678e-05, "loss": 0.8685, "step": 475},
    {"epoch": 0.99, "learning_rate": 7.059932438036807e-05, "loss": 0.6925, "step": 480},
    {"epoch": 1.0, "learning_rate": 6.837782177395701e-05, "loss": 0.7737, "step": 485},
    {"epoch": 1.0, "eval_loss": 0.8009498715400696, "eval_runtime": 7.9645, "eval_samples_per_second": 81.487, "eval_steps_per_second": 10.296, "step": 485},
    {"epoch": 1.01, "learning_rate": 6.615655220182037e-05, "loss": 0.9648, "step": 490},
    {"epoch": 1.02, "learning_rate": 6.393784546900578e-05, "loss": 0.7027, "step": 495},
    {"epoch": 1.03, "learning_rate": 6.172402869249251e-05, "loss": 0.6923, "step": 500},
    {"epoch": 1.04, "learning_rate": 5.9517423860374716e-05, "loss": 0.6036, "step": 505},
    {"epoch": 1.05, "learning_rate": 5.7320345396401225e-05, "loss": 0.5738, "step": 510},
    {"epoch": 1.06, "learning_rate": 5.5135097732487806e-05, "loss": 0.522, "step": 515},
    {"epoch": 1.07, "learning_rate": 5.2963972891672695e-05, "loss": 0.6331, "step": 520},
    {"epoch": 1.08, "learning_rate": 5.080924808409912e-05, "loss": 0.8264, "step": 525},
    {"epoch": 1.09, "learning_rate": 4.867318331854207e-05, "loss": 0.5139, "step": 530},
    {"epoch": 1.1, "learning_rate": 4.655801903196737e-05, "loss": 0.4191, "step": 535},
    {"epoch": 1.11, "learning_rate": 4.4465973739628306e-05, "loss": 0.7371, "step": 540},
    {"epoch": 1.12, "learning_rate": 4.239924170814745e-05, "loss": 0.7683, "step": 545},
    {"epoch": 1.13, "learning_rate": 4.0359990654021446e-05, "loss": 0.6885, "step": 550},
    {"epoch": 1.14, "learning_rate": 3.835035947000743e-05, "loss": 0.573, "step": 555},
    {"epoch": 1.15, "learning_rate": 3.6372455981705394e-05, "loss": 0.5655, "step": 560},
    {"epoch": 1.16, "learning_rate": 3.442835473674722e-05, "loss": 0.6614, "step": 565},
    {"epoch": 1.18, "learning_rate": 3.2520094828869116e-05, "loss": 0.553, "step": 570},
    {"epoch": 1.19, "learning_rate": 3.064967775920593e-05, "loss": 0.9856, "step": 575},
    {"epoch": 1.2, "learning_rate": 2.881906533697264e-05, "loss": 0.7987, "step": 580},
    {"epoch": 1.21, "learning_rate": 2.7030177621813776e-05, "loss": 0.4053, "step": 585},
    {"epoch": 1.22, "learning_rate": 2.5284890909906917e-05, "loss": 0.5987, "step": 590},
    {"epoch": 1.23, "learning_rate": 2.3585035766007593e-05, "loss": 0.9376, "step": 595},
    {"epoch": 1.24, "learning_rate": 2.1932395103427002e-05, "loss": 0.6322, "step": 600},
    {"epoch": 1.25, "learning_rate": 2.032870231400813e-05, "loss": 0.6378, "step": 605},
    {"epoch": 1.26, "learning_rate": 1.8775639450025844e-05, "loss": 0.5735, "step": 610},
    {"epoch": 1.27, "learning_rate": 1.7274835459963273e-05, "loss": 1.0382, "step": 615},
    {"epoch": 1.28, "learning_rate": 1.582786447995969e-05, "loss": 0.6316, "step": 620},
    {"epoch": 1.29, "learning_rate": 1.4436244182758039e-05, "loss": 0.6119, "step": 625},
    {"epoch": 1.3, "learning_rate": 1.3101434185879816e-05, "loss": 0.9124, "step": 630},
    {"epoch": 1.31, "learning_rate": 1.1824834520685365e-05, "loss": 0.5674, "step": 635},
    {"epoch": 1.32, "learning_rate": 1.060778416393771e-05, "loss": 0.8984, "step": 640},
    {"epoch": 1.33, "learning_rate": 9.451559633399314e-06, "loss": 0.4093, "step": 645},
    {"epoch": 1.34, "learning_rate": 8.35737364893442e-06, "loss": 1.0756, "step": 650},
    {"epoch": 1.35, "learning_rate": 7.326373860545556e-06, "loss": 0.7388, "step": 655},
    {"epoch": 1.36, "learning_rate": 6.359641644638905e-06, "loss": 0.7588, "step": 660},
    {"epoch": 1.37, "learning_rate": 5.4581909698134475e-06, "loss": 0.6298, "step": 665},
    {"epoch": 1.38, "learning_rate": 4.622967333341646e-06, "loss": 0.5269, "step": 670},
    {"epoch": 1.39, "learning_rate": 3.854846769483559e-06, "loss": 0.8377, "step": 675},
    {"epoch": 1.4, "learning_rate": 3.1546349306387612e-06, "loss": 0.5796, "step": 680},
    {"epoch": 1.41, "learning_rate": 2.5230662423349877e-06, "loss": 0.4994, "step": 685},
    {"epoch": 1.42, "learning_rate": 1.9608031329100813e-06, "loss": 0.659, "step": 690},
    {"epoch": 1.43, "learning_rate": 1.4684353387235077e-06, "loss": 0.5819, "step": 695},
    {"epoch": 1.44, "learning_rate": 1.0464792855997834e-06, "loss": 0.5023, "step": 700},
    {"epoch": 1.45, "learning_rate": 6.953775471708487e-07, "loss": 0.5082, "step": 705},
    {"epoch": 1.46, "learning_rate": 4.154983806750897e-07, "loss": 0.6839, "step": 710},
    {"epoch": 1.47, "learning_rate": 2.071353407100288e-07, "loss": 0.7121, "step": 715},
    {"epoch": 1.48, "learning_rate": 7.050697133161242e-08, "loss": 0.4166, "step": 720},
    {"epoch": 1.49, "learning_rate": 5.756576831951343e-09, "loss": 0.7094, "step": 725},
    {"epoch": 1.51, "learning_rate": 1.295207143343513e-08, "loss": 0.6831, "step": 730},
    {"epoch": 1.52, "learning_rate": 9.208590805595201e-08, "loss": 0.6554, "step": 735},
    {"epoch": 1.53, "learning_rate": 2.4307508623258407e-07, "loss": 0.3354, "step": 740},
    {"epoch": 1.54, "learning_rate": 4.657612391656818e-07, "loss": 0.4874, "step": 745},
    {"epoch": 1.55, "learning_rate": 7.59910799831185e-07, "loss": 0.6073, "step": 750},
    {"epoch": 1.56, "learning_rate": 1.1252152459604225e-06, "loss": 0.8413, "step": 755},
    {"epoch": 1.57, "learning_rate": 1.5612914236342025e-06, "loss": 0.3873, "step": 760},
    {"epoch": 1.58, "learning_rate": 2.0676819491598904e-06, "loss": 0.7879, "step": 765},
    {"epoch": 1.59, "learning_rate": 2.643855688804542e-06, "loss": 0.6646, "step": 770},
    {"epoch": 1.6, "learning_rate": 3.2892083158796966e-06, "loss": 0.5732, "step": 775},
    {"epoch": 1.61, "learning_rate": 4.003062944597618e-06, "loss": 0.5925, "step": 780},
    {"epoch": 1.62, "learning_rate": 4.784670840029167e-06, "loss": 0.5607, "step": 785},
    {"epoch": 1.63, "learning_rate": 5.63321220342353e-06, "loss": 0.7195, "step": 790},
    {"epoch": 1.64, "learning_rate": 6.547797032069429e-06, "loss": 0.4434, "step": 795},
    {"epoch": 1.65, "learning_rate": 7.527466052776141e-06, "loss": 0.6778, "step": 800},
    {"epoch": 1.66, "learning_rate": 8.571191728024455e-06, "loss": 0.7627, "step": 805},
    {"epoch": 1.67, "learning_rate": 9.677879333708184e-06, "loss": 0.7002, "step": 810},
    {"epoch": 1.68, "learning_rate": 1.0846368107356494e-05, "loss": 0.5221, "step": 815},
    {"epoch": 1.69, "learning_rate": 1.2075432465600628e-05, "loss": 0.5574, "step": 820},
    {"epoch": 1.7, "learning_rate": 1.336378328965373e-05, "loss": 0.7088, "step": 825},
    {"epoch": 1.71, "learning_rate": 1.4710069277408788e-05, "loss": 0.7595, "step": 830},
    {"epoch": 1.72, "learning_rate": 1.6112878360775777e-05, "loss": 0.3203, "step": 835},
    {"epoch": 1.73, "learning_rate": 1.7570739186742892e-05, "loss": 0.5597, "step": 840},
    {"epoch": 1.74, "learning_rate": 1.9082122660619998e-05, "loss": 0.5739, "step": 845},
    {"epoch": 1.75, "learning_rate": 2.0645443549848698e-05, "loss": 0.531, "step": 850},
    {"epoch": 1.76, "learning_rate": 2.2259062146702276e-05, "loss": 0.5565, "step": 855},
    {"epoch": 1.77, "learning_rate": 2.392128598809619e-05, "loss": 0.7898, "step": 860},
    {"epoch": 1.78, "learning_rate": 2.5630371630759003e-05, "loss": 0.8282, "step": 865},
    {"epoch": 1.79, "learning_rate": 2.7384526479857852e-05, "loss": 0.6593, "step": 870},
    {"epoch": 1.8, "learning_rate": 2.9181910669195226e-05, "loss": 0.5809, "step": 875},
    {"epoch": 1.81, "learning_rate": 3.102063899095449e-05, "loss": 0.5236, "step": 880},
    {"epoch": 1.82, "learning_rate": 3.289878287304121e-05, "loss": 0.8349, "step": 885},
    {"epoch": 1.84, "learning_rate": 3.481437240187291e-05, "loss": 0.5913, "step": 890},
    {"epoch": 1.85, "learning_rate": 3.6765398388566315e-05, "loss": 0.7902, "step": 895},
    {"epoch": 1.86, "learning_rate": 3.874981447627722e-05, "loss": 0.447, "step": 900},
    {"epoch": 1.87, "learning_rate": 4.076553928655969e-05, "loss": 0.5064, "step": 905},
    {"epoch": 1.88, "learning_rate": 4.281045860243763e-05, "loss": 0.6135, "step": 910},
    {"epoch": 1.89, "learning_rate": 4.4882427585940885e-05, "loss": 0.7355, "step": 915},
    {"epoch": 1.9, "learning_rate": 4.69792730277225e-05, "loss": 0.6229, "step": 920},
    {"epoch": 1.91, "learning_rate": 4.909879562646775e-05, "loss": 0.5816, "step": 925},
    {"epoch": 1.92, "learning_rate": 5.123877229565941e-05, "loss": 0.558, "step": 930},
    {"epoch": 1.93, "learning_rate": 5.3396958495283104e-05, "loss": 0.5434, "step": 935},
    {"epoch": 1.94, "learning_rate": 5.557109058604349e-05, "loss": 0.7625, "step": 940},
    {"epoch": 1.95, "learning_rate": 5.7758888203603614e-05, "loss": 0.6406, "step": 945},
    {"epoch": 1.96, "learning_rate": 5.9958056650374005e-05, "loss": 0.6691, "step": 950},
    {"epoch": 1.97, "learning_rate": 6.216628930234716e-05, "loss": 0.6314, "step": 955},
    {"epoch": 1.98, "learning_rate": 6.438127002840443e-05, "loss": 0.7209, "step": 960},
    {"epoch": 1.99, "learning_rate": 6.660067561963313e-05, "loss": 0.5188, "step": 965},
    {"epoch": 2.0, "learning_rate": 6.882217822604419e-05, "loss": 0.5676, "step": 970},
    {"epoch": 2.0, "eval_loss": 0.8053014278411865, "eval_runtime": 7.9895, "eval_samples_per_second": 81.231, "eval_steps_per_second": 10.263, "step": 970},
    {"epoch": 2.01, "learning_rate": 7.104344779818084e-05, "loss": 0.8119, "step": 975},
    {"epoch": 2.02, "learning_rate": 7.326215453099347e-05, "loss": 0.8241, "step": 980},
    {"epoch": 2.03, "learning_rate": 7.547597130750675e-05, "loss": 0.4897, "step": 985},
    {"epoch": 2.04, "learning_rate": 7.768257613962455e-05, "loss": 0.7487, "step": 990},
    {"epoch": 2.05, "learning_rate": 7.987965460359805e-05, "loss": 0.7651, "step": 995},
    {"epoch": 2.06, "learning_rate": 8.206490226751148e-05, "loss": 0.6692, "step": 1000},
    {"epoch": 2.07, "learning_rate": 8.423602710832848e-05, "loss": 0.7985, "step": 1005},
    {"epoch": 2.08, "learning_rate": 8.639075191590017e-05, "loss": 0.5855, "step": 1010},
    {"epoch": 2.09, "learning_rate": 8.852681668145908e-05, "loss": 0.6353, "step": 1015},
    {"epoch": 2.1, "learning_rate": 9.064198096803377e-05, "loss": 0.552, "step": 1020},
    {"epoch": 2.11, "learning_rate": 9.273402626037102e-05, "loss": 0.4755, "step": 1025},
    {"epoch": 2.12, "learning_rate": 9.480075829185367e-05, "loss": 0.6665, "step": 1030},
    {"epoch": 2.13, "learning_rate": 9.684000934597788e-05, "loss": 0.3769, "step": 1035},
    {"epoch": 2.14, "learning_rate": 9.884964052999192e-05, "loss": 0.6127, "step": 1040},
    {"epoch": 2.15, "learning_rate": 0.00010082754401829396, "loss": 0.7927, "step": 1045},
    {"epoch": 2.16, "learning_rate": 0.00010277164526325214, "loss": 0.5635, "step": 1050},
    {"epoch": 2.18, "learning_rate": 0.00010467990517113026, "loss": 0.694, "step": 1055},
    {"epoch": 2.19, "learning_rate": 0.00010655032224079507, "loss": 0.75, "step": 1060},
    {"epoch": 2.2, "learning_rate": 0.00010838093466302835, "loss": 0.4615, "step": 1065},
    {"epoch": 2.21, "learning_rate": 0.00011016982237818718, "loss": 0.4177, "step": 1070},
    {"epoch": 2.22, "learning_rate": 0.00011191510909009402, "loss": 0.7636, "step": 1075},
    {"epoch": 2.23, "learning_rate": 0.00011361496423399185, "loss": 0.5608, "step": 1080},
    {"epoch": 2.24, "learning_rate": 0.00011526760489657245, "loss": 0.4228, "step": 1085},
    {"epoch": 2.25, "learning_rate": 0.00011687129768599134, "loss": 0.6887, "step": 1090},
    {"epoch": 2.26, "learning_rate": 0.00011842436054997366, "loss": 0.4605, "step": 1095},
    {"epoch": 2.27, "learning_rate": 0.00011992516454003623, "loss": 0.5089, "step": 1100},
    {"epoch": 2.28, "learning_rate": 0.00012137213552004109, "loss": 0.7078, "step": 1105},
    {"epoch": 2.29, "learning_rate": 0.00012276375581724152, "loss": 0.5178, "step": 1110},
    {"epoch": 2.3, "learning_rate": 0.0001240985658141209, "loss": 0.6848, "step": 1115},
    {"epoch": 2.31, "learning_rate": 0.0001253751654793153, "loss": 0.5177, "step": 1120},
    {"epoch": 2.32, "learning_rate": 0.00012659221583606188, "loss": 0.8266, "step": 1125},
    {"epoch": 2.33, "learning_rate": 0.0001277484403666013, "loss": 0.5183, "step": 1130},
    {"epoch": 2.34, "learning_rate": 0.00012884262635106522, "loss": 0.8944, "step": 1135},
    {"epoch": 2.35, "learning_rate": 0.00012987362613945414, "loss": 0.8195, "step": 1140},
    {"epoch": 2.36, "learning_rate": 0.00013084035835536078, "loss": 0.8404, "step": 1145},
    {"epoch": 2.37, "learning_rate": 0.00013174180903018627, "loss": 0.6088, "step": 1150},
    {"epoch": 2.38, "learning_rate": 0.00013257703266665808, "loss": 0.6745, "step": 1155},
    {"epoch": 2.39, "learning_rate": 0.00013334515323051684, "loss": 0.6451, "step": 1160},
    {"epoch": 2.4, "learning_rate": 0.0001340453650693616, "loss": 0.7023, "step": 1165},
    {"epoch": 2.41, "learning_rate": 0.00013467693375766531, "loss": 0.8257, "step": 1170},
    {"epoch": 2.42, "learning_rate": 0.0001352391968670902, "loss": 0.5857, "step": 1175},
    {"epoch": 2.43, "learning_rate": 0.00013573156466127633, "loss": 0.3617, "step": 1180},
    {"epoch": 2.44, "learning_rate": 0.0001361535207144001, "loss": 0.7152, "step": 1185},
    {"epoch": 2.45, "learning_rate": 0.00013650462245282905, "loss": 0.7494, "step": 1190},
    {"epoch": 2.46, "learning_rate": 0.0001367845016193248, "loss": 0.5441, "step": 1195},
    {"epoch": 2.47, "learning_rate": 0.00013699286465928992, "loss": 0.916, "step": 1200},
    {"epoch": 2.48, "learning_rate": 0.00013712949302866845, "loss": 0.6266, "step": 1205},
    {"epoch": 2.49, "learning_rate": 0.00013719424342316803, "loss": 0.8704, "step": 1210},
    {"epoch": 2.51, "learning_rate": 0.00013718704792856654, "loss": 0.6092, "step": 1215},
    {"epoch": 2.52, "learning_rate": 0.00013710791409194398, "loss": 0.6756, "step": 1220},
    {"epoch": 2.53, "learning_rate": 0.00013695692491376746, "loss": 0.6538, "step": 1225},
    {"epoch": 2.54, "learning_rate": 0.0001367342387608344, "loss": 0.5702, "step": 1230},
    {"epoch": 2.55, "learning_rate": 0.0001364400892001689, "loss": 0.8016, "step": 1235},
    {"epoch": 2.56, "learning_rate": 0.00013607478475403972, "loss": 0.6645, "step": 1240},
    {"epoch": 2.57, "learning_rate": 0.00013563870857636595, "loss": 0.9163, "step": 1245},
    {"epoch": 2.58, "learning_rate": 0.0001351323180508398, "loss": 0.6785, "step": 1250},
    {"epoch": 2.59, "learning_rate": 0.00013455614431119567, "loss": 0.7625, "step": 1255},
    {"epoch": 2.6, "learning_rate": 0.00013391079168411995, "loss": 0.5317, "step": 1260},
    {"epoch": 2.61, "learning_rate": 0.00013319693705540197, "loss": 0.6176, "step": 1265},
    {"epoch": 2.62, "learning_rate": 0.0001324153291599711, "loss": 0.4044, "step": 1270},
    {"epoch": 2.63, "learning_rate": 0.000131566787796576, "loss": 0.6815, "step": 1275},
    {"epoch": 2.64, "learning_rate": 0.0001306522029679309, "loss": 0.7421, "step": 1280},
    {"epoch": 2.65, "learning_rate": 0.0001296725339472242, "loss": 0.9262, "step": 1285},
    {"epoch": 2.66, "learning_rate": 0.00012862880827197592, "loss": 0.5789, "step": 1290},
    {"epoch": 2.67, "learning_rate": 0.0001275221206662922, "loss": 0.6176, "step": 1295},
    {"epoch": 2.68, "learning_rate": 0.00012635363189264392, "loss": 0.5932, "step": 1300},
    {"epoch": 2.69, "learning_rate": 0.00012512456753439868, "loss": 0.5771, "step": 1305},
    {"epoch": 2.7, "learning_rate": 0.00012383621671034555, "loss": 0.5197, "step": 1310},
    {"epoch": 2.71, "learning_rate": 0.00012248993072259167, "loss": 0.5347, "step": 1315},
    {"epoch": 2.72, "learning_rate": 0.00012108712163922345, "loss": 0.7577, "step": 1320},
    {"epoch": 2.73, "learning_rate": 0.0001196292608132576, "loss": 0.6237, "step": 1325},
    {"epoch": 2.74, "learning_rate": 0.00011811787733938051, "loss": 0.9522, "step": 1330},
    {"epoch": 2.75, "learning_rate": 0.00011655455645015182, "loss": 0.6511, "step": 1335},
    {"epoch": 2.76, "learning_rate": 0.00011494093785329826, "loss": 0.7683, "step": 1340},
    {"epoch": 2.77, "learning_rate": 0.00011327871401190438, "loss": 0.8408, "step": 1345},
    {"epoch": 2.78, "learning_rate": 0.00011156962836924156, "loss": 0.7132, "step": 1350},
    {"epoch": 2.79, "learning_rate": 0.00010981547352014274, "loss": 0.7741, "step": 1355},
    {"epoch": 2.8, "learning_rate": 0.00010801808933080536, "loss": 0.6007, "step": 1360},
    {"epoch": 2.81, "learning_rate": 0.0001061793610090445, "loss": 0.7367, "step": 1365},
    {"epoch": 2.82, "learning_rate": 0.00010430121712695943, "loss": 0.7996, "step": 1370},
    {"epoch": 2.84, "learning_rate": 0.00010238562759812602, "loss": 0.5623, "step": 1375},
    {"epoch": 2.85, "learning_rate": 0.00010043460161143261, "loss": 0.5343, "step": 1380},
    {"epoch": 2.86, "learning_rate": 9.845018552372344e-05, "loss": 1.0444, "step": 1385},
    {"epoch": 2.87, "learning_rate": 9.643446071344098e-05, "loss": 0.6887, "step": 1390},
    {"epoch": 2.88, "learning_rate": 9.438954139756306e-05, "loss": 0.8487, "step": 1395},
    {"epoch": 2.89, "learning_rate": 9.231757241405981e-05, "loss": 0.7323, "step": 1400},
    {"epoch": 2.9, "learning_rate": 9.02207269722782e-05, "loss": 0.6256, "step": 1405},
    {"epoch": 2.91, "learning_rate": 8.81012043735311e-05, "loss": 0.832, "step": 1410},
    {"epoch": 2.92, "learning_rate": 8.59612277043413e-05, "loss": 0.539, "step": 1415},
    {"epoch": 2.93, "learning_rate": 8.380304150471572e-05, "loss": 0.6064, "step": 1420},
    {"epoch": 2.94, "learning_rate": 8.162890941395533e-05, "loss": 0.6207, "step": 1425},
    {"epoch": 2.95, "learning_rate": 7.944111179639712e-05, "loss": 0.5802, "step": 1430},
    {"epoch": 2.96, "learning_rate": 7.72419433496248e-05, "loss": 0.6887, "step": 1435},
    {"epoch": 2.97, "learning_rate": 7.503371069765357e-05, "loss": 0.8306, "step": 1440},
    {"epoch": 2.98, "learning_rate": 7.28187299715963e-05, "loss": 0.4574, "step": 1445},
    {"epoch": 2.99, "learning_rate": 7.059932438036761e-05, "loss": 0.6537, "step": 1450},
    {"epoch": 3.0, "learning_rate": 6.837782177395655e-05, "loss": 0.8464, "step": 1455},
    {"epoch": 3.0, "eval_loss": 0.7975038886070251, "eval_runtime": 7.9678, "eval_samples_per_second": 81.452, "eval_steps_per_second": 10.291, "step": 1455},
    {"epoch": 3.02, "learning_rate": 3.533340602884326e-05, "loss": 0.6584, "step": 1460},
    {"epoch": 3.03, "learning_rate": 3.340418885898272e-05, "loss": 0.9188, "step": 1465},
    {"epoch": 3.04, "learning_rate": 3.1512039922545005e-05, "loss": 0.7885, "step": 1470},
    {"epoch": 3.05, "learning_rate": 2.9658952031065493e-05, "loss": 0.6785, "step": 1475},
    {"epoch": 3.06, "learning_rate": 2.784687685697823e-05, "loss": 0.9316, "step": 1480},
    {"epoch": 3.07, "learning_rate": 2.6077722878123704e-05, "loss": 0.7074, "step": 1485},
    {"epoch": 3.08, "learning_rate": 2.4353353367727083e-05, "loss": 0.5083, "step": 1490},
    {"epoch": 3.09, "learning_rate": 2.2675584432009507e-05, "loss": 0.7031, "step": 1495},
    {"epoch": 3.1, "learning_rate": 2.104618309745989e-05, "loss": 0.5955, "step": 1500},
    {"epoch": 3.11, "learning_rate": 1.94668654497983e-05, "loss": 0.6515, "step": 1505},
    {"epoch": 3.12, "learning_rate": 1.7939294826601462e-05, "loss": 0.6982, "step": 1510},
    {"epoch": 3.13, "learning_rate": 1.6465080065474947e-05, "loss": 0.7575, "step": 1515},
    {"epoch": 3.14, "learning_rate": 1.504577380961979e-05, "loss": 0.7294, "step": 1520},
    {"epoch": 3.15, "learning_rate": 1.3682870872588951e-05, "loss": 0.566, "step": 1525},
    {"epoch": 3.16, "learning_rate": 1.2377806663955526e-05, "loss": 0.4467, "step": 1530},
    {"epoch": 3.17, "learning_rate": 1.1131955677535227e-05, "loss": 0.8649, "step": 1535},
    {"epoch": 3.18, "learning_rate": 9.946630043766268e-06, "loss": 0.6318, "step": 1540},
    {"epoch": 3.19, "learning_rate": 8.8230781477809e-06, "loss": 0.5197, "step": 1545},
    {"epoch": 3.2, "learning_rate": 7.762483314597675e-06, "loss": 0.7195, "step": 1550},
    {"epoch": 3.21, "learning_rate": 6.765962562849582e-06, "loss": 0.6544, "step": 1555},
    {"epoch": 3.22, "learning_rate": 5.834565428339295e-06, "loss": 0.6373, "step": 1560},
    {"epoch": 3.23, "learning_rate": 4.969272858664371e-06, "loss": 0.7969, "step": 1565},
    {"epoch": 3.24, "learning_rate": 4.170996180083756e-06, "loss": 0.5398, "step": 1570},
    {"epoch": 3.25, "learning_rate": 3.440576137712397e-06, "loss": 0.6392, "step": 1575},
    {"epoch": 3.26, "learning_rate": 2.778782010045892e-06, "loss": 0.8183, "step": 1580},
    {"epoch": 3.27, "learning_rate": 2.186310798754732e-06, "loss": 0.4459, "step": 1585},
    {"epoch": 3.29, "learning_rate": 1.6637864946060192e-06, "loss": 0.6059, "step": 1590},
    {"epoch": 3.3, "learning_rate": 1.2117594202713427e-06, "loss": 0.9818, "step": 1595},
    {"epoch": 3.31, "learning_rate": 8.307056507336243e-07, "loss": 0.3983, "step": 1600},
    {"epoch": 3.32, "learning_rate": 5.21026511876941e-07, "loss": 0.9094, "step": 1605},
    {"epoch": 3.33, "learning_rate": 2.8304815781598153e-07, "loss": 0.5006, "step": 1610},
    {"epoch": 3.34, "learning_rate": 1.1702122738749632e-07, "loss": 0.5685, "step": 1615},
    {"epoch": 3.35, "learning_rate": 2.312058017904961e-08, "loss": 0.7558, "step": 1620},
    {"epoch": 3.36, "learning_rate": 1.445112365939849e-09, "loss": 0.5613, "step": 1625},
    {"epoch": 3.37, "learning_rate": 5.2017652553981476e-08, "loss": 0.7474, "step": 1630},
    {"epoch": 3.38, "learning_rate": 1.7478493773641084e-07, "loss": 0.7388, "step": 1635},
    {"epoch": 3.39, "learning_rate": 3.69617669391192e-07, "loss": 0.5788, "step": 1640},
    {"epoch": 3.4, "learning_rate": 6.363106496559428e-07, "loss": 0.6158, "step": 1645},
    {"epoch": 3.41, "learning_rate": 9.745829974457439e-07, "loss": 0.6697, "step": 1650},
    {"epoch": 3.42, "learning_rate": 1.3840784442740293e-06, "loss": 0.5538, "step": 1655},
    {"epoch": 3.43, "learning_rate": 1.8643657094772689e-06, "loss": 0.7529, "step": 1660},
    {"epoch": 3.44, "learning_rate": 2.4149389544374383e-06, "loss": 0.5164, "step": 1665},
    {"epoch": 3.45, "learning_rate": 3.035218315332177e-06, "loss": 0.5686, "step": 1670},
    {"epoch": 3.46, "learning_rate": 3.7245505138493062e-06, "loss": 0.7307, "step": 1675},
    {"epoch": 3.47, "learning_rate": 4.482209545215727e-06, "loss": 0.4993, "step": 1680},
    {"epoch": 3.48, "learning_rate": 5.307397442832051e-06, "loss": 0.5905, "step": 1685},
    {"epoch": 3.49, "learning_rate": 6.199245118679951e-06, "loss": 0.7372, "step": 1690},
    {"epoch": 3.5, "learning_rate": 7.15681327866027e-06, "loss": 0.7588, "step": 1695},
    {"epoch": 3.51, "learning_rate": 8.179093411845718e-06, "loss": 0.7609, "step": 1700},
    {"epoch": 3.52, "learning_rate": 9.265008852652901e-06, "loss": 0.4208, "step": 1705},
    {"epoch": 3.53, "learning_rate": 1.0413415914780494e-05, "loss": 0.5304, "step": 1710},
    {"epoch": 3.54, "learning_rate": 1.1623105095742268e-05, "loss": 0.47, "step": 1715},
    {"epoch": 3.55, "learning_rate": 1.2892802350718737e-05, "loss": 0.4479, "step": 1720},
    {"epoch": 3.56, "learning_rate": 1.4221170434376129e-05, "loss": 0.3907, "step": 1725},
    {"epoch": 3.57, "learning_rate": 1.5606810309252903e-05, "loss": 0.8871, "step": 1730},
    {"epoch": 3.58, "learning_rate": 1.7048262619230822e-05, "loss": 0.5595, "step": 1735},
    {"epoch": 3.6, "learning_rate": 1.8544009226529548e-05, "loss": 0.6169, "step": 1740},
    {"epoch": 3.61, "learning_rate": 2.0092474810601755e-05, "loss": 0.557, "step": 1745},
    {"epoch": 3.62, "learning_rate": 2.169202852727143e-05, "loss": 0.8663, "step": 1750},
    {"epoch": 3.63, "learning_rate": 2.334098572632982e-05, "loss": 0.754, "step": 1755},
    {"epoch": 3.64, "learning_rate": 2.5037609725813137e-05, "loss": 0.7106, "step": 1760},
    {"epoch": 3.65, "learning_rate": 2.6780113641080935e-05, "loss": 0.8902, "step": 1765},
    {"epoch": 3.66, "learning_rate": 2.8566662266755925e-05, "loss": 0.5382, "step": 1770},
    {"epoch": 3.67, "learning_rate": 3.039537400956214e-05, "loss": 0.5907, "step": 1775},
    {"epoch": 3.68, "learning_rate": 3.226432287002555e-05, "loss": 1.0195, "step": 1780},
    {"epoch": 3.69, "learning_rate": 3.417154047093731e-05, "loss": 0.5186, "step": 1785},
    {"epoch": 3.7, "learning_rate": 3.611501813044067e-05, "loss": 0.4889, "step": 1790},
    {"epoch": 3.71, "learning_rate": 3.8092708977579776e-05, "loss": 0.6288, "step": 1795},
    {"epoch": 3.72, "learning_rate": 4.010253010806977e-05, "loss": 0.7543, "step": 1800},
    {"epoch": 3.73, "learning_rate": 4.214236477799971e-05, "loss": 0.5513, "step": 1805},
    {"epoch": 3.74, "learning_rate": 4.421006463320525e-05, "loss": 0.6435, "step": 1810},
    {"epoch": 3.75, "learning_rate": 4.630345197188684e-05, "loss": 0.4894, "step": 1815},
    {"epoch": 3.76, "learning_rate": 4.8420322038201655e-05, "loss": 0.4861, "step": 1820},
    {"epoch": 3.77, "learning_rate": 5.0558445344283775e-05, "loss": 0.634, "step": 1825},
    {"epoch": 3.78, "learning_rate": 5.2715570018359465e-05, "loss": 0.5037, "step": 1830},
    {"epoch": 3.79, "learning_rate": 5.48894241764059e-05, "loss": 0.6078, "step": 1835},
    {"epoch": 3.8, "learning_rate": 5.70777183149052e-05, "loss": 0.7133, "step": 1840},
    {"epoch": 3.81, "learning_rate": 5.927814772215574e-05, "loss": 0.6985, "step": 1845},
    {"epoch": 3.82, "learning_rate": 6.148839490558139e-05, "loss": 0.6508, "step": 1850},
    {"epoch": 3.83, "learning_rate": 6.370613203253432e-05, "loss": 0.4598, "step": 1855},
    {"epoch": 3.84, "learning_rate": 6.592902338193773e-05, "loss": 0.428, "step": 1860},
    {"epoch": 3.85, "learning_rate": 6.815472780430632e-05, "loss": 0.5706, "step": 1865},
    {"epoch": 3.86, "learning_rate": 7.038090118741434e-05, "loss": 0.9235, "step": 1870},
    {"epoch": 3.87, "learning_rate": 7.260519892513427e-05, "loss": 0.8443, "step": 1875},
    {"epoch": 3.88, "learning_rate": 7.482527838676278e-05, "loss": 0.5691, "step": 1880},
    {"epoch": 3.89, "learning_rate": 7.703880138428551e-05, "loss": 0.4075, "step": 1885},
    {"epoch": 3.9, "learning_rate": 7.924343663496286e-05, "loss": 0.4282, "step": 1890},
    {"epoch": 3.92, "learning_rate": 8.14368622166238e-05, "loss": 0.6324, "step": 1895},
    {"epoch": 3.93, "learning_rate": 8.361676801313385e-05, "loss": 0.7816, "step": 1900},
    {"epoch": 3.94, "learning_rate": 8.578085814737905e-05, "loss": 0.6076, "step": 1905},
    {"epoch": 3.95, "learning_rate": 8.792685339932241e-05, "loss": 0.5257, "step": 1910},
    {"epoch": 3.96, "learning_rate": 9.005249360644818e-05, "loss": 0.6216, "step": 1915},
    {"epoch": 3.97, "learning_rate": 9.215554004418427e-05, "loss": 0.7805, "step": 1920},
    {"epoch": 3.98, "learning_rate": 9.423377778371423e-05, "loss": 0.7339, "step": 1925},
    {"epoch": 3.99, "learning_rate": 9.628501802474621e-05, "loss": 0.6319, "step": 1930},
    {"epoch": 4.0, "learning_rate": 9.830710040076315e-05, "loss": 0.5267, "step": 1935},
    {"epoch": 4.0, "eval_loss": 0.4826279878616333, "eval_runtime": 14.7153, "eval_samples_per_second": 44.716, "eval_steps_per_second": 5.64, "step": 1936},
    {"epoch": 4.04, "learning_rate": 0.00011450235959621898, "loss": 0.3751, "step": 1940},
    {"epoch": 4.05, "learning_rate": 0.00011280977272542054, "loss": 0.7741, "step": 1945},
    {"epoch": 4.06, "learning_rate": 0.00011106984492265664, "loss": 0.6279, "step": 1950},
    {"epoch": 4.07, "learning_rate": 0.00010928443934619291, "loss": 0.4172, "step": 1955},
    {"epoch": 4.08, "learning_rate": 0.00010745546785304314, "loss": 0.8202, "step": 1960},
    {"epoch": 4.09, "learning_rate": 0.0001055848889516962, "loss": 0.7253, "step": 1965},
    {"epoch": 4.1, "learning_rate": 0.00010367470570488607, "loss": 0.6737, "step": 1970},
    {"epoch": 4.11, "learning_rate": 0.00010172696358468322, "loss": 0.602, "step": 1975},
    {"epoch": 4.12, "learning_rate": 9.974374828213395e-05, "loss": 0.7079, "step": 1980},
    {"epoch": 4.14, "learning_rate": 9.772718347385836e-05, "loss": 0.8964, "step": 1985},
    {"epoch": 4.15, "learning_rate": 9.567942854796078e-05, "loss": 0.4818, "step": 1990},
    {"epoch": 4.16, "learning_rate": 9.360267629171191e-05, "loss": 0.7278, "step": 1995},
    {"epoch": 4.17, "learning_rate": 9.149915054343752e-05, "loss": 0.6072, "step": 2000},
    {"epoch": 4.18, "learning_rate": 8.937110381119999e-05, "loss": 0.8248, "step": 2005},
    {"epoch": 4.19, "learning_rate": 8.722081486074574e-05, "loss": 0.8012, "step": 2010},
    {"epoch": 4.2, "learning_rate": 8.50505862753343e-05, "loss": 0.542, "step": 2015},
    {"epoch": 4.21, "learning_rate": 8.286274199009886e-05, "loss": 0.5911, "step": 2020},
    {"epoch": 4.22, "learning_rate": 8.06596248034977e-05, "loss": 0.6034, "step": 2025},
    {"epoch": 4.23, "learning_rate": 7.844359386859629e-05, "loss": 0.6177, "step": 2030},
    {"epoch": 4.24, "learning_rate": 7.621702216682333e-05, "loss": 0.7298, "step": 2035},
    {"epoch": 4.25, "learning_rate": 7.398229396693239e-05, "loss": 0.51, "step": 2040},
    {"epoch": 4.26, "learning_rate": 7.174180227184603e-05, "loss": 0.8855, "step": 2045},
    {"epoch": 4.27, "learning_rate": 6.94979462561934e-05, "loss": 0.6728, "step": 2050},
    {"epoch": 4.28, "learning_rate": 6.725312869720243e-05, "loss": 0.4899, "step": 2055},
    {"epoch": 4.29, "learning_rate": 6.500975340173405e-05, "loss": 0.4827, "step": 2060},
    {"epoch": 4.3, "learning_rate": 6.27702226322509e-05, "loss": 0.6521, "step": 2065},
    {"epoch": 4.31, "learning_rate": 6.05369345343951e-05, "loss": 0.5115, "step": 2070},
    {"epoch": 4.32, "learning_rate": 5.831228056899374e-05, "loss": 0.3792, "step": 2075},
    {"epoch": 4.33, "learning_rate": 5.609864295123839e-05, "loss": 0.5564, "step": 2080},
    {"epoch": 4.34, "learning_rate": 5.389839209973954e-05, "loss": 0.8714, "step": 2085},
    {"epoch": 4.35, "learning_rate": 5.171388409821159e-05, "loss": 0.4843, "step": 2090},
    {"epoch": 4.36, "learning_rate": 4.9547458172542385e-05, "loss": 0.7155, "step": 2095},
    {"epoch": 4.38, "learning_rate": 4.74014341858799e-05, "loss": 0.4362, "step": 2100},
    {"epoch": 4.39, "learning_rate": 4.5278110154455465e-05, "loss": 0.4663, "step": 2105},
    {"epoch": 4.4, "learning_rate": 4.317975978682545e-05, "loss": 0.5994, "step": 2110},
    {"epoch": 4.41, "learning_rate": 4.110863004912692e-05, "loss": 0.4463, "step": 2115},
    {"epoch": 4.42, "learning_rate": 3.9066938758951916e-05, "loss": 0.6226, "step": 2120},
    {"epoch": 4.43, "learning_rate": 3.7056872210476396e-05, "loss": 0.5947, "step": 2125},
    {"epoch": 4.44, "learning_rate": 3.508058283331054e-05, "loss": 0.5276, "step": 2130},
    {"epoch": 4.45, "learning_rate": 3.314018688761322e-05, "loss": 0.5118, "step": 2135},
    {"epoch": 4.46, "learning_rate": 3.123776219796979e-05, "loss": 0.6581, "step": 2140},
    {"epoch": 4.47, "learning_rate": 2.937534592838677e-05, "loss": 0.6129, "step": 2145},
    {"epoch": 4.48, "learning_rate": 2.7554932400854067e-05, "loss": 0.6305, "step": 2150},
    {"epoch": 4.49, "learning_rate": 2.577847095977183e-05, "loss": 0.6368, "step": 2155},
    {"epoch": 4.5, "learning_rate": 2.4047863884551244e-05, "loss": 0.6684, "step": 2160},
    {"epoch": 4.51, "learning_rate": 2.2364964352589423e-05, "loss": 0.7136, "step": 2165},
    {"epoch": 4.52, "learning_rate": 2.0731574454860283e-05, "loss": 0.5243, "step": 2170},
    {"epoch": 4.53, "learning_rate": 1.914944326618096e-05, "loss": 0.665, "step": 2175},
    {"epoch": 4.54, "learning_rate": 1.7620264972250878e-05, "loss": 0.5977, "step": 2180},
    {"epoch": 4.55, "learning_rate": 1.6145677055492958e-05, "loss": 0.7612, "step": 2185},
    {"epoch": 4.56, "learning_rate": 1.4727258541581522e-05, "loss": 0.7172, "step": 2190},
    {"epoch": 4.57, "learning_rate": 1.3366528308588766e-05, "loss": 0.6571, "step": 2195},
    {"epoch": 4.58, "learning_rate": 1.2064943460529662e-05, "loss": 0.664, "step": 2200},
    {"epoch": 4.59, "learning_rate": 1.0823897767063955e-05, "loss": 1.1318, "step": 2205},
    {"epoch": 4.6, "learning_rate": 9.644720171000711e-06, "loss": 0.5895, "step": 2210},
    {"epoch": 4.61, "learning_rate": 8.52867336524718e-06, "loss": 0.5683, "step": 2215},
    {"epoch": 4.62, "learning_rate": 7.476952440678663e-06, "loss": 0.8862, "step": 2220},
    {"epoch": 4.64, "learning_rate": 6.490683606400273e-06, "loss": 0.5336, "step": 2225},
    {"epoch": 4.65, "learning_rate": 5.570922983785744e-06, "loss": 0.7185, "step": 2230},
    {"epoch": 4.66, "learning_rate": 4.718655475547088e-06, "loss": 0.6121, "step": 2235},
    {"epoch": 4.67, "learning_rate": 3.934793711076979e-06, "loss": 0.4995, "step": 2240},
    {"epoch": 4.68, "learning_rate": 3.2201770691879365e-06, "loss": 0.639, "step": 2245},
    {"epoch": 4.69, "learning_rate": 2.5755707792803602e-06, "loss": 0.6703, "step": 2250},
    {"epoch": 4.7, "learning_rate": 2.001665101912862e-06, "loss": 0.5835, "step": 2255},
    {"epoch": 4.71, "learning_rate": 1.499074589660724e-06, "loss": 0.8827, "step": 2260},
    {"epoch": 4.72, "learning_rate": 1.0683374290336727e-06, "loss": 0.521, "step": 2265},
    {"epoch": 4.73, "learning_rate": 7.099148641708803e-07, "loss": 0.5759, "step": 2270},
    {"epoch": 4.74, "learning_rate": 4.2419070293249417e-07, "loss": 0.4576, "step": 2275},
    {"epoch": 4.75, "learning_rate": 2.1147090590762733e-07, "loss": 0.9183, "step": 2280},
    {"epoch": 4.76, "learning_rate": 7.19832587822494e-08, "loss": 0.6811, "step": 2285},
    {"epoch": 4.77, "learning_rate": 5.877128423112166e-09, "loss": 0.6599, "step": 2290},
    {"epoch": 4.78, "learning_rate": 1.3223302930315373e-08, "loss": 0.8066, "step": 2295},
    {"epoch": 4.79, "learning_rate": 9.401391583621219e-08, "loss": 0.93, "step": 2300},
    {"epoch": 4.8, "learning_rate": 2.4816245452881085e-07, "loss": 0.7526, "step": 2305},
    {"epoch": 4.81, "learning_rate": 4.755038528919192e-07, "loss": 0.6209, "step": 2310},
    {"epoch": 4.82, "learning_rate": 7.757946680618806e-07, "loss": 0.5472, "step": 2315},
    {"epoch": 4.83, "learning_rate": 1.1487133411127057e-06, "loss": 0.5128, "step": 2320},
    {"epoch": 4.84, "learning_rate": 1.5938605413876288e-06, "loss": 0.5971, "step": 2325},
    {"epoch": 4.85, "learning_rate": 2.1107595941164267e-06, "loss": 0.9001, "step": 2330},
    {"epoch": 4.86, "learning_rate": 2.698856990845814e-06, "loss": 0.624, "step": 2335},
    {"epoch": 4.88, "learning_rate": 3.3575229821519346e-06, "loss": 0.469, "step": 2340},
    {"epoch": 4.89, "learning_rate": 4.086052251995796e-06, "loss": 0.7627, "step": 2345},
    {"epoch": 4.9, "learning_rate": 4.883664672985776e-06, "loss": 0.5373, "step": 2350},
    {"epoch": 4.91, "learning_rate": 5.749506141764069e-06, "loss": 0.684, "step": 2355},
    {"epoch": 4.92, "learning_rate": 6.682649493598944e-06, "loss": 0.6701, "step": 2360},
    {"epoch": 4.93, "learning_rate": 7.682095495217462e-06, "loss": 0.5721, "step": 2365},
    {"epoch": 4.94, "learning_rate": 8.746773914805009e-06, "loss": 0.5432, "step": 2370},
    {"epoch": 4.95, "learning_rate": 9.875544668046303e-06, "loss": 0.6582, "step": 2375},
    {"epoch": 4.96, "learning_rate": 1.1067199038944353e-05, "loss": 0.5341, "step": 2380},
    {"epoch": 4.97, "learning_rate": 1.2320460974148876e-05, "loss": 0.677, "step": 2385},
    {"epoch": 4.98, "learning_rate": 1.3633988449393231e-05, "loss": 0.5661, "step": 2390},
    {"epoch": 4.99, "learning_rate": 1.5006374906553291e-05, "loss": 0.4745, "step": 2395},
    {"epoch": 5.0, "learning_rate": 1.6436150759836066e-05, "loss": 0.746, "step": 2400},
    {"epoch": 5.0, "eval_loss": 0.3943726122379303, "eval_runtime": 15.4496, "eval_samples_per_second": 44.661, "eval_steps_per_second": 5.631, "step": 2400}
  ],
  "max_steps": 2400,
  "num_train_epochs": 5,
  "total_flos": 2505790586880000.0,
  "trial_name": null,
  "trial_params": null
}