{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "global_step": 7010,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.07,
      "learning_rate": 4.964336661911555e-05,
      "loss": 2.1394,
      "step": 50
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.92867332382311e-05,
      "loss": 1.9883,
      "step": 100
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.893009985734665e-05,
      "loss": 1.9832,
      "step": 150
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.85734664764622e-05,
      "loss": 1.9561,
      "step": 200
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.821683309557775e-05,
      "loss": 1.9404,
      "step": 250
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.7860199714693296e-05,
      "loss": 1.9088,
      "step": 300
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.7503566333808845e-05,
      "loss": 1.8832,
      "step": 350
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.7146932952924393e-05,
      "loss": 1.8853,
      "step": 400
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.679029957203995e-05,
      "loss": 1.8799,
      "step": 450
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.64336661911555e-05,
      "loss": 1.825,
      "step": 500
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.607703281027104e-05,
      "loss": 1.8531,
      "step": 550
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.572039942938659e-05,
      "loss": 1.847,
      "step": 600
    },
    {
      "epoch": 0.93,
      "learning_rate": 4.5363766048502145e-05,
      "loss": 1.8485,
      "step": 650
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.5007132667617693e-05,
      "loss": 1.83,
      "step": 700
    },
    {
      "epoch": 1.07,
      "learning_rate": 4.4650499286733236e-05,
      "loss": 1.8062,
      "step": 750
    },
    {
      "epoch": 1.14,
      "learning_rate": 4.429386590584879e-05,
      "loss": 1.806,
      "step": 800
    },
    {
      "epoch": 1.21,
      "learning_rate": 4.393723252496434e-05,
      "loss": 1.7829,
      "step": 850
    },
    {
      "epoch": 1.28,
      "learning_rate": 4.358059914407989e-05,
      "loss": 1.7775,
      "step": 900
    },
    {
      "epoch": 1.36,
      "learning_rate": 4.322396576319544e-05,
      "loss": 1.7954,
      "step": 950
    },
    {
      "epoch": 1.43,
      "learning_rate": 4.286733238231099e-05,
      "loss": 1.794,
      "step": 1000
    },
    {
      "epoch": 1.5,
      "learning_rate": 4.2510699001426536e-05,
      "loss": 1.7848,
      "step": 1050
    },
    {
      "epoch": 1.57,
      "learning_rate": 4.2154065620542085e-05,
      "loss": 1.7733,
      "step": 1100
    },
    {
      "epoch": 1.64,
      "learning_rate": 4.1797432239657633e-05,
      "loss": 1.7579,
      "step": 1150
    },
    {
      "epoch": 1.71,
      "learning_rate": 4.144079885877318e-05,
      "loss": 1.7667,
      "step": 1200
    },
    {
      "epoch": 1.78,
      "learning_rate": 4.108416547788873e-05,
      "loss": 1.7709,
      "step": 1250
    },
    {
      "epoch": 1.85,
      "learning_rate": 4.072753209700429e-05,
      "loss": 1.7481,
      "step": 1300
    },
    {
      "epoch": 1.93,
      "learning_rate": 4.037089871611983e-05,
      "loss": 1.7354,
      "step": 1350
    },
    {
      "epoch": 2.0,
      "learning_rate": 4.001426533523538e-05,
      "loss": 1.7537,
      "step": 1400
    },
    {
      "epoch": 2.07,
      "learning_rate": 3.965763195435093e-05,
      "loss": 1.7301,
      "step": 1450
    },
    {
      "epoch": 2.14,
      "learning_rate": 3.930099857346648e-05,
      "loss": 1.7135,
      "step": 1500
    },
    {
      "epoch": 2.21,
      "learning_rate": 3.8944365192582024e-05,
      "loss": 1.7132,
      "step": 1550
    },
    {
      "epoch": 2.28,
      "learning_rate": 3.858773181169757e-05,
      "loss": 1.7312,
      "step": 1600
    },
    {
      "epoch": 2.35,
      "learning_rate": 3.823109843081313e-05,
      "loss": 1.7184,
      "step": 1650
    },
    {
      "epoch": 2.43,
      "learning_rate": 3.787446504992868e-05,
      "loss": 1.7202,
      "step": 1700
    },
    {
      "epoch": 2.5,
      "learning_rate": 3.751783166904422e-05,
      "loss": 1.7139,
      "step": 1750
    },
    {
      "epoch": 2.57,
      "learning_rate": 3.716119828815977e-05,
      "loss": 1.721,
      "step": 1800
    },
    {
      "epoch": 2.64,
      "learning_rate": 3.6804564907275324e-05,
      "loss": 1.7021,
      "step": 1850
    },
    {
      "epoch": 2.71,
      "learning_rate": 3.644793152639087e-05,
      "loss": 1.722,
      "step": 1900
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.609129814550642e-05,
      "loss": 1.6787,
      "step": 1950
    },
    {
      "epoch": 2.85,
      "learning_rate": 3.573466476462197e-05,
      "loss": 1.7017,
      "step": 2000
    },
    {
      "epoch": 2.92,
      "learning_rate": 3.537803138373752e-05,
      "loss": 1.7102,
      "step": 2050
    },
    {
      "epoch": 3.0,
      "learning_rate": 3.502139800285307e-05,
      "loss": 1.6821,
      "step": 2100
    },
    {
      "epoch": 3.07,
      "learning_rate": 3.466476462196862e-05,
      "loss": 1.6934,
      "step": 2150
    },
    {
      "epoch": 3.14,
      "learning_rate": 3.430813124108417e-05,
      "loss": 1.6817,
      "step": 2200
    },
    {
      "epoch": 3.21,
      "learning_rate": 3.3951497860199716e-05,
      "loss": 1.6769,
      "step": 2250
    },
    {
      "epoch": 3.28,
      "learning_rate": 3.3594864479315264e-05,
      "loss": 1.6836,
      "step": 2300
    },
    {
      "epoch": 3.35,
      "learning_rate": 3.323823109843081e-05,
      "loss": 1.6666,
      "step": 2350
    },
    {
      "epoch": 3.42,
      "learning_rate": 3.288159771754636e-05,
      "loss": 1.6788,
      "step": 2400
    },
    {
      "epoch": 3.5,
      "learning_rate": 3.252496433666191e-05,
      "loss": 1.6446,
      "step": 2450
    },
    {
      "epoch": 3.57,
      "learning_rate": 3.216833095577747e-05,
      "loss": 1.6658,
      "step": 2500
    },
    {
      "epoch": 3.64,
      "learning_rate": 3.1811697574893016e-05,
      "loss": 1.6523,
      "step": 2550
    },
    {
      "epoch": 3.71,
      "learning_rate": 3.145506419400856e-05,
      "loss": 1.6794,
      "step": 2600
    },
    {
      "epoch": 3.78,
      "learning_rate": 3.1098430813124107e-05,
      "loss": 1.6551,
      "step": 2650
    },
    {
      "epoch": 3.85,
      "learning_rate": 3.074179743223966e-05,
      "loss": 1.6595,
      "step": 2700
    },
    {
      "epoch": 3.92,
      "learning_rate": 3.0385164051355208e-05,
      "loss": 1.6518,
      "step": 2750
    },
    {
      "epoch": 3.99,
      "learning_rate": 3.0028530670470757e-05,
      "loss": 1.6226,
      "step": 2800
    },
    {
      "epoch": 4.07,
      "learning_rate": 2.967189728958631e-05,
      "loss": 1.6458,
      "step": 2850
    },
    {
      "epoch": 4.14,
      "learning_rate": 2.9315263908701858e-05,
      "loss": 1.6407,
      "step": 2900
    },
    {
      "epoch": 4.21,
      "learning_rate": 2.8958630527817403e-05,
      "loss": 1.6321,
      "step": 2950
    },
    {
      "epoch": 4.28,
      "learning_rate": 2.8601997146932952e-05,
      "loss": 1.6502,
      "step": 3000
    },
    {
      "epoch": 4.35,
      "learning_rate": 2.8245363766048504e-05,
      "loss": 1.6438,
      "step": 3050
    },
    {
      "epoch": 4.42,
      "learning_rate": 2.7888730385164053e-05,
      "loss": 1.6428,
      "step": 3100
    },
    {
      "epoch": 4.49,
      "learning_rate": 2.75320970042796e-05,
      "loss": 1.633,
      "step": 3150
    },
    {
      "epoch": 4.56,
      "learning_rate": 2.7175463623395154e-05,
      "loss": 1.6245,
      "step": 3200
    },
    {
      "epoch": 4.64,
      "learning_rate": 2.68188302425107e-05,
      "loss": 1.6337,
      "step": 3250
    },
    {
      "epoch": 4.71,
      "learning_rate": 2.646219686162625e-05,
      "loss": 1.6511,
      "step": 3300
    },
    {
      "epoch": 4.78,
      "learning_rate": 2.6105563480741794e-05,
      "loss": 1.6104,
      "step": 3350
    },
    {
      "epoch": 4.85,
      "learning_rate": 2.574893009985735e-05,
      "loss": 1.614,
      "step": 3400
    },
    {
      "epoch": 4.92,
      "learning_rate": 2.5392296718972895e-05,
      "loss": 1.6038,
      "step": 3450
    },
    {
      "epoch": 4.99,
      "learning_rate": 2.5035663338088444e-05,
      "loss": 1.6365,
      "step": 3500
    },
    {
      "epoch": 5.06,
      "learning_rate": 2.4679029957203993e-05,
      "loss": 1.6022,
      "step": 3550
    },
    {
      "epoch": 5.14,
      "learning_rate": 2.4322396576319545e-05,
      "loss": 1.6039,
      "step": 3600
    },
    {
      "epoch": 5.21,
      "learning_rate": 2.396576319543509e-05,
      "loss": 1.5874,
      "step": 3650
    },
    {
      "epoch": 5.28,
      "learning_rate": 2.3609129814550643e-05,
      "loss": 1.5715,
      "step": 3700
    },
    {
      "epoch": 5.35,
      "learning_rate": 2.3252496433666192e-05,
      "loss": 1.601,
      "step": 3750
    },
    {
      "epoch": 5.42,
      "learning_rate": 2.289586305278174e-05,
      "loss": 1.5898,
      "step": 3800
    },
    {
      "epoch": 5.49,
      "learning_rate": 2.253922967189729e-05,
      "loss": 1.6232,
      "step": 3850
    },
    {
      "epoch": 5.56,
      "learning_rate": 2.218259629101284e-05,
      "loss": 1.6,
      "step": 3900
    },
    {
      "epoch": 5.63,
      "learning_rate": 2.1825962910128388e-05,
      "loss": 1.5911,
      "step": 3950
    },
    {
      "epoch": 5.71,
      "learning_rate": 2.146932952924394e-05,
      "loss": 1.5976,
      "step": 4000
    },
    {
      "epoch": 5.78,
      "learning_rate": 2.111269614835949e-05,
      "loss": 1.5734,
      "step": 4050
    },
    {
      "epoch": 5.85,
      "learning_rate": 2.0756062767475038e-05,
      "loss": 1.6059,
      "step": 4100
    },
    {
      "epoch": 5.92,
      "learning_rate": 2.0399429386590586e-05,
      "loss": 1.5414,
      "step": 4150
    },
    {
      "epoch": 5.99,
      "learning_rate": 2.0042796005706135e-05,
      "loss": 1.5934,
      "step": 4200
    },
    {
      "epoch": 6.06,
      "learning_rate": 1.9686162624821684e-05,
      "loss": 1.5541,
      "step": 4250
    },
    {
      "epoch": 6.13,
      "learning_rate": 1.9329529243937233e-05,
      "loss": 1.5748,
      "step": 4300
    },
    {
      "epoch": 6.21,
      "learning_rate": 1.8972895863052782e-05,
      "loss": 1.5502,
      "step": 4350
    },
    {
      "epoch": 6.28,
      "learning_rate": 1.861626248216833e-05,
      "loss": 1.5512,
      "step": 4400
    },
    {
      "epoch": 6.35,
      "learning_rate": 1.8259629101283883e-05,
      "loss": 1.5841,
      "step": 4450
    },
    {
      "epoch": 6.42,
      "learning_rate": 1.790299572039943e-05,
      "loss": 1.6056,
      "step": 4500
    },
    {
      "epoch": 6.49,
      "learning_rate": 1.754636233951498e-05,
      "loss": 1.5843,
      "step": 4550
    },
    {
      "epoch": 6.56,
      "learning_rate": 1.718972895863053e-05,
      "loss": 1.5775,
      "step": 4600
    },
    {
      "epoch": 6.63,
      "learning_rate": 1.683309557774608e-05,
      "loss": 1.567,
      "step": 4650
    },
    {
      "epoch": 6.7,
      "learning_rate": 1.6476462196861628e-05,
      "loss": 1.5395,
      "step": 4700
    },
    {
      "epoch": 6.78,
      "learning_rate": 1.6119828815977176e-05,
      "loss": 1.6035,
      "step": 4750
    },
    {
      "epoch": 6.85,
      "learning_rate": 1.5763195435092725e-05,
      "loss": 1.5744,
      "step": 4800
    },
    {
      "epoch": 6.92,
      "learning_rate": 1.5406562054208274e-05,
      "loss": 1.5538,
      "step": 4850
    },
    {
      "epoch": 6.99,
      "learning_rate": 1.5049928673323823e-05,
      "loss": 1.5561,
      "step": 4900
    },
    {
      "epoch": 7.06,
      "learning_rate": 1.4693295292439374e-05,
      "loss": 1.5545,
      "step": 4950
    },
    {
      "epoch": 7.13,
      "learning_rate": 1.433666191155492e-05,
      "loss": 1.5433,
      "step": 5000
    },
    {
      "epoch": 7.2,
      "learning_rate": 1.3980028530670471e-05,
      "loss": 1.536,
      "step": 5050
    },
    {
      "epoch": 7.28,
      "learning_rate": 1.362339514978602e-05,
      "loss": 1.5689,
      "step": 5100
    },
    {
      "epoch": 7.35,
      "learning_rate": 1.326676176890157e-05,
      "loss": 1.5497,
      "step": 5150
    },
    {
      "epoch": 7.42,
      "learning_rate": 1.291012838801712e-05,
      "loss": 1.5449,
      "step": 5200
    },
    {
      "epoch": 7.49,
      "learning_rate": 1.2553495007132669e-05,
      "loss": 1.5663,
      "step": 5250
    },
    {
      "epoch": 7.56,
      "learning_rate": 1.2196861626248217e-05,
      "loss": 1.5725,
      "step": 5300
    },
    {
      "epoch": 7.63,
      "learning_rate": 1.1840228245363768e-05,
      "loss": 1.5291,
      "step": 5350
    },
    {
      "epoch": 7.7,
      "learning_rate": 1.1483594864479317e-05,
      "loss": 1.5607,
      "step": 5400
    },
    {
      "epoch": 7.77,
      "learning_rate": 1.1126961483594866e-05,
      "loss": 1.5361,
      "step": 5450
    },
    {
      "epoch": 7.85,
      "learning_rate": 1.0770328102710415e-05,
      "loss": 1.5743,
      "step": 5500
    },
    {
      "epoch": 7.92,
      "learning_rate": 1.0413694721825964e-05,
      "loss": 1.5484,
      "step": 5550
    },
    {
      "epoch": 7.99,
      "learning_rate": 1.0057061340941512e-05,
      "loss": 1.5238,
      "step": 5600
    },
    {
      "epoch": 8.06,
      "learning_rate": 9.700427960057063e-06,
      "loss": 1.5493,
      "step": 5650
    },
    {
      "epoch": 8.13,
      "learning_rate": 9.343794579172612e-06,
      "loss": 1.5216,
      "step": 5700
    },
    {
      "epoch": 8.2,
      "learning_rate": 8.98716119828816e-06,
      "loss": 1.5233,
      "step": 5750
    },
    {
      "epoch": 8.27,
      "learning_rate": 8.63052781740371e-06,
      "loss": 1.5664,
      "step": 5800
    },
    {
      "epoch": 8.35,
      "learning_rate": 8.273894436519259e-06,
      "loss": 1.557,
      "step": 5850
    },
    {
      "epoch": 8.42,
      "learning_rate": 7.917261055634807e-06,
      "loss": 1.5289,
      "step": 5900
    },
    {
      "epoch": 8.49,
      "learning_rate": 7.560627674750358e-06,
      "loss": 1.5276,
      "step": 5950
    },
    {
      "epoch": 8.56,
      "learning_rate": 7.203994293865907e-06,
      "loss": 1.579,
      "step": 6000
    },
    {
      "epoch": 8.63,
      "learning_rate": 6.847360912981456e-06,
      "loss": 1.5147,
      "step": 6050
    },
    {
      "epoch": 8.7,
      "learning_rate": 6.490727532097005e-06,
      "loss": 1.5442,
      "step": 6100
    },
    {
      "epoch": 8.77,
      "learning_rate": 6.1340941512125535e-06,
      "loss": 1.5118,
      "step": 6150
    },
    {
      "epoch": 8.84,
      "learning_rate": 5.777460770328103e-06,
      "loss": 1.525,
      "step": 6200
    },
    {
      "epoch": 8.92,
      "learning_rate": 5.420827389443652e-06,
      "loss": 1.5131,
      "step": 6250
    },
    {
      "epoch": 8.99,
      "learning_rate": 5.064194008559201e-06,
      "loss": 1.5268,
      "step": 6300
    },
    {
      "epoch": 9.06,
      "learning_rate": 4.707560627674751e-06,
      "loss": 1.5325,
      "step": 6350
    },
    {
      "epoch": 9.13,
      "learning_rate": 4.3509272467903e-06,
      "loss": 1.5153,
      "step": 6400
    },
    {
      "epoch": 9.2,
      "learning_rate": 3.9942938659058485e-06,
      "loss": 1.5342,
      "step": 6450
    },
    {
      "epoch": 9.27,
      "learning_rate": 3.6376604850213982e-06,
      "loss": 1.5422,
      "step": 6500
    },
    {
      "epoch": 9.34,
      "learning_rate": 3.2810271041369475e-06,
      "loss": 1.5243,
      "step": 6550
    },
    {
      "epoch": 9.42,
      "learning_rate": 2.924393723252497e-06,
      "loss": 1.5273,
      "step": 6600
    },
    {
      "epoch": 9.49,
      "learning_rate": 2.5677603423680457e-06,
      "loss": 1.5353,
      "step": 6650
    },
    {
      "epoch": 9.56,
      "learning_rate": 2.211126961483595e-06,
      "loss": 1.537,
      "step": 6700
    },
    {
      "epoch": 9.63,
      "learning_rate": 1.8544935805991443e-06,
      "loss": 1.5333,
      "step": 6750
    },
    {
      "epoch": 9.7,
      "learning_rate": 1.4978601997146932e-06,
      "loss": 1.5402,
      "step": 6800
    },
    {
      "epoch": 9.77,
      "learning_rate": 1.1412268188302427e-06,
      "loss": 1.5236,
      "step": 6850
    },
    {
      "epoch": 9.84,
      "learning_rate": 7.845934379457918e-07,
      "loss": 1.5204,
      "step": 6900
    },
    {
      "epoch": 9.91,
      "learning_rate": 4.27960057061341e-07,
      "loss": 1.5038,
      "step": 6950
    },
    {
      "epoch": 9.99,
      "learning_rate": 7.132667617689017e-08,
      "loss": 1.5403,
      "step": 7000
    },
    {
      "epoch": 10.0,
      "step": 7010,
      "total_flos": 7384869485936640.0,
      "train_loss": 1.6471780081788416,
      "train_runtime": 2443.9537,
      "train_samples_per_second": 91.692,
      "train_steps_per_second": 2.868
    }
  ],
  "max_steps": 7010,
  "num_train_epochs": 10,
  "total_flos": 7384869485936640.0,
  "trial_name": null,
  "trial_params": null
}