{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 204657,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 8e-05,
      "loss": 2.273,
      "step": 500
    },
    {
      "epoch": 0.01,
      "learning_rate": 7.999957460493864e-05,
      "loss": 2.2497,
      "step": 1000
    },
    {
      "epoch": 0.02,
      "learning_rate": 7.999829842880257e-05,
      "loss": 2.3728,
      "step": 1500
    },
    {
      "epoch": 0.03,
      "learning_rate": 7.999617149873574e-05,
      "loss": 2.3576,
      "step": 2000
    },
    {
      "epoch": 0.04,
      "learning_rate": 7.999319385997746e-05,
      "loss": 2.3374,
      "step": 2500
    },
    {
      "epoch": 0.04,
      "learning_rate": 7.998936557586135e-05,
      "loss": 2.3538,
      "step": 3000
    },
    {
      "epoch": 0.05,
      "learning_rate": 7.998468672781407e-05,
      "loss": 2.366,
      "step": 3500
    },
    {
      "epoch": 0.06,
      "learning_rate": 7.997915741535355e-05,
      "loss": 2.3321,
      "step": 4000
    },
    {
      "epoch": 0.07,
      "learning_rate": 7.997277775608694e-05,
      "loss": 2.2838,
      "step": 4500
    },
    {
      "epoch": 0.07,
      "learning_rate": 7.996554788570796e-05,
      "loss": 2.2679,
      "step": 5000
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.995746795799422e-05,
      "loss": 2.4041,
      "step": 5500
    },
    {
      "epoch": 0.09,
      "learning_rate": 7.994853814480376e-05,
      "loss": 2.3415,
      "step": 6000
    },
    {
      "epoch": 0.1,
      "learning_rate": 7.99387586360715e-05,
      "loss": 2.3589,
      "step": 6500
    },
    {
      "epoch": 0.1,
      "learning_rate": 7.992812963980518e-05,
      "loss": 2.3203,
      "step": 7000
    },
    {
      "epoch": 0.11,
      "learning_rate": 7.991665138208094e-05,
      "loss": 2.3517,
      "step": 7500
    },
    {
      "epoch": 0.12,
      "learning_rate": 7.990432410703848e-05,
      "loss": 2.4286,
      "step": 8000
    },
    {
      "epoch": 0.12,
      "learning_rate": 7.989114807687589e-05,
      "loss": 2.3227,
      "step": 8500
    },
    {
      "epoch": 0.13,
      "learning_rate": 7.987712357184408e-05,
      "loss": 2.3945,
      "step": 9000
    },
    {
      "epoch": 0.14,
      "learning_rate": 7.98622508902408e-05,
      "loss": 2.3775,
      "step": 9500
    },
    {
      "epoch": 0.15,
      "learning_rate": 7.984653034840432e-05,
      "loss": 2.3522,
      "step": 10000
    },
    {
      "epoch": 0.15,
      "learning_rate": 7.982996228070671e-05,
      "loss": 2.334,
      "step": 10500
    },
    {
      "epoch": 0.16,
      "learning_rate": 7.981254703954664e-05,
      "loss": 2.3742,
      "step": 11000
    },
    {
      "epoch": 0.17,
      "learning_rate": 7.979428499534201e-05,
      "loss": 2.2517,
      "step": 11500
    },
    {
      "epoch": 0.18,
      "learning_rate": 7.977517653652199e-05,
      "loss": 2.3994,
      "step": 12000
    },
    {
      "epoch": 0.18,
      "learning_rate": 7.975522206951876e-05,
      "loss": 2.3606,
      "step": 12500
    },
    {
      "epoch": 0.19,
      "learning_rate": 7.973442201875895e-05,
      "loss": 2.3634,
      "step": 13000
    },
    {
      "epoch": 0.2,
      "learning_rate": 7.971277682665446e-05,
      "loss": 2.3061,
      "step": 13500
    },
    {
      "epoch": 0.21,
      "learning_rate": 7.969028695359319e-05,
      "loss": 2.4985,
      "step": 14000
    },
    {
      "epoch": 0.21,
      "learning_rate": 7.966695287792921e-05,
      "loss": 2.3908,
      "step": 14500
    },
    {
      "epoch": 0.22,
      "learning_rate": 7.96427750959725e-05,
      "loss": 2.3605,
      "step": 15000
    },
    {
      "epoch": 0.23,
      "learning_rate": 7.961775412197857e-05,
      "loss": 2.347,
      "step": 15500
    },
    {
      "epoch": 0.23,
      "learning_rate": 7.959189048813735e-05,
      "loss": 2.3233,
      "step": 16000
    },
    {
      "epoch": 0.24,
      "learning_rate": 7.95651847445619e-05,
      "loss": 2.3415,
      "step": 16500
    },
    {
      "epoch": 0.25,
      "learning_rate": 7.953763745927682e-05,
      "loss": 2.4679,
      "step": 17000
    },
    {
      "epoch": 0.26,
      "learning_rate": 7.950924921820606e-05,
      "loss": 2.443,
      "step": 17500
    },
    {
      "epoch": 0.26,
      "learning_rate": 7.948002062516052e-05,
      "loss": 2.5141,
      "step": 18000
    },
    {
      "epoch": 0.27,
      "learning_rate": 7.944995230182513e-05,
      "loss": 2.4339,
      "step": 18500
    },
    {
      "epoch": 0.28,
      "learning_rate": 7.941904488774571e-05,
      "loss": 2.4308,
      "step": 19000
    },
    {
      "epoch": 0.29,
      "learning_rate": 7.938729904031533e-05,
      "loss": 2.4312,
      "step": 19500
    },
    {
      "epoch": 0.29,
      "learning_rate": 7.93547154347603e-05,
      "loss": 2.4756,
      "step": 20000
    },
    {
      "epoch": 0.3,
      "learning_rate": 7.932129476412592e-05,
      "loss": 2.4426,
      "step": 20500
    },
    {
      "epoch": 0.31,
      "learning_rate": 7.928703773926155e-05,
      "loss": 2.4779,
      "step": 21000
    },
    {
      "epoch": 0.32,
      "learning_rate": 7.925194508880567e-05,
      "loss": 2.4671,
      "step": 21500
    },
    {
      "epoch": 0.32,
      "learning_rate": 7.921601755917029e-05,
      "loss": 2.4473,
      "step": 22000
    },
    {
      "epoch": 0.33,
      "learning_rate": 7.917925591452508e-05,
      "loss": 2.3929,
      "step": 22500
    },
    {
      "epoch": 0.34,
      "learning_rate": 7.914166093678117e-05,
      "loss": 2.4158,
      "step": 23000
    },
    {
      "epoch": 0.34,
      "learning_rate": 7.910323342557442e-05,
      "loss": 2.4607,
      "step": 23500
    },
    {
      "epoch": 0.35,
      "learning_rate": 7.906397419824855e-05,
      "loss": 2.4866,
      "step": 24000
    },
    {
      "epoch": 0.36,
      "learning_rate": 7.902388408983759e-05,
      "loss": 2.3708,
      "step": 24500
    },
    {
      "epoch": 0.37,
      "learning_rate": 7.898296395304824e-05,
      "loss": 2.4718,
      "step": 25000
    },
    {
      "epoch": 0.37,
      "learning_rate": 7.894121465824175e-05,
      "loss": 2.4436,
      "step": 25500
    },
    {
      "epoch": 0.38,
      "learning_rate": 7.889863709341528e-05,
      "loss": 2.498,
      "step": 26000
    },
    {
      "epoch": 0.39,
      "learning_rate": 7.885523216418312e-05,
      "loss": 2.4418,
      "step": 26500
    },
    {
      "epoch": 0.4,
      "learning_rate": 7.881100079375742e-05,
      "loss": 2.3653,
      "step": 27000
    },
    {
      "epoch": 0.4,
      "learning_rate": 7.876594392292848e-05,
      "loss": 2.5256,
      "step": 27500
    },
    {
      "epoch": 0.41,
      "learning_rate": 7.872006251004482e-05,
      "loss": 2.549,
      "step": 28000
    },
    {
      "epoch": 0.42,
      "learning_rate": 7.867335753099278e-05,
      "loss": 2.4543,
      "step": 28500
    },
    {
      "epoch": 0.43,
      "learning_rate": 7.86258299791757e-05,
      "loss": 2.4647,
      "step": 29000
    },
    {
      "epoch": 0.43,
      "learning_rate": 7.857748086549292e-05,
      "loss": 2.5375,
      "step": 29500
    },
    {
      "epoch": 0.44,
      "learning_rate": 7.852831121831812e-05,
      "loss": 2.4895,
      "step": 30000
    },
    {
      "epoch": 0.45,
      "learning_rate": 7.847832208347754e-05,
      "loss": 2.4156,
      "step": 30500
    },
    {
      "epoch": 0.45,
      "learning_rate": 7.842751452422775e-05,
      "loss": 2.5006,
      "step": 31000
    },
    {
      "epoch": 0.46,
      "learning_rate": 7.8375889621233e-05,
      "loss": 2.459,
      "step": 31500
    },
    {
      "epoch": 0.47,
      "learning_rate": 7.83234484725422e-05,
      "loss": 2.469,
      "step": 32000
    },
    {
      "epoch": 0.48,
      "learning_rate": 7.827019219356568e-05,
      "loss": 2.4331,
      "step": 32500
    },
    {
      "epoch": 0.48,
      "learning_rate": 7.821612191705128e-05,
      "loss": 2.487,
      "step": 33000
    },
    {
      "epoch": 0.49,
      "learning_rate": 7.816123879306048e-05,
      "loss": 2.5139,
      "step": 33500
    },
    {
      "epoch": 0.5,
      "learning_rate": 7.810554398894376e-05,
      "loss": 2.5117,
      "step": 34000
    },
    {
      "epoch": 0.51,
      "learning_rate": 7.804903868931584e-05,
      "loss": 2.5537,
      "step": 34500
    },
    {
      "epoch": 0.51,
      "learning_rate": 7.79917240960305e-05,
      "loss": 2.4394,
      "step": 35000
    },
    {
      "epoch": 0.52,
      "learning_rate": 7.7933601428155e-05,
      "loss": 2.4285,
      "step": 35500
    },
    {
      "epoch": 0.53,
      "learning_rate": 7.78746719219441e-05,
      "loss": 2.5693,
      "step": 36000
    },
    {
      "epoch": 0.54,
      "learning_rate": 7.781493683081388e-05,
      "loss": 2.4932,
      "step": 36500
    },
    {
      "epoch": 0.54,
      "learning_rate": 7.775439742531495e-05,
      "loss": 2.4468,
      "step": 37000
    },
    {
      "epoch": 0.55,
      "learning_rate": 7.769305499310553e-05,
      "loss": 2.4614,
      "step": 37500
    },
    {
      "epoch": 0.56,
      "learning_rate": 7.763091083892402e-05,
      "loss": 2.6549,
      "step": 38000
    },
    {
      "epoch": 0.56,
      "learning_rate": 7.756796628456121e-05,
      "loss": 2.4871,
      "step": 38500
    },
    {
      "epoch": 0.57,
      "learning_rate": 7.750422266883222e-05,
      "loss": 2.413,
      "step": 39000
    },
    {
      "epoch": 0.58,
      "learning_rate": 7.743968134754806e-05,
      "loss": 2.3566,
      "step": 39500
    },
    {
      "epoch": 0.59,
      "learning_rate": 7.737434369348664e-05,
      "loss": 2.4936,
      "step": 40000
    },
    {
      "epoch": 0.59,
      "learning_rate": 7.730821109636379e-05,
      "loss": 2.453,
      "step": 40500
    },
    {
      "epoch": 0.6,
      "learning_rate": 7.724128496280346e-05,
      "loss": 2.4851,
      "step": 41000
    },
    {
      "epoch": 0.61,
      "learning_rate": 7.717356671630802e-05,
      "loss": 2.4564,
      "step": 41500
    },
    {
      "epoch": 0.62,
      "learning_rate": 7.710505779722786e-05,
      "loss": 2.5133,
      "step": 42000
    },
    {
      "epoch": 0.62,
      "learning_rate": 7.703575966273073e-05,
      "loss": 2.4546,
      "step": 42500
    },
    {
      "epoch": 0.63,
      "learning_rate": 7.696567378677089e-05,
      "loss": 2.5157,
      "step": 43000
    },
    {
      "epoch": 0.64,
      "learning_rate": 7.689480166005756e-05,
      "loss": 2.4248,
      "step": 43500
    },
    {
      "epoch": 0.64,
      "learning_rate": 7.682314479002344e-05,
      "loss": 2.4853,
      "step": 44000
    },
    {
      "epoch": 0.65,
      "learning_rate": 7.67507047007924e-05,
      "loss": 2.4615,
      "step": 44500
    },
    {
      "epoch": 0.66,
      "learning_rate": 7.667748293314729e-05,
      "loss": 2.5391,
      "step": 45000
    },
    {
      "epoch": 0.67,
      "learning_rate": 7.6603481044497e-05,
      "loss": 2.4464,
      "step": 45500
    },
    {
      "epoch": 0.67,
      "learning_rate": 7.652870060884345e-05,
      "loss": 2.4941,
      "step": 46000
    },
    {
      "epoch": 0.68,
      "learning_rate": 7.645314321674803e-05,
      "loss": 2.4708,
      "step": 46500
    },
    {
      "epoch": 0.69,
      "learning_rate": 7.637681047529781e-05,
      "loss": 2.5972,
      "step": 47000
    },
    {
      "epoch": 0.7,
      "learning_rate": 7.629970400807136e-05,
      "loss": 2.5369,
      "step": 47500
    },
    {
      "epoch": 0.7,
      "learning_rate": 7.622182545510419e-05,
      "loss": 2.4348,
      "step": 48000
    },
    {
      "epoch": 0.71,
      "learning_rate": 7.61431764728539e-05,
      "loss": 2.4546,
      "step": 48500
    },
    {
      "epoch": 0.72,
      "learning_rate": 7.606375873416491e-05,
      "loss": 2.4378,
      "step": 49000
    },
    {
      "epoch": 0.73,
      "learning_rate": 7.598357392823292e-05,
      "loss": 2.471,
      "step": 49500
    },
    {
      "epoch": 0.73,
      "learning_rate": 7.590262376056896e-05,
      "loss": 2.4677,
      "step": 50000
    },
    {
      "epoch": 0.74,
      "learning_rate": 7.58209099529631e-05,
      "loss": 2.4245,
      "step": 50500
    },
    {
      "epoch": 0.75,
      "learning_rate": 7.573843424344783e-05,
      "loss": 2.4734,
      "step": 51000
    },
    {
      "epoch": 0.75,
      "learning_rate": 7.565519838626113e-05,
      "loss": 2.4158,
      "step": 51500
    },
    {
      "epoch": 0.76,
      "learning_rate": 7.557120415180916e-05,
      "loss": 2.5098,
      "step": 52000
    },
    {
      "epoch": 0.77,
      "learning_rate": 7.548645332662853e-05,
      "loss": 2.5478,
      "step": 52500
    },
    {
      "epoch": 0.78,
      "learning_rate": 7.540094771334835e-05,
      "loss": 2.4502,
      "step": 53000
    },
    {
      "epoch": 0.78,
      "learning_rate": 7.531468913065192e-05,
      "loss": 2.4264,
      "step": 53500
    },
    {
      "epoch": 0.79,
      "learning_rate": 7.522767941323798e-05,
      "loss": 2.4903,
      "step": 54000
    },
    {
      "epoch": 0.8,
      "learning_rate": 7.513992041178174e-05,
      "loss": 2.4486,
      "step": 54500
    },
    {
      "epoch": 0.81,
      "learning_rate": 7.505141399289549e-05,
      "loss": 2.5171,
      "step": 55000
    },
    {
      "epoch": 0.81,
      "learning_rate": 7.496216203908891e-05,
      "loss": 2.5396,
      "step": 55500
    },
    {
      "epoch": 0.82,
      "learning_rate": 7.487216644872901e-05,
      "loss": 2.4514,
      "step": 56000
    },
    {
      "epoch": 0.83,
      "learning_rate": 7.478142913599978e-05,
      "loss": 2.4017,
      "step": 56500
    },
    {
      "epoch": 0.84,
      "learning_rate": 7.468995203086146e-05,
      "loss": 2.4591,
      "step": 57000
    },
    {
      "epoch": 0.84,
      "learning_rate": 7.459773707900946e-05,
      "loss": 2.5764,
      "step": 57500
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.450478624183306e-05,
      "loss": 2.5013,
      "step": 58000
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.441110149637363e-05,
      "loss": 2.51,
      "step": 58500
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.431668483528254e-05,
      "loss": 2.3992,
      "step": 59000
    },
    {
      "epoch": 0.87,
      "learning_rate": 7.422153826677887e-05,
      "loss": 2.4671,
      "step": 59500
    },
    {
      "epoch": 0.88,
      "learning_rate": 7.412566381460662e-05,
      "loss": 2.5362,
      "step": 60000
    },
    {
      "epoch": 0.89,
      "learning_rate": 7.402906351799175e-05,
      "loss": 2.4981,
      "step": 60500
    },
    {
      "epoch": 0.89,
      "learning_rate": 7.39317394315987e-05,
      "loss": 2.4574,
      "step": 61000
    },
    {
      "epoch": 0.9,
      "learning_rate": 7.383369362548674e-05,
      "loss": 2.4777,
      "step": 61500
    },
    {
      "epoch": 0.91,
      "learning_rate": 7.373492818506597e-05,
      "loss": 2.4628,
      "step": 62000
    },
    {
      "epoch": 0.92,
      "learning_rate": 7.363544521105292e-05,
      "loss": 2.4506,
      "step": 62500
    },
    {
      "epoch": 0.92,
      "learning_rate": 7.353524681942585e-05,
      "loss": 2.5644,
      "step": 63000
    },
    {
      "epoch": 0.93,
      "learning_rate": 7.343433514137987e-05,
      "loss": 2.5131,
      "step": 63500
    },
    {
      "epoch": 0.94,
      "learning_rate": 7.333271232328141e-05,
      "loss": 2.5575,
      "step": 64000
    },
    {
      "epoch": 0.95,
      "learning_rate": 7.32303805266227e-05,
      "loss": 2.5901,
      "step": 64500
    },
    {
      "epoch": 0.95,
      "learning_rate": 7.312734192797583e-05,
      "loss": 2.5701,
      "step": 65000
    },
    {
      "epoch": 0.96,
      "learning_rate": 7.302359871894635e-05,
      "loss": 2.5201,
      "step": 65500
    },
    {
      "epoch": 0.97,
      "learning_rate": 7.291915310612666e-05,
      "loss": 2.5402,
      "step": 66000
    },
    {
      "epoch": 0.97,
      "learning_rate": 7.281400731104918e-05,
      "loss": 2.4797,
      "step": 66500
    },
    {
      "epoch": 0.98,
      "learning_rate": 7.2708163570139e-05,
      "loss": 2.4836,
      "step": 67000
    },
    {
      "epoch": 0.99,
      "learning_rate": 7.260162413466636e-05,
      "loss": 2.4707,
      "step": 67500
    },
    {
      "epoch": 1.0,
      "learning_rate": 7.249439127069873e-05,
      "loss": 2.4334,
      "step": 68000
    },
    {
      "epoch": 1.0,
      "eval_bleu": 1.0,
      "eval_brevity_penalty": 1.0,
      "eval_length_ratio": 1.0,
      "eval_loss": 2.243044853210449,
      "eval_precisions": [
        1.0,
        1.0,
        1.0,
        1.0
      ],
      "eval_reference_length": 7761920,
      "eval_runtime": 17988.3779,
      "eval_samples_per_second": 0.843,
      "eval_steps_per_second": 0.421,
      "eval_translation_length": 7761920,
      "step": 68219
    },
    {
      "epoch": 1.0,
      "learning_rate": 7.238646725905267e-05,
      "loss": 2.3238,
      "step": 68500
    },
    {
      "epoch": 1.01,
      "learning_rate": 7.227785439524523e-05,
      "loss": 2.1194,
      "step": 69000
    },
    {
      "epoch": 1.02,
      "learning_rate": 7.216855498944522e-05,
      "loss": 2.1879,
      "step": 69500
    },
    {
      "epoch": 1.03,
      "learning_rate": 7.205857136642402e-05,
      "loss": 2.0581,
      "step": 70000
    },
    {
      "epoch": 1.03,
      "learning_rate": 7.194790586550611e-05,
      "loss": 2.1256,
      "step": 70500
    },
    {
      "epoch": 1.04,
      "learning_rate": 7.183656084051938e-05,
      "loss": 2.1933,
      "step": 71000
    },
    {
      "epoch": 1.05,
      "learning_rate": 7.172453865974503e-05,
      "loss": 2.1403,
      "step": 71500
    },
    {
      "epoch": 1.06,
      "learning_rate": 7.161184170586717e-05,
      "loss": 2.1402,
      "step": 72000
    },
    {
      "epoch": 1.06,
      "learning_rate": 7.149847237592218e-05,
      "loss": 2.0527,
      "step": 72500
    },
    {
      "epoch": 1.07,
      "learning_rate": 7.138443308124771e-05,
      "loss": 2.169,
      "step": 73000
    },
    {
      "epoch": 1.08,
      "learning_rate": 7.12697262474314e-05,
      "loss": 2.1674,
      "step": 73500
    },
    {
      "epoch": 1.08,
      "learning_rate": 7.115435431425928e-05,
      "loss": 2.0606,
      "step": 74000
    },
    {
      "epoch": 1.09,
      "learning_rate": 7.103831973566391e-05,
      "loss": 2.1421,
      "step": 74500
    },
    {
      "epoch": 1.1,
      "learning_rate": 7.092162497967207e-05,
      "loss": 2.1357,
      "step": 75000
    },
    {
      "epoch": 1.11,
      "learning_rate": 7.080427252835243e-05,
      "loss": 2.1281,
      "step": 75500
    },
    {
      "epoch": 1.11,
      "learning_rate": 7.068626487776266e-05,
      "loss": 2.095,
      "step": 76000
    },
    {
      "epoch": 1.12,
      "learning_rate": 7.056760453789635e-05,
      "loss": 2.1362,
      "step": 76500
    },
    {
      "epoch": 1.13,
      "learning_rate": 7.044829403262961e-05,
      "loss": 2.1443,
      "step": 77000
    },
    {
      "epoch": 1.14,
      "learning_rate": 7.032833589966745e-05,
      "loss": 2.2343,
      "step": 77500
    },
    {
      "epoch": 1.14,
      "learning_rate": 7.02077326904897e-05,
      "loss": 2.0832,
      "step": 78000
    },
    {
      "epoch": 1.15,
      "learning_rate": 7.008648697029687e-05,
      "loss": 2.087,
      "step": 78500
    },
    {
      "epoch": 1.16,
      "learning_rate": 6.996460131795548e-05,
      "loss": 2.146,
      "step": 79000
    },
    {
      "epoch": 1.17,
      "learning_rate": 6.984207832594325e-05,
      "loss": 2.1542,
      "step": 79500
    },
    {
      "epoch": 1.17,
      "learning_rate": 6.971892060029398e-05,
      "loss": 2.0636,
      "step": 80000
    },
    {
      "epoch": 1.18,
      "learning_rate": 6.959513076054207e-05,
      "loss": 2.1275,
      "step": 80500
    },
    {
      "epoch": 1.19,
      "learning_rate": 6.947071143966686e-05,
      "loss": 2.1221,
      "step": 81000
    },
    {
      "epoch": 1.19,
      "learning_rate": 6.934566528403655e-05,
      "loss": 2.097,
      "step": 81500
    },
    {
      "epoch": 1.2,
      "learning_rate": 6.921999495335204e-05,
      "loss": 2.1852,
      "step": 82000
    },
    {
      "epoch": 1.21,
      "learning_rate": 6.909370312059018e-05,
      "loss": 2.125,
      "step": 82500
    },
    {
      "epoch": 1.22,
      "learning_rate": 6.896679247194712e-05,
      "loss": 2.1584,
      "step": 83000
    },
    {
      "epoch": 1.22,
      "learning_rate": 6.883926570678098e-05,
      "loss": 2.203,
      "step": 83500
    },
    {
      "epoch": 1.23,
      "learning_rate": 6.871112553755457e-05,
      "loss": 2.1556,
      "step": 84000
    },
    {
      "epoch": 1.24,
      "learning_rate": 6.858237468977765e-05,
      "loss": 2.081,
      "step": 84500
    },
    {
      "epoch": 1.25,
      "learning_rate": 6.845301590194898e-05,
      "loss": 2.2032,
      "step": 85000
    },
    {
      "epoch": 1.25,
      "learning_rate": 6.8323051925498e-05,
      "loss": 2.169,
      "step": 85500
    },
    {
      "epoch": 1.26,
      "learning_rate": 6.819248552472642e-05,
      "loss": 2.1197,
      "step": 86000
    },
    {
      "epoch": 1.27,
      "learning_rate": 6.806131947674935e-05,
      "loss": 2.1572,
      "step": 86500
    },
    {
      "epoch": 1.28,
      "learning_rate": 6.792955657143622e-05,
      "loss": 2.1894,
      "step": 87000
    },
    {
      "epoch": 1.28,
      "learning_rate": 6.779719961135148e-05,
      "loss": 2.2259,
      "step": 87500
    },
    {
      "epoch": 1.29,
      "learning_rate": 6.766425141169503e-05,
      "loss": 2.0972,
      "step": 88000
    },
    {
      "epoch": 1.3,
      "learning_rate": 6.753071480024223e-05,
      "loss": 2.1572,
      "step": 88500
    },
    {
      "epoch": 1.3,
      "learning_rate": 6.739659261728381e-05,
      "loss": 2.1574,
      "step": 89000
    },
    {
      "epoch": 1.31,
      "learning_rate": 6.726188771556552e-05,
      "loss": 2.1689,
      "step": 89500
    },
    {
      "epoch": 1.32,
      "learning_rate": 6.712660296022731e-05,
      "loss": 2.1866,
      "step": 90000
    },
    {
      "epoch": 1.33,
      "learning_rate": 6.699074122874255e-05,
      "loss": 2.1624,
      "step": 90500
    },
    {
      "epoch": 1.33,
      "learning_rate": 6.685430541085673e-05,
      "loss": 2.147,
      "step": 91000
    },
    {
      "epoch": 1.34,
      "learning_rate": 6.671729840852598e-05,
      "loss": 2.2042,
      "step": 91500
    },
    {
      "epoch": 1.35,
      "learning_rate": 6.657972313585541e-05,
      "loss": 2.1646,
      "step": 92000
    },
    {
      "epoch": 1.36,
      "learning_rate": 6.644158251903713e-05,
      "loss": 2.151,
      "step": 92500
    },
    {
      "epoch": 1.36,
      "learning_rate": 6.630287949628791e-05,
      "loss": 2.1218,
      "step": 93000
    },
    {
      "epoch": 1.37,
      "learning_rate": 6.616361701778681e-05,
      "loss": 2.1922,
      "step": 93500
    },
    {
      "epoch": 1.38,
      "learning_rate": 6.602379804561237e-05,
      "loss": 2.1459,
      "step": 94000
    },
    {
      "epoch": 1.39,
      "learning_rate": 6.588342555367958e-05,
      "loss": 2.1345,
      "step": 94500
    },
    {
      "epoch": 1.39,
      "learning_rate": 6.57425025276767e-05,
      "loss": 2.2256,
      "step": 95000
    },
    {
      "epoch": 1.4,
      "learning_rate": 6.560103196500169e-05,
      "loss": 2.163,
      "step": 95500
    },
    {
      "epoch": 1.41,
      "learning_rate": 6.545901687469848e-05,
      "loss": 2.2915,
      "step": 96000
    },
    {
      "epoch": 1.41,
      "learning_rate": 6.531646027739298e-05,
      "loss": 2.1037,
      "step": 96500
    },
    {
      "epoch": 1.42,
      "learning_rate": 6.517336520522879e-05,
      "loss": 2.2256,
      "step": 97000
    },
    {
      "epoch": 1.43,
      "learning_rate": 6.50297347018028e-05,
      "loss": 2.1624,
      "step": 97500
    },
    {
      "epoch": 1.44,
      "learning_rate": 6.48855718221003e-05,
      "loss": 2.2168,
      "step": 98000
    },
    {
      "epoch": 1.44,
      "learning_rate": 6.47408796324302e-05,
      "loss": 2.2301,
      "step": 98500
    },
    {
      "epoch": 1.45,
      "learning_rate": 6.45956612103596e-05,
      "loss": 2.1579,
      "step": 99000
    },
    {
      "epoch": 1.46,
      "learning_rate": 6.444991964464851e-05,
      "loss": 2.1901,
      "step": 99500
    },
    {
      "epoch": 1.47,
      "learning_rate": 6.430365803518404e-05,
      "loss": 2.179,
      "step": 100000
    },
    {
      "epoch": 1.47,
      "learning_rate": 6.415687949291448e-05,
      "loss": 2.2105,
      "step": 100500
    },
    {
      "epoch": 1.48,
      "learning_rate": 6.40095871397832e-05,
      "loss": 2.1975,
      "step": 101000
    },
    {
      "epoch": 1.49,
      "learning_rate": 6.386178410866217e-05,
      "loss": 2.1788,
      "step": 101500
    },
    {
      "epoch": 1.5,
      "learning_rate": 6.371347354328537e-05,
      "loss": 2.2073,
      "step": 102000
    },
    {
      "epoch": 1.5,
      "learning_rate": 6.356465859818192e-05,
      "loss": 2.1706,
      "step": 102500
    },
    {
      "epoch": 1.51,
      "learning_rate": 6.341534243860894e-05,
      "loss": 2.1783,
      "step": 103000
    },
    {
      "epoch": 1.52,
      "learning_rate": 6.326552824048426e-05,
      "loss": 2.1604,
      "step": 103500
    },
    {
      "epoch": 1.52,
      "learning_rate": 6.31152191903189e-05,
      "loss": 2.1701,
      "step": 104000
    },
    {
      "epoch": 1.53,
      "learning_rate": 6.296441848514925e-05,
      "loss": 2.1284,
      "step": 104500
    },
    {
      "epoch": 1.54,
      "learning_rate": 6.281312933246902e-05,
      "loss": 2.158,
      "step": 105000
    },
    {
      "epoch": 1.55,
      "learning_rate": 6.26613549501612e-05,
      "loss": 2.1539,
      "step": 105500
    },
    {
      "epoch": 1.55,
      "learning_rate": 6.250909856642937e-05,
      "loss": 2.2176,
      "step": 106000
    },
    {
      "epoch": 1.56,
      "learning_rate": 6.235636341972923e-05,
      "loss": 2.227,
      "step": 106500
    },
    {
      "epoch": 1.57,
      "learning_rate": 6.220315275869965e-05,
      "loss": 2.1622,
      "step": 107000
    },
    {
      "epoch": 1.58,
      "learning_rate": 6.204946984209355e-05,
      "loss": 2.2304,
      "step": 107500
    },
    {
      "epoch": 1.58,
      "learning_rate": 6.189531793870862e-05,
      "loss": 2.1883,
      "step": 108000
    },
    {
      "epoch": 1.59,
      "learning_rate": 6.174070032731776e-05,
      "loss": 2.1939,
      "step": 108500
    },
    {
      "epoch": 1.6,
      "learning_rate": 6.15856202965994e-05,
      "loss": 2.225,
      "step": 109000
    },
    {
      "epoch": 1.61,
      "learning_rate": 6.14300811450675e-05,
      "loss": 2.1847,
      "step": 109500
    },
    {
      "epoch": 1.61,
      "learning_rate": 6.12740861810014e-05,
      "loss": 2.2027,
      "step": 110000
    },
    {
      "epoch": 1.62,
      "learning_rate": 6.111763872237548e-05,
      "loss": 2.1629,
      "step": 110500
    },
    {
      "epoch": 1.63,
      "learning_rate": 6.096074209678854e-05,
      "loss": 2.2492,
      "step": 111000
    },
    {
      "epoch": 1.63,
      "learning_rate": 6.080339964139306e-05,
      "loss": 2.1836,
      "step": 111500
    },
    {
      "epoch": 1.64,
      "learning_rate": 6.0645614702824225e-05,
      "loss": 2.2321,
      "step": 112000
    },
    {
      "epoch": 1.65,
      "learning_rate": 6.0487390637128715e-05,
      "loss": 2.1377,
      "step": 112500
    },
    {
      "epoch": 1.66,
      "learning_rate": 6.032873080969333e-05,
      "loss": 2.1757,
      "step": 113000
    },
    {
      "epoch": 1.66,
      "learning_rate": 6.016963859517342e-05,
      "loss": 2.2636,
      "step": 113500
    },
    {
      "epoch": 1.67,
      "learning_rate": 6.00101173774211e-05,
      "loss": 2.1175,
      "step": 114000
    },
    {
      "epoch": 1.68,
      "learning_rate": 5.9850170549413294e-05,
      "loss": 2.2032,
      "step": 114500
    },
    {
      "epoch": 1.69,
      "learning_rate": 5.968980151317953e-05,
      "loss": 2.1685,
      "step": 115000
    },
    {
      "epoch": 1.69,
      "learning_rate": 5.9529013679729616e-05,
      "loss": 2.1367,
      "step": 115500
    },
    {
      "epoch": 1.7,
      "learning_rate": 5.936781046898106e-05,
      "loss": 2.144,
      "step": 116000
    },
    {
      "epoch": 1.71,
      "learning_rate": 5.920619530968634e-05,
      "loss": 2.2216,
      "step": 116500
    },
    {
      "epoch": 1.72,
      "learning_rate": 5.904417163936001e-05,
      "loss": 2.1873,
      "step": 117000
    },
    {
      "epoch": 1.72,
      "learning_rate": 5.88817429042055e-05,
      "loss": 2.2138,
      "step": 117500
    },
    {
      "epoch": 1.73,
      "learning_rate": 5.871891255904191e-05,
      "loss": 2.1854,
      "step": 118000
    },
    {
      "epoch": 1.74,
      "learning_rate": 5.855568406723049e-05,
      "loss": 2.2012,
      "step": 118500
    },
    {
      "epoch": 1.74,
      "learning_rate": 5.839206090060094e-05,
      "loss": 2.2523,
      "step": 119000
    },
    {
      "epoch": 1.75,
      "learning_rate": 5.82280465393776e-05,
      "loss": 2.2806,
      "step": 119500
    },
    {
      "epoch": 1.76,
      "learning_rate": 5.806364447210545e-05,
      "loss": 2.2008,
      "step": 120000
    },
    {
      "epoch": 1.77,
      "learning_rate": 5.789885819557585e-05,
      "loss": 2.2525,
      "step": 120500
    },
    {
      "epoch": 1.77,
      "learning_rate": 5.773369121475223e-05,
      "loss": 2.2509,
      "step": 121000
    },
    {
      "epoch": 1.78,
      "learning_rate": 5.756814704269547e-05,
      "loss": 2.2109,
      "step": 121500
    },
    {
      "epoch": 1.79,
      "learning_rate": 5.7402229200489246e-05,
      "loss": 2.1961,
      "step": 122000
    },
    {
      "epoch": 1.8,
      "learning_rate": 5.7235941217165076e-05,
      "loss": 2.2703,
      "step": 122500
    },
    {
      "epoch": 1.8,
      "learning_rate": 5.706928662962732e-05,
      "loss": 2.2202,
      "step": 123000
    },
    {
      "epoch": 1.81,
      "learning_rate": 5.690226898257789e-05,
      "loss": 2.2061,
      "step": 123500
    },
    {
      "epoch": 1.82,
      "learning_rate": 5.6734891828440906e-05,
      "loss": 2.2972,
      "step": 124000
    },
    {
      "epoch": 1.83,
      "learning_rate": 5.6567158727287094e-05,
      "loss": 2.2,
      "step": 124500
    },
    {
      "epoch": 1.83,
      "learning_rate": 5.639907324675809e-05,
      "loss": 2.2358,
      "step": 125000
    },
    {
      "epoch": 1.84,
      "learning_rate": 5.623063896199059e-05,
      "loss": 2.2434,
      "step": 125500
    },
    {
      "epoch": 1.85,
      "learning_rate": 5.606185945554023e-05,
      "loss": 2.1901,
      "step": 126000
    },
    {
      "epoch": 1.85,
      "learning_rate": 5.589273831730541e-05,
      "loss": 2.1735,
      "step": 126500
    },
    {
      "epoch": 1.86,
      "learning_rate": 5.572327914445101e-05,
      "loss": 2.2449,
      "step": 127000
    },
    {
      "epoch": 1.87,
      "learning_rate": 5.555348554133178e-05,
      "loss": 2.1991,
      "step": 127500
    },
    {
      "epoch": 1.88,
      "learning_rate": 5.538336111941571e-05,
      "loss": 2.1521,
      "step": 128000
    },
    {
      "epoch": 1.88,
      "learning_rate": 5.521290949720728e-05,
      "loss": 2.2191,
      "step": 128500
    },
    {
      "epoch": 1.89,
      "learning_rate": 5.504213430017038e-05,
      "loss": 2.237,
      "step": 129000
    },
    {
      "epoch": 1.9,
      "learning_rate": 5.4871039160651295e-05,
      "loss": 2.2709,
      "step": 129500
    },
    {
      "epoch": 1.91,
      "learning_rate": 5.4699627717801396e-05,
      "loss": 2.2217,
      "step": 130000
    },
    {
      "epoch": 1.91,
      "learning_rate": 5.452790361749973e-05,
      "loss": 2.2418,
      "step": 130500
    },
    {
      "epoch": 1.92,
      "learning_rate": 5.4355870512275515e-05,
      "loss": 2.2513,
      "step": 131000
    },
    {
      "epoch": 1.93,
      "learning_rate": 5.418353206123042e-05,
      "loss": 2.2671,
      "step": 131500
    },
    {
      "epoch": 1.93,
      "learning_rate": 5.401089192996074e-05,
      "loss": 2.2706,
      "step": 132000
    },
    {
      "epoch": 1.94,
      "learning_rate": 5.383795379047944e-05,
      "loss": 2.23,
      "step": 132500
    },
    {
      "epoch": 1.95,
      "learning_rate": 5.3664721321138046e-05,
      "loss": 2.2251,
      "step": 133000
    },
    {
      "epoch": 1.96,
      "learning_rate": 5.349119820654839e-05,
      "loss": 2.2968,
      "step": 133500
    },
    {
      "epoch": 1.96,
      "learning_rate": 5.331738813750429e-05,
      "loss": 2.2243,
      "step": 134000
    },
    {
      "epoch": 1.97,
      "learning_rate": 5.314329481090299e-05,
      "loss": 2.277,
      "step": 134500
    },
    {
      "epoch": 1.98,
      "learning_rate": 5.296892192966653e-05,
      "loss": 2.2377,
      "step": 135000
    },
    {
      "epoch": 1.99,
      "learning_rate": 5.2794273202663085e-05,
      "loss": 2.2582,
      "step": 135500
    },
    {
      "epoch": 1.99,
      "learning_rate": 5.261935234462791e-05,
      "loss": 2.2732,
      "step": 136000
    },
    {
      "epoch": 2.0,
      "eval_bleu": 1.0,
      "eval_brevity_penalty": 1.0,
      "eval_length_ratio": 1.0,
      "eval_loss": 2.259059429168701,
      "eval_precisions": [
        1.0,
        1.0,
        1.0,
        1.0
      ],
      "eval_reference_length": 7761920,
      "eval_runtime": 17938.9236,
      "eval_samples_per_second": 0.845,
      "eval_steps_per_second": 0.423,
      "eval_translation_length": 7761920,
      "step": 136438
    },
    {
      "epoch": 2.0,
      "learning_rate": 5.2444163076084484e-05,
      "loss": 2.1805,
      "step": 136500
    },
    {
      "epoch": 2.01,
      "learning_rate": 5.226870912326529e-05,
      "loss": 1.8318,
      "step": 137000
    },
    {
      "epoch": 2.02,
      "learning_rate": 5.209299421803256e-05,
      "loss": 1.7868,
      "step": 137500
    },
    {
      "epoch": 2.02,
      "learning_rate": 5.1917022097798964e-05,
      "loss": 1.8616,
      "step": 138000
    },
    {
      "epoch": 2.03,
      "learning_rate": 5.174079650544804e-05,
      "loss": 1.862,
      "step": 138500
    },
    {
      "epoch": 2.04,
      "learning_rate": 5.1564321189254605e-05,
      "loss": 1.7721,
      "step": 139000
    },
    {
      "epoch": 2.04,
      "learning_rate": 5.138759990280507e-05,
      "loss": 1.8478,
      "step": 139500
    },
    {
      "epoch": 2.05,
      "learning_rate": 5.1210636404917554e-05,
      "loss": 1.935,
      "step": 140000
    },
    {
      "epoch": 2.06,
      "learning_rate": 5.1033434459561965e-05,
      "loss": 1.8431,
      "step": 140500
    },
    {
      "epoch": 2.07,
      "learning_rate": 5.0855997835779926e-05,
      "loss": 1.8654,
      "step": 141000
    },
    {
      "epoch": 2.07,
      "learning_rate": 5.067833030760462e-05,
      "loss": 1.7853,
      "step": 141500
    },
    {
      "epoch": 2.08,
      "learning_rate": 5.0500435653980455e-05,
      "loss": 1.8631,
      "step": 142000
    },
    {
      "epoch": 2.09,
      "learning_rate": 5.032231765868284e-05,
      "loss": 1.8427,
      "step": 142500
    },
    {
      "epoch": 2.1,
      "learning_rate": 5.014398011023752e-05,
      "loss": 1.8528,
      "step": 143000
    },
    {
      "epoch": 2.1,
      "learning_rate": 4.9965426801840127e-05,
      "loss": 1.8048,
      "step": 143500
    },
    {
      "epoch": 2.11,
      "learning_rate": 4.9786661531275425e-05,
      "loss": 1.8064,
      "step": 144000
    },
    {
      "epoch": 2.12,
      "learning_rate": 4.96076881008366e-05,
      "loss": 1.9335,
      "step": 144500
    },
    {
      "epoch": 2.13,
      "learning_rate": 4.9428510317244295e-05,
      "loss": 1.9127,
      "step": 145000
    },
    {
      "epoch": 2.13,
      "learning_rate": 4.924913199156575e-05,
      "loss": 1.8177,
      "step": 145500
    },
    {
      "epoch": 2.14,
      "learning_rate": 4.906955693913363e-05,
      "loss": 1.8531,
      "step": 146000
    },
    {
      "epoch": 2.15,
      "learning_rate": 4.8889788979465e-05,
      "loss": 1.8327,
      "step": 146500
    },
    {
      "epoch": 2.15,
      "learning_rate": 4.870983193617993e-05,
      "loss": 1.8751,
      "step": 147000
    },
    {
      "epoch": 2.16,
      "learning_rate": 4.8529689636920306e-05,
      "loss": 1.865,
      "step": 147500
    },
    {
      "epoch": 2.17,
      "learning_rate": 4.834936591326834e-05,
      "loss": 1.9178,
      "step": 148000
    },
    {
      "epoch": 2.18,
      "learning_rate": 4.8168864600665134e-05,
      "loss": 1.8717,
      "step": 148500
    },
    {
      "epoch": 2.18,
      "learning_rate": 4.798818953832902e-05,
      "loss": 1.8991,
      "step": 149000
    },
    {
      "epoch": 2.19,
      "learning_rate": 4.780734456917396e-05,
      "loss": 1.8914,
      "step": 149500
    },
    {
      "epoch": 2.2,
      "learning_rate": 4.7626333539727793e-05,
      "loss": 1.9122,
      "step": 150000
    },
    {
      "epoch": 2.21,
      "learning_rate": 4.744516030005041e-05,
      "loss": 1.8922,
      "step": 150500
    },
    {
      "epoch": 2.21,
      "learning_rate": 4.72638287036519e-05,
      "loss": 1.929,
      "step": 151000
    },
    {
      "epoch": 2.22,
      "learning_rate": 4.7082342607410514e-05,
      "loss": 1.8535,
      "step": 151500
    },
    {
      "epoch": 2.23,
      "learning_rate": 4.6900705871490736e-05,
      "loss": 1.8546,
      "step": 152000
    },
    {
      "epoch": 2.24,
      "learning_rate": 4.671892235926106e-05,
      "loss": 1.8847,
      "step": 152500
    },
    {
      "epoch": 2.24,
      "learning_rate": 4.653699593721192e-05,
      "loss": 1.8957,
      "step": 153000
    },
    {
      "epoch": 2.25,
      "learning_rate": 4.635493047487339e-05,
      "loss": 1.909,
      "step": 153500
    },
    {
      "epoch": 2.26,
      "learning_rate": 4.61727298447329e-05,
      "loss": 1.8491,
      "step": 154000
    },
    {
      "epoch": 2.26,
      "learning_rate": 4.5990397922152834e-05,
      "loss": 1.9239,
      "step": 154500
    },
    {
      "epoch": 2.27,
      "learning_rate": 4.580793858528819e-05,
      "loss": 1.893,
      "step": 155000
    },
    {
      "epoch": 2.28,
      "learning_rate": 4.562535571500399e-05,
      "loss": 1.8956,
      "step": 155500
    },
    {
      "epoch": 2.29,
      "learning_rate": 4.5442653194792806e-05,
      "loss": 1.9018,
      "step": 156000
    },
    {
      "epoch": 2.29,
      "learning_rate": 4.525983491069215e-05,
      "loss": 1.8923,
      "step": 156500
    },
    {
      "epoch": 2.3,
      "learning_rate": 4.507690475120176e-05,
      "loss": 1.8883,
      "step": 157000
    },
    {
      "epoch": 2.31,
      "learning_rate": 4.489386660720095e-05,
      "loss": 1.8547,
      "step": 157500
    },
    {
      "epoch": 2.32,
      "learning_rate": 4.471072437186585e-05,
      "loss": 1.8442,
      "step": 158000
    },
    {
      "epoch": 2.32,
      "learning_rate": 4.452748194058659e-05,
      "loss": 1.8919,
      "step": 158500
    },
    {
      "epoch": 2.33,
      "learning_rate": 4.4344143210884436e-05,
      "loss": 1.8636,
      "step": 159000
    },
    {
      "epoch": 2.34,
      "learning_rate": 4.416071208232888e-05,
      "loss": 1.8752,
      "step": 159500
    },
    {
      "epoch": 2.35,
      "learning_rate": 4.397719245645475e-05,
      "loss": 1.8668,
      "step": 160000
    },
    {
      "epoch": 2.35,
      "learning_rate": 4.3793588236679173e-05,
      "loss": 1.9294,
      "step": 160500
    },
    {
      "epoch": 2.36,
      "learning_rate": 4.360990332821855e-05,
      "loss": 1.8352,
      "step": 161000
    },
    {
      "epoch": 2.37,
      "learning_rate": 4.342614163800553e-05,
      "loss": 1.9387,
      "step": 161500
    },
    {
      "epoch": 2.37,
      "learning_rate": 4.32423070746059e-05,
      "loss": 1.8752,
      "step": 162000
    },
    {
      "epoch": 2.38,
      "learning_rate": 4.3058403548135426e-05,
      "loss": 1.9911,
      "step": 162500
    },
    {
      "epoch": 2.39,
      "learning_rate": 4.287443497017667e-05,
      "loss": 1.8686,
      "step": 163000
    },
    {
      "epoch": 2.4,
      "learning_rate": 4.269040525369591e-05,
      "loss": 1.8261,
      "step": 163500
    },
    {
      "epoch": 2.4,
      "learning_rate": 4.250631831295975e-05,
      "loss": 1.8804,
      "step": 164000
    },
    {
      "epoch": 2.41,
      "learning_rate": 4.2322178063451956e-05,
      "loss": 1.8813,
      "step": 164500
    },
    {
      "epoch": 2.42,
      "learning_rate": 4.2137988421790176e-05,
      "loss": 1.8637,
      "step": 165000
    },
    {
      "epoch": 2.43,
      "learning_rate": 4.195375330564259e-05,
      "loss": 1.8864,
      "step": 165500
    },
    {
      "epoch": 2.43,
      "learning_rate": 4.176947663364464e-05,
      "loss": 1.8915,
      "step": 166000
    },
    {
      "epoch": 2.44,
      "learning_rate": 4.158516232531565e-05,
      "loss": 1.8492,
      "step": 166500
    },
    {
      "epoch": 2.45,
      "learning_rate": 4.140081430097541e-05,
      "loss": 1.8539,
      "step": 167000
    },
    {
      "epoch": 2.46,
      "learning_rate": 4.121643648166088e-05,
      "loss": 1.8811,
      "step": 167500
    },
    {
      "epoch": 2.46,
      "learning_rate": 4.1032032789042785e-05,
      "loss": 1.8897,
      "step": 168000
    },
    {
      "epoch": 2.47,
      "learning_rate": 4.084760714534209e-05,
      "loss": 1.8895,
      "step": 168500
    },
    {
      "epoch": 2.48,
      "learning_rate": 4.066316347324673e-05,
      "loss": 1.9344,
      "step": 169000
    },
    {
      "epoch": 2.48,
      "learning_rate": 4.047870569582805e-05,
      "loss": 1.8943,
      "step": 169500
    },
    {
      "epoch": 2.49,
      "learning_rate": 4.029423773645742e-05,
      "loss": 1.8904,
      "step": 170000
    },
    {
      "epoch": 2.5,
      "learning_rate": 4.0109763518722786e-05,
      "loss": 1.8938,
      "step": 170500
    },
    {
      "epoch": 2.51,
      "learning_rate": 3.992528696634522e-05,
      "loss": 1.937,
      "step": 171000
    },
    {
      "epoch": 2.51,
      "learning_rate": 3.9740812003095435e-05,
      "loss": 1.9084,
      "step": 171500
    },
    {
      "epoch": 2.52,
      "learning_rate": 3.9556342552710344e-05,
      "loss": 1.8989,
      "step": 172000
    },
    {
      "epoch": 2.53,
      "learning_rate": 3.937188253880958e-05,
      "loss": 1.9068,
      "step": 172500
    },
    {
      "epoch": 2.54,
      "learning_rate": 3.9187435884812134e-05,
      "loss": 1.8497,
      "step": 173000
    },
    {
      "epoch": 2.54,
      "learning_rate": 3.900300651385277e-05,
      "loss": 1.8392,
      "step": 173500
    },
    {
      "epoch": 2.55,
      "learning_rate": 3.8818598348698666e-05,
      "loss": 1.9431,
      "step": 174000
    },
    {
      "epoch": 2.56,
      "learning_rate": 3.863421531166596e-05,
      "loss": 1.9361,
      "step": 174500
    },
    {
      "epoch": 2.57,
      "learning_rate": 3.844986132453632e-05,
      "loss": 1.8738,
      "step": 175000
    },
    {
      "epoch": 2.57,
      "learning_rate": 3.826554030847353e-05,
      "loss": 1.9457,
      "step": 175500
    },
    {
      "epoch": 2.58,
      "learning_rate": 3.8081256183940086e-05,
      "loss": 1.9293,
      "step": 176000
    },
    {
      "epoch": 2.59,
      "learning_rate": 3.7897012870613815e-05,
      "loss": 1.9107,
      "step": 176500
    },
    {
      "epoch": 2.59,
      "learning_rate": 3.771281428730448e-05,
      "loss": 1.8691,
      "step": 177000
    },
    {
      "epoch": 2.6,
      "learning_rate": 3.75286643518705e-05,
      "loss": 1.856,
      "step": 177500
    },
    {
      "epoch": 2.61,
      "learning_rate": 3.7344566981135476e-05,
      "loss": 1.9258,
      "step": 178000
    },
    {
      "epoch": 2.62,
      "learning_rate": 3.716052609080507e-05,
      "loss": 1.8476,
      "step": 178500
    },
    {
      "epoch": 2.62,
      "learning_rate": 3.697654559538356e-05,
      "loss": 1.8763,
      "step": 179000
    },
    {
      "epoch": 2.63,
      "learning_rate": 3.6792629408090645e-05,
      "loss": 1.903,
      "step": 179500
    },
    {
      "epoch": 2.64,
      "learning_rate": 3.660878144077822e-05,
      "loss": 1.8962,
      "step": 180000
    },
    {
      "epoch": 2.65,
      "learning_rate": 3.642500560384714e-05,
      "loss": 1.965,
      "step": 180500
    },
    {
      "epoch": 2.65,
      "learning_rate": 3.62413058061641e-05,
      "loss": 1.9176,
      "step": 181000
    },
    {
      "epoch": 2.66,
      "learning_rate": 3.605768595497842e-05,
      "loss": 1.9391,
      "step": 181500
    },
    {
      "epoch": 2.67,
      "learning_rate": 3.5874149955839e-05,
      "loss": 1.9248,
      "step": 182000
    },
    {
      "epoch": 2.68,
      "learning_rate": 3.569070171251121e-05,
      "loss": 1.9606,
      "step": 182500
    },
    {
      "epoch": 2.68,
      "learning_rate": 3.5507345126893905e-05,
      "loss": 1.9195,
      "step": 183000
    },
    {
      "epoch": 2.69,
      "learning_rate": 3.5324084098936375e-05,
      "loss": 1.8899,
      "step": 183500
    },
    {
      "epoch": 2.7,
      "learning_rate": 3.514092252655542e-05,
      "loss": 1.9041,
      "step": 184000
    },
    {
      "epoch": 2.7,
      "learning_rate": 3.495786430555247e-05,
      "loss": 1.8918,
      "step": 184500
    },
    {
      "epoch": 2.71,
      "learning_rate": 3.477491332953069e-05,
      "loss": 1.8615,
      "step": 185000
    },
    {
      "epoch": 2.72,
      "learning_rate": 3.459207348981214e-05,
      "loss": 1.9503,
      "step": 185500
    },
    {
      "epoch": 2.73,
      "learning_rate": 3.4409348675355084e-05,
      "loss": 1.8883,
      "step": 186000
    },
    {
      "epoch": 2.73,
      "learning_rate": 3.422674277267118e-05,
      "loss": 1.8797,
      "step": 186500
    },
    {
      "epoch": 2.74,
      "learning_rate": 3.404425966574292e-05,
      "loss": 1.9118,
      "step": 187000
    },
    {
      "epoch": 2.75,
      "learning_rate": 3.386190323594091e-05,
      "loss": 1.8573,
      "step": 187500
    },
    {
      "epoch": 2.76,
      "learning_rate": 3.3679677361941365e-05,
      "loss": 1.9455,
      "step": 188000
    },
    {
      "epoch": 2.76,
      "learning_rate": 3.349758591964365e-05,
      "loss": 1.9058,
      "step": 188500
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.3315632782087784e-05,
      "loss": 1.9369,
      "step": 189000
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.3133821819372065e-05,
      "loss": 1.9005,
      "step": 189500
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.295215689857077e-05,
      "loss": 1.9293,
      "step": 190000
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.277064188365191e-05,
      "loss": 1.9078,
      "step": 190500
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.258928063539502e-05,
      "loss": 1.928,
      "step": 191000
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.240807701130909e-05,
      "loss": 1.9753,
      "step": 191500
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.2227034865550444e-05,
      "loss": 1.9069,
      "step": 192000
    },
    {
      "epoch": 2.82,
      "learning_rate": 3.204615804884081e-05,
      "loss": 1.9923,
      "step": 192500
    },
    {
      "epoch": 2.83,
      "learning_rate": 3.1865450408385434e-05,
      "loss": 1.8337,
      "step": 193000
    },
    {
      "epoch": 2.84,
      "learning_rate": 3.1684915787791185e-05,
      "loss": 1.9092,
      "step": 193500
    },
    {
      "epoch": 2.84,
      "learning_rate": 3.150455802698488e-05,
      "loss": 1.8451,
      "step": 194000
    },
    {
      "epoch": 2.85,
      "learning_rate": 3.132438096213156e-05,
      "loss": 1.8952,
      "step": 194500
    },
    {
      "epoch": 2.86,
      "learning_rate": 3.114438842555287e-05,
      "loss": 1.9238,
      "step": 195000
    },
    {
      "epoch": 2.87,
      "learning_rate": 3.096458424564564e-05,
      "loss": 1.8765,
      "step": 195500
    },
    {
      "epoch": 2.87,
      "learning_rate": 3.07849722468004e-05,
      "loss": 1.8946,
      "step": 196000
    },
    {
      "epoch": 2.88,
      "learning_rate": 3.060555624931998e-05,
      "loss": 1.8671,
      "step": 196500
    },
    {
      "epoch": 2.89,
      "learning_rate": 3.042634006933835e-05,
      "loss": 1.9126,
      "step": 197000
    },
    {
      "epoch": 2.9,
      "learning_rate": 3.024732751873942e-05,
      "loss": 1.9464,
      "step": 197500
    },
    {
      "epoch": 2.9,
      "learning_rate": 3.006852240507591e-05,
      "loss": 1.8723,
      "step": 198000
    },
    {
      "epoch": 2.91,
      "learning_rate": 2.988992853148847e-05,
      "loss": 1.892,
      "step": 198500
    },
    {
      "epoch": 2.92,
      "learning_rate": 2.9711549696624666e-05,
      "loss": 1.8947,
      "step": 199000
    },
    {
      "epoch": 2.92,
      "learning_rate": 2.9533389694558266e-05,
      "loss": 1.9721,
      "step": 199500
    },
    {
      "epoch": 2.93,
      "learning_rate": 2.9355452314708538e-05,
      "loss": 1.9074,
      "step": 200000
    },
    {
      "epoch": 2.94,
      "learning_rate": 2.9177741341759592e-05,
      "loss": 1.9363,
      "step": 200500
    },
    {
      "epoch": 2.95,
      "learning_rate": 2.9000260555579957e-05,
      "loss": 1.8815,
      "step": 201000
    },
    {
      "epoch": 2.95,
      "learning_rate": 2.8823013731142127e-05,
      "loss": 1.9579,
      "step": 201500
    },
    {
      "epoch": 2.96,
      "learning_rate": 2.8646004638442282e-05,
      "loss": 1.9756,
      "step": 202000
    },
    {
      "epoch": 2.97,
      "learning_rate": 2.8469237042420128e-05,
      "loss": 1.8637,
      "step": 202500
    },
    {
      "epoch": 2.98,
      "learning_rate": 2.8292714702878754e-05,
      "loss": 1.8707,
      "step": 203000
    },
    {
      "epoch": 2.98,
      "learning_rate": 2.811644137440477e-05,
      "loss": 1.9256,
      "step": 203500
    },
    {
      "epoch": 2.99,
      "learning_rate": 2.7940420806288327e-05,
      "loss": 1.9739,
      "step": 204000
    },
    {
      "epoch": 3.0,
      "learning_rate": 2.776465674244345e-05,
      "loss": 1.8675,
      "step": 204500
    },
    {
      "epoch": 3.0,
      "eval_bleu": 1.0,
      "eval_brevity_penalty": 1.0,
      "eval_length_ratio": 1.0,
      "eval_loss": 2.297696352005005,
      "eval_precisions": [
        1.0,
        1.0,
        1.0,
        1.0
      ],
      "eval_reference_length": 7761920,
      "eval_runtime": 17697.7556,
      "eval_samples_per_second": 0.857,
      "eval_steps_per_second": 0.428,
      "eval_translation_length": 7761920,
      "step": 204657
    }
  ],
  "logging_steps": 500,
  "max_steps": 341095,
  "num_train_epochs": 5,
  "save_steps": 5000,
  "total_flos": 9.433737890333983e+17,
  "trial_name": null,
  "trial_params": null
}