{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "global_step": 84090,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.06,
      "learning_rate": 4.970448329171126e-05,
      "loss": 3.6877,
      "step": 500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.9407777381377095e-05,
      "loss": 3.4145,
      "step": 1000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.911047687002022e-05,
      "loss": 3.3234,
      "step": 1500
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.881317635866334e-05,
      "loss": 3.2698,
      "step": 2000
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.851587584730646e-05,
      "loss": 3.2274,
      "step": 2500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.821857533594958e-05,
      "loss": 3.1993,
      "step": 3000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.79212748245927e-05,
      "loss": 3.1753,
      "step": 3500
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.762397431323582e-05,
      "loss": 3.1476,
      "step": 4000
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.732667380187894e-05,
      "loss": 3.1343,
      "step": 4500
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.702937329052206e-05,
      "loss": 3.1173,
      "step": 5000
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.673207277916518e-05,
      "loss": 3.0942,
      "step": 5500
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.6435366868831015e-05,
      "loss": 3.0873,
      "step": 6000
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.613806635747413e-05,
      "loss": 3.0778,
      "step": 6500
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.584076584611726e-05,
      "loss": 3.065,
      "step": 7000
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.5543465334760376e-05,
      "loss": 3.0566,
      "step": 7500
    },
    {
      "epoch": 0.95,
      "learning_rate": 4.5246164823403495e-05,
      "loss": 3.0428,
      "step": 8000
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.494945891306933e-05,
      "loss": 3.03,
      "step": 8500
    },
    {
      "epoch": 1.07,
      "learning_rate": 4.4652158401712454e-05,
      "loss": 2.9815,
      "step": 9000
    },
    {
      "epoch": 1.13,
      "learning_rate": 4.435485789035557e-05,
      "loss": 2.9757,
      "step": 9500
    },
    {
      "epoch": 1.19,
      "learning_rate": 4.405755737899869e-05,
      "loss": 2.9704,
      "step": 10000
    },
    {
      "epoch": 1.25,
      "learning_rate": 4.3760851468664525e-05,
      "loss": 2.9729,
      "step": 10500
    },
    {
      "epoch": 1.31,
      "learning_rate": 4.346355095730765e-05,
      "loss": 2.9675,
      "step": 11000
    },
    {
      "epoch": 1.37,
      "learning_rate": 4.316625044595077e-05,
      "loss": 2.9551,
      "step": 11500
    },
    {
      "epoch": 1.43,
      "learning_rate": 4.286894993459389e-05,
      "loss": 2.95,
      "step": 12000
    },
    {
      "epoch": 1.49,
      "learning_rate": 4.257164942323701e-05,
      "loss": 2.949,
      "step": 12500
    },
    {
      "epoch": 1.55,
      "learning_rate": 4.2274943512902846e-05,
      "loss": 2.9451,
      "step": 13000
    },
    {
      "epoch": 1.61,
      "learning_rate": 4.1977643001545964e-05,
      "loss": 2.9412,
      "step": 13500
    },
    {
      "epoch": 1.66,
      "learning_rate": 4.168034249018908e-05,
      "loss": 2.9367,
      "step": 14000
    },
    {
      "epoch": 1.72,
      "learning_rate": 4.13830419788322e-05,
      "loss": 2.9247,
      "step": 14500
    },
    {
      "epoch": 1.78,
      "learning_rate": 4.1085741467475326e-05,
      "loss": 2.9287,
      "step": 15000
    },
    {
      "epoch": 1.84,
      "learning_rate": 4.078903555714116e-05,
      "loss": 2.913,
      "step": 15500
    },
    {
      "epoch": 1.9,
      "learning_rate": 4.049173504578428e-05,
      "loss": 2.923,
      "step": 16000
    },
    {
      "epoch": 1.96,
      "learning_rate": 4.01944345344274e-05,
      "loss": 2.9127,
      "step": 16500
    },
    {
      "epoch": 2.02,
      "learning_rate": 3.989713402307052e-05,
      "loss": 2.8935,
      "step": 17000
    },
    {
      "epoch": 2.08,
      "learning_rate": 3.959983351171364e-05,
      "loss": 2.8674,
      "step": 17500
    },
    {
      "epoch": 2.14,
      "learning_rate": 3.930253300035676e-05,
      "loss": 2.8602,
      "step": 18000
    },
    {
      "epoch": 2.2,
      "learning_rate": 3.900582709002259e-05,
      "loss": 2.8658,
      "step": 18500
    },
    {
      "epoch": 2.26,
      "learning_rate": 3.870852657866572e-05,
      "loss": 2.8655,
      "step": 19000
    },
    {
      "epoch": 2.32,
      "learning_rate": 3.8411226067308836e-05,
      "loss": 2.8594,
      "step": 19500
    },
    {
      "epoch": 2.38,
      "learning_rate": 3.8113925555951955e-05,
      "loss": 2.8506,
      "step": 20000
    },
    {
      "epoch": 2.44,
      "learning_rate": 3.781662504459508e-05,
      "loss": 2.8645,
      "step": 20500
    },
    {
      "epoch": 2.5,
      "learning_rate": 3.75193245332382e-05,
      "loss": 2.8637,
      "step": 21000
    },
    {
      "epoch": 2.56,
      "learning_rate": 3.722202402188132e-05,
      "loss": 2.8506,
      "step": 21500
    },
    {
      "epoch": 2.62,
      "learning_rate": 3.692472351052444e-05,
      "loss": 2.8443,
      "step": 22000
    },
    {
      "epoch": 2.68,
      "learning_rate": 3.6628017600190276e-05,
      "loss": 2.8417,
      "step": 22500
    },
    {
      "epoch": 2.74,
      "learning_rate": 3.6330717088833394e-05,
      "loss": 2.8418,
      "step": 23000
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.603341657747651e-05,
      "loss": 2.8462,
      "step": 23500
    },
    {
      "epoch": 2.85,
      "learning_rate": 3.573611606611964e-05,
      "loss": 2.8516,
      "step": 24000
    },
    {
      "epoch": 2.91,
      "learning_rate": 3.5438815554762756e-05,
      "loss": 2.8411,
      "step": 24500
    },
    {
      "epoch": 2.97,
      "learning_rate": 3.5141515043405875e-05,
      "loss": 2.8285,
      "step": 25000
    },
    {
      "epoch": 3.03,
      "learning_rate": 3.4844214532049e-05,
      "loss": 2.8064,
      "step": 25500
    },
    {
      "epoch": 3.09,
      "learning_rate": 3.454691402069212e-05,
      "loss": 2.7888,
      "step": 26000
    },
    {
      "epoch": 3.15,
      "learning_rate": 3.425020811035795e-05,
      "loss": 2.7971,
      "step": 26500
    },
    {
      "epoch": 3.21,
      "learning_rate": 3.395290759900107e-05,
      "loss": 2.793,
      "step": 27000
    },
    {
      "epoch": 3.27,
      "learning_rate": 3.365560708764419e-05,
      "loss": 2.7998,
      "step": 27500
    },
    {
      "epoch": 3.33,
      "learning_rate": 3.3358306576287314e-05,
      "loss": 2.7935,
      "step": 28000
    },
    {
      "epoch": 3.39,
      "learning_rate": 3.306160066595315e-05,
      "loss": 2.8,
      "step": 28500
    },
    {
      "epoch": 3.45,
      "learning_rate": 3.2764300154596267e-05,
      "loss": 2.7915,
      "step": 29000
    },
    {
      "epoch": 3.51,
      "learning_rate": 3.24675942442621e-05,
      "loss": 2.7924,
      "step": 29500
    },
    {
      "epoch": 3.57,
      "learning_rate": 3.217029373290522e-05,
      "loss": 2.7915,
      "step": 30000
    },
    {
      "epoch": 3.63,
      "learning_rate": 3.1872993221548344e-05,
      "loss": 2.7827,
      "step": 30500
    },
    {
      "epoch": 3.69,
      "learning_rate": 3.157569271019146e-05,
      "loss": 2.7958,
      "step": 31000
    },
    {
      "epoch": 3.75,
      "learning_rate": 3.127839219883458e-05,
      "loss": 2.7905,
      "step": 31500
    },
    {
      "epoch": 3.81,
      "learning_rate": 3.0981686288500415e-05,
      "loss": 2.778,
      "step": 32000
    },
    {
      "epoch": 3.86,
      "learning_rate": 3.068438577714354e-05,
      "loss": 2.7901,
      "step": 32500
    },
    {
      "epoch": 3.92,
      "learning_rate": 3.038708526578666e-05,
      "loss": 2.7851,
      "step": 33000
    },
    {
      "epoch": 3.98,
      "learning_rate": 3.0089784754429777e-05,
      "loss": 2.7936,
      "step": 33500
    },
    {
      "epoch": 4.04,
      "learning_rate": 2.97924842430729e-05,
      "loss": 2.765,
      "step": 34000
    },
    {
      "epoch": 4.1,
      "learning_rate": 2.9495778332738732e-05,
      "loss": 2.7393,
      "step": 34500
    },
    {
      "epoch": 4.16,
      "learning_rate": 2.9198477821381854e-05,
      "loss": 2.7426,
      "step": 35000
    },
    {
      "epoch": 4.22,
      "learning_rate": 2.8901177310024973e-05,
      "loss": 2.7467,
      "step": 35500
    },
    {
      "epoch": 4.28,
      "learning_rate": 2.8603876798668094e-05,
      "loss": 2.7458,
      "step": 36000
    },
    {
      "epoch": 4.34,
      "learning_rate": 2.8306576287311216e-05,
      "loss": 2.7524,
      "step": 36500
    },
    {
      "epoch": 4.4,
      "learning_rate": 2.8009275775954335e-05,
      "loss": 2.7428,
      "step": 37000
    },
    {
      "epoch": 4.46,
      "learning_rate": 2.7711975264597456e-05,
      "loss": 2.7437,
      "step": 37500
    },
    {
      "epoch": 4.52,
      "learning_rate": 2.7414674753240578e-05,
      "loss": 2.7496,
      "step": 38000
    },
    {
      "epoch": 4.58,
      "learning_rate": 2.7117968842906412e-05,
      "loss": 2.7503,
      "step": 38500
    },
    {
      "epoch": 4.64,
      "learning_rate": 2.682066833154953e-05,
      "loss": 2.7481,
      "step": 39000
    },
    {
      "epoch": 4.7,
      "learning_rate": 2.6523962421215365e-05,
      "loss": 2.7446,
      "step": 39500
    },
    {
      "epoch": 4.76,
      "learning_rate": 2.6226661909858486e-05,
      "loss": 2.7428,
      "step": 40000
    },
    {
      "epoch": 4.82,
      "learning_rate": 2.5929361398501605e-05,
      "loss": 2.7429,
      "step": 40500
    },
    {
      "epoch": 4.88,
      "learning_rate": 2.5632060887144726e-05,
      "loss": 2.7472,
      "step": 41000
    },
    {
      "epoch": 4.94,
      "learning_rate": 2.5334760375787848e-05,
      "loss": 2.7497,
      "step": 41500
    },
    {
      "epoch": 4.99,
      "learning_rate": 2.5037459864430967e-05,
      "loss": 2.7491,
      "step": 42000
    },
    {
      "epoch": 5.05,
      "learning_rate": 2.474015935307409e-05,
      "loss": 2.7089,
      "step": 42500
    },
    {
      "epoch": 5.11,
      "learning_rate": 2.444285884171721e-05,
      "loss": 2.7098,
      "step": 43000
    },
    {
      "epoch": 5.17,
      "learning_rate": 2.4146152931383044e-05,
      "loss": 2.71,
      "step": 43500
    },
    {
      "epoch": 5.23,
      "learning_rate": 2.3848852420026163e-05,
      "loss": 2.7139,
      "step": 44000
    },
    {
      "epoch": 5.29,
      "learning_rate": 2.3551551908669284e-05,
      "loss": 2.7187,
      "step": 44500
    },
    {
      "epoch": 5.35,
      "learning_rate": 2.3254251397312403e-05,
      "loss": 2.7163,
      "step": 45000
    },
    {
      "epoch": 5.41,
      "learning_rate": 2.2956950885955525e-05,
      "loss": 2.7097,
      "step": 45500
    },
    {
      "epoch": 5.47,
      "learning_rate": 2.266024497562136e-05,
      "loss": 2.7085,
      "step": 46000
    },
    {
      "epoch": 5.53,
      "learning_rate": 2.236294446426448e-05,
      "loss": 2.7001,
      "step": 46500
    },
    {
      "epoch": 5.59,
      "learning_rate": 2.20656439529076e-05,
      "loss": 2.7102,
      "step": 47000
    },
    {
      "epoch": 5.65,
      "learning_rate": 2.176834344155072e-05,
      "loss": 2.7191,
      "step": 47500
    },
    {
      "epoch": 5.71,
      "learning_rate": 2.1471637531216554e-05,
      "loss": 2.7124,
      "step": 48000
    },
    {
      "epoch": 5.77,
      "learning_rate": 2.1174337019859676e-05,
      "loss": 2.7145,
      "step": 48500
    },
    {
      "epoch": 5.83,
      "learning_rate": 2.0877631109525507e-05,
      "loss": 2.7162,
      "step": 49000
    },
    {
      "epoch": 5.89,
      "learning_rate": 2.058033059816863e-05,
      "loss": 2.7112,
      "step": 49500
    },
    {
      "epoch": 5.95,
      "learning_rate": 2.028303008681175e-05,
      "loss": 2.7122,
      "step": 50000
    },
    {
      "epoch": 6.01,
      "learning_rate": 1.998572957545487e-05,
      "loss": 2.7143,
      "step": 50500
    },
    {
      "epoch": 6.06,
      "learning_rate": 1.968842906409799e-05,
      "loss": 2.6759,
      "step": 51000
    },
    {
      "epoch": 6.12,
      "learning_rate": 1.9391128552741112e-05,
      "loss": 2.6826,
      "step": 51500
    },
    {
      "epoch": 6.18,
      "learning_rate": 1.909382804138423e-05,
      "loss": 2.6815,
      "step": 52000
    },
    {
      "epoch": 6.24,
      "learning_rate": 1.8796527530027352e-05,
      "loss": 2.6825,
      "step": 52500
    },
    {
      "epoch": 6.3,
      "learning_rate": 1.8499227018670474e-05,
      "loss": 2.678,
      "step": 53000
    },
    {
      "epoch": 6.36,
      "learning_rate": 1.8202521108336308e-05,
      "loss": 2.6829,
      "step": 53500
    },
    {
      "epoch": 6.42,
      "learning_rate": 1.7905220596979427e-05,
      "loss": 2.6912,
      "step": 54000
    },
    {
      "epoch": 6.48,
      "learning_rate": 1.760792008562255e-05,
      "loss": 2.6928,
      "step": 54500
    },
    {
      "epoch": 6.54,
      "learning_rate": 1.731061957426567e-05,
      "loss": 2.6916,
      "step": 55000
    },
    {
      "epoch": 6.6,
      "learning_rate": 1.70139136639315e-05,
      "loss": 2.6829,
      "step": 55500
    },
    {
      "epoch": 6.66,
      "learning_rate": 1.6716613152574623e-05,
      "loss": 2.6827,
      "step": 56000
    },
    {
      "epoch": 6.72,
      "learning_rate": 1.6419312641217744e-05,
      "loss": 2.684,
      "step": 56500
    },
    {
      "epoch": 6.78,
      "learning_rate": 1.6122012129860863e-05,
      "loss": 2.6825,
      "step": 57000
    },
    {
      "epoch": 6.84,
      "learning_rate": 1.5824711618503984e-05,
      "loss": 2.6761,
      "step": 57500
    },
    {
      "epoch": 6.9,
      "learning_rate": 1.552800570816982e-05,
      "loss": 2.6913,
      "step": 58000
    },
    {
      "epoch": 6.96,
      "learning_rate": 1.5230705196812939e-05,
      "loss": 2.6902,
      "step": 58500
    },
    {
      "epoch": 7.02,
      "learning_rate": 1.4933404685456059e-05,
      "loss": 2.6832,
      "step": 59000
    },
    {
      "epoch": 7.08,
      "learning_rate": 1.463610417409918e-05,
      "loss": 2.6619,
      "step": 59500
    },
    {
      "epoch": 7.14,
      "learning_rate": 1.4339398263765014e-05,
      "loss": 2.6594,
      "step": 60000
    },
    {
      "epoch": 7.19,
      "learning_rate": 1.4042097752408134e-05,
      "loss": 2.6659,
      "step": 60500
    },
    {
      "epoch": 7.25,
      "learning_rate": 1.3745391842073968e-05,
      "loss": 2.6654,
      "step": 61000
    },
    {
      "epoch": 7.31,
      "learning_rate": 1.3448091330717088e-05,
      "loss": 2.6676,
      "step": 61500
    },
    {
      "epoch": 7.37,
      "learning_rate": 1.3150790819360209e-05,
      "loss": 2.6582,
      "step": 62000
    },
    {
      "epoch": 7.43,
      "learning_rate": 1.285349030800333e-05,
      "loss": 2.6585,
      "step": 62500
    },
    {
      "epoch": 7.49,
      "learning_rate": 1.255618979664645e-05,
      "loss": 2.6636,
      "step": 63000
    },
    {
      "epoch": 7.55,
      "learning_rate": 1.225888928528957e-05,
      "loss": 2.6627,
      "step": 63500
    },
    {
      "epoch": 7.61,
      "learning_rate": 1.1961588773932692e-05,
      "loss": 2.656,
      "step": 64000
    },
    {
      "epoch": 7.67,
      "learning_rate": 1.1664288262575812e-05,
      "loss": 2.6634,
      "step": 64500
    },
    {
      "epoch": 7.73,
      "learning_rate": 1.1366987751218933e-05,
      "loss": 2.6616,
      "step": 65000
    },
    {
      "epoch": 7.79,
      "learning_rate": 1.1070281840884766e-05,
      "loss": 2.6614,
      "step": 65500
    },
    {
      "epoch": 7.85,
      "learning_rate": 1.0772981329527887e-05,
      "loss": 2.6709,
      "step": 66000
    },
    {
      "epoch": 7.91,
      "learning_rate": 1.0475680818171008e-05,
      "loss": 2.6681,
      "step": 66500
    },
    {
      "epoch": 7.97,
      "learning_rate": 1.0178380306814128e-05,
      "loss": 2.6731,
      "step": 67000
    },
    {
      "epoch": 8.03,
      "learning_rate": 9.881674396479962e-06,
      "loss": 2.6451,
      "step": 67500
    },
    {
      "epoch": 8.09,
      "learning_rate": 9.584968486145796e-06,
      "loss": 2.6538,
      "step": 68000
    },
    {
      "epoch": 8.15,
      "learning_rate": 9.287667974788918e-06,
      "loss": 2.6463,
      "step": 68500
    },
    {
      "epoch": 8.21,
      "learning_rate": 8.990367463432038e-06,
      "loss": 2.6519,
      "step": 69000
    },
    {
      "epoch": 8.26,
      "learning_rate": 8.693066952075157e-06,
      "loss": 2.6401,
      "step": 69500
    },
    {
      "epoch": 8.32,
      "learning_rate": 8.395766440718278e-06,
      "loss": 2.6448,
      "step": 70000
    },
    {
      "epoch": 8.38,
      "learning_rate": 8.098465929361398e-06,
      "loss": 2.6482,
      "step": 70500
    },
    {
      "epoch": 8.44,
      "learning_rate": 7.801165418004519e-06,
      "loss": 2.6444,
      "step": 71000
    },
    {
      "epoch": 8.5,
      "learning_rate": 7.5038649066476395e-06,
      "loss": 2.6438,
      "step": 71500
    },
    {
      "epoch": 8.56,
      "learning_rate": 7.2065643952907604e-06,
      "loss": 2.6448,
      "step": 72000
    },
    {
      "epoch": 8.62,
      "learning_rate": 6.9098584849565936e-06,
      "loss": 2.6443,
      "step": 72500
    },
    {
      "epoch": 8.68,
      "learning_rate": 6.6125579735997145e-06,
      "loss": 2.6521,
      "step": 73000
    },
    {
      "epoch": 8.74,
      "learning_rate": 6.3152574622428354e-06,
      "loss": 2.6491,
      "step": 73500
    },
    {
      "epoch": 8.8,
      "learning_rate": 6.018551551908669e-06,
      "loss": 2.6341,
      "step": 74000
    },
    {
      "epoch": 8.86,
      "learning_rate": 5.72125104055179e-06,
      "loss": 2.6593,
      "step": 74500
    },
    {
      "epoch": 8.92,
      "learning_rate": 5.42395052919491e-06,
      "loss": 2.6501,
      "step": 75000
    },
    {
      "epoch": 8.98,
      "learning_rate": 5.126650017838031e-06,
      "loss": 2.6496,
      "step": 75500
    },
    {
      "epoch": 9.04,
      "learning_rate": 4.8293495064811514e-06,
      "loss": 2.6418,
      "step": 76000
    },
    {
      "epoch": 9.1,
      "learning_rate": 4.5320489951242715e-06,
      "loss": 2.6389,
      "step": 76500
    },
    {
      "epoch": 9.16,
      "learning_rate": 4.2347484837673925e-06,
      "loss": 2.6309,
      "step": 77000
    },
    {
      "epoch": 9.22,
      "learning_rate": 3.9374479724105126e-06,
      "loss": 2.6315,
      "step": 77500
    },
    {
      "epoch": 9.28,
      "learning_rate": 3.6407420620763465e-06,
      "loss": 2.6371,
      "step": 78000
    },
    {
      "epoch": 9.34,
      "learning_rate": 3.343441550719467e-06,
      "loss": 2.6414,
      "step": 78500
    },
    {
      "epoch": 9.39,
      "learning_rate": 3.046141039362588e-06,
      "loss": 2.6361,
      "step": 79000
    },
    {
      "epoch": 9.45,
      "learning_rate": 2.7488405280057085e-06,
      "loss": 2.632,
      "step": 79500
    },
    {
      "epoch": 9.51,
      "learning_rate": 2.4521346176715424e-06,
      "loss": 2.6387,
      "step": 80000
    },
    {
      "epoch": 9.57,
      "learning_rate": 2.154834106314663e-06,
      "loss": 2.6371,
      "step": 80500
    },
    {
      "epoch": 9.63,
      "learning_rate": 1.8575335949577837e-06,
      "loss": 2.6383,
      "step": 81000
    },
    {
      "epoch": 9.69,
      "learning_rate": 1.5602330836009038e-06,
      "loss": 2.6379,
      "step": 81500
    },
    {
      "epoch": 9.75,
      "learning_rate": 1.2629325722440245e-06,
      "loss": 2.6272,
      "step": 82000
    },
    {
      "epoch": 9.81,
      "learning_rate": 9.668212629325723e-07,
      "loss": 2.6353,
      "step": 82500
    },
    {
      "epoch": 9.87,
      "learning_rate": 6.695207515756928e-07,
      "loss": 2.6404,
      "step": 83000
    },
    {
      "epoch": 9.93,
      "learning_rate": 3.722202402188132e-07,
      "loss": 2.6373,
      "step": 83500
    },
    {
      "epoch": 9.99,
      "learning_rate": 7.491972886193364e-08,
      "loss": 2.6304,
      "step": 84000
    },
    {
      "epoch": 10.0,
      "step": 84090,
      "total_flos": 2.5714385279778816e+17,
      "train_runtime": 27131.1773,
      "train_samples_per_second": 3.099
    }
  ],
  "max_steps": 84090,
  "num_train_epochs": 10,
  "total_flos": 2.5714385279778816e+17,
  "trial_name": null,
  "trial_params": null
}