{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 19.999910828404126,
  "global_step": 168200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.06, "learning_rate": 2.3305588585017838e-07, "loss": 1.5468, "step": 500 },
    { "epoch": 0.12, "learning_rate": 4.7086801426872774e-07, "loss": 1.1544, "step": 1000 },
    { "epoch": 0.18, "learning_rate": 7.082045184304399e-07, "loss": 1.0372, "step": 1500 },
    { "epoch": 0.24, "learning_rate": 9.460166468489894e-07, "loss": 0.9823, "step": 2000 },
    { "epoch": 0.3, "learning_rate": 1.1838287752675386e-06, "loss": 0.9462, "step": 2500 },
    { "epoch": 0.36, "learning_rate": 1.4216409036860881e-06, "loss": 0.9246, "step": 3000 },
    { "epoch": 0.42, "learning_rate": 1.6594530321046376e-06, "loss": 0.8938, "step": 3500 },
    { "epoch": 0.48, "learning_rate": 1.8972651605231867e-06, "loss": 0.8919, "step": 4000 },
    { "epoch": 0.54, "learning_rate": 2.135077288941736e-06, "loss": 0.863, "step": 4500 },
    { "epoch": 0.59, "learning_rate": 2.3728894173602857e-06, "loss": 0.8513, "step": 5000 },
    { "epoch": 0.65, "learning_rate": 2.6107015457788348e-06, "loss": 0.8337, "step": 5500 },
    { "epoch": 0.71, "learning_rate": 2.8485136741973847e-06, "loss": 0.8228, "step": 6000 },
    { "epoch": 0.77, "learning_rate": 3.0863258026159338e-06, "loss": 0.787, "step": 6500 },
    { "epoch": 0.83, "learning_rate": 3.324137931034483e-06, "loss": 0.7696, "step": 7000 },
    { "epoch": 0.89, "learning_rate": 3.5619500594530328e-06, "loss": 0.7353, "step": 7500 },
    { "epoch": 0.95, "learning_rate": 3.799762187871582e-06, "loss": 0.7291, "step": 8000 },
    { "epoch": 1.01, "learning_rate": 4.037574316290131e-06, "loss": 0.7122, "step": 8500 },
    { "epoch": 1.07, "learning_rate": 4.27538644470868e-06, "loss": 0.7098, "step": 9000 },
    { "epoch": 1.13, "learning_rate": 4.5131985731272295e-06, "loss": 0.6884, "step": 9500 },
    { "epoch": 1.19, "learning_rate": 4.751010701545779e-06, "loss": 0.678, "step": 10000 },
    { "epoch": 1.25, "learning_rate": 4.9888228299643285e-06, "loss": 0.6688, "step": 10500 },
    { "epoch": 1.31, "learning_rate": 5.2266349583828776e-06, "loss": 0.6706, "step": 11000 },
    { "epoch": 1.37, "learning_rate": 5.4644470868014275e-06, "loss": 0.6705, "step": 11500 },
    { "epoch": 1.43, "learning_rate": 5.7022592152199766e-06, "loss": 0.6593, "step": 12000 },
    { "epoch": 1.49, "learning_rate": 5.940071343638526e-06, "loss": 0.669, "step": 12500 },
    { "epoch": 1.55, "learning_rate": 6.1778834720570756e-06, "loss": 0.6495, "step": 13000 },
    { "epoch": 1.61, "learning_rate": 6.415695600475625e-06, "loss": 0.6554, "step": 13500 },
    { "epoch": 1.66, "learning_rate": 6.653507728894174e-06, "loss": 0.6467, "step": 14000 },
    { "epoch": 1.72, "learning_rate": 6.891319857312724e-06, "loss": 0.6479, "step": 14500 },
    { "epoch": 1.78, "learning_rate": 7.128180737217598e-06, "loss": 0.643, "step": 15000 },
    { "epoch": 1.84, "learning_rate": 7.365992865636148e-06, "loss": 0.6369, "step": 15500 },
    { "epoch": 1.9, "learning_rate": 7.603804994054697e-06, "loss": 0.6363, "step": 16000 },
    { "epoch": 1.96, "learning_rate": 7.841617122473245e-06, "loss": 0.6272, "step": 16500 },
    { "epoch": 2.02, "learning_rate": 8.079429250891796e-06, "loss": 0.6269, "step": 17000 },
    { "epoch": 2.08, "learning_rate": 8.316765755053508e-06, "loss": 0.6116, "step": 17500 },
    { "epoch": 2.14, "learning_rate": 8.554577883472059e-06, "loss": 0.6149, "step": 18000 },
    { "epoch": 2.2, "learning_rate": 8.792390011890606e-06, "loss": 0.6143, "step": 18500 },
    { "epoch": 2.26, "learning_rate": 9.030202140309157e-06, "loss": 0.6139, "step": 19000 },
    { "epoch": 2.32, "learning_rate": 9.268014268727706e-06, "loss": 0.6048, "step": 19500 },
    { "epoch": 2.38, "learning_rate": 9.505826397146255e-06, "loss": 0.6095, "step": 20000 },
    { "epoch": 2.44, "learning_rate": 9.743638525564804e-06, "loss": 0.5988, "step": 20500 },
    { "epoch": 2.5, "learning_rate": 9.981450653983355e-06, "loss": 0.595, "step": 21000 },
    { "epoch": 2.56, "learning_rate": 1.0219262782401904e-05, "loss": 0.6005, "step": 21500 },
    { "epoch": 2.62, "learning_rate": 1.0457074910820453e-05, "loss": 0.598, "step": 22000 },
    { "epoch": 2.68, "learning_rate": 1.0694411414982165e-05, "loss": 0.5968, "step": 22500 },
    { "epoch": 2.73, "learning_rate": 1.0932223543400716e-05, "loss": 0.5934, "step": 23000 },
    { "epoch": 2.79, "learning_rate": 1.1170035671819263e-05, "loss": 0.5965, "step": 23500 },
    { "epoch": 2.85, "learning_rate": 1.1407847800237812e-05, "loss": 0.5946, "step": 24000 },
    { "epoch": 2.91, "learning_rate": 1.1645659928656363e-05, "loss": 0.5904, "step": 24500 },
    { "epoch": 2.97, "learning_rate": 1.1882996432818075e-05, "loss": 0.5917, "step": 25000 },
    { "epoch": 3.03, "learning_rate": 1.2120808561236624e-05, "loss": 0.5802, "step": 25500 },
    { "epoch": 3.09, "learning_rate": 1.2358620689655175e-05, "loss": 0.5753, "step": 26000 },
    { "epoch": 3.15, "learning_rate": 1.2595957193816886e-05, "loss": 0.5687, "step": 26500 },
    { "epoch": 3.21, "learning_rate": 1.2833769322235434e-05, "loss": 0.5755, "step": 27000 },
    { "epoch": 3.27, "learning_rate": 1.3071581450653985e-05, "loss": 0.5718, "step": 27500 },
    { "epoch": 3.33, "learning_rate": 1.3309393579072534e-05, "loss": 0.5759, "step": 28000 },
    { "epoch": 3.39, "learning_rate": 1.3547205707491083e-05, "loss": 0.5748, "step": 28500 },
    { "epoch": 3.45, "learning_rate": 1.3785017835909633e-05, "loss": 0.571, "step": 29000 },
    { "epoch": 3.51, "learning_rate": 1.4022829964328183e-05, "loss": 0.5713, "step": 29500 },
    { "epoch": 3.57, "learning_rate": 1.426064209274673e-05, "loss": 0.5688, "step": 30000 },
    { "epoch": 3.63, "learning_rate": 1.4497978596908443e-05, "loss": 0.5744, "step": 30500 },
    { "epoch": 3.69, "learning_rate": 1.4735790725326993e-05, "loss": 0.573, "step": 31000 },
    { "epoch": 3.75, "learning_rate": 1.4973602853745542e-05, "loss": 0.5711, "step": 31500 },
    { "epoch": 3.8, "learning_rate": 1.5211414982164092e-05, "loss": 0.5679, "step": 32000 },
    { "epoch": 3.86, "learning_rate": 1.544922711058264e-05, "loss": 0.5702, "step": 32500 },
    { "epoch": 3.92, "learning_rate": 1.568703923900119e-05, "loss": 0.57, "step": 33000 },
    { "epoch": 3.98, "learning_rate": 1.592485136741974e-05, "loss": 0.5659, "step": 33500 },
    { "epoch": 4.04, "learning_rate": 1.616266349583829e-05, "loss": 0.5553, "step": 34000 },
    { "epoch": 4.1, "learning_rate": 1.6400475624256836e-05, "loss": 0.5508, "step": 34500 },
    { "epoch": 4.16, "learning_rate": 1.663828775267539e-05, "loss": 0.5466, "step": 35000 },
    { "epoch": 4.22, "learning_rate": 1.6876099881093938e-05, "loss": 0.5479, "step": 35500 },
    { "epoch": 4.28, "learning_rate": 1.7113912009512487e-05, "loss": 0.5472, "step": 36000 },
    { "epoch": 4.34, "learning_rate": 1.73512485136742e-05, "loss": 0.5462, "step": 36500 },
    { "epoch": 4.4, "learning_rate": 1.758858501783591e-05, "loss": 0.5517, "step": 37000 },
    { "epoch": 4.46, "learning_rate": 1.782639714625446e-05, "loss": 0.5448, "step": 37500 },
    { "epoch": 4.52, "learning_rate": 1.8064209274673012e-05, "loss": 0.5473, "step": 38000 },
    { "epoch": 4.58, "learning_rate": 1.8302021403091558e-05, "loss": 0.547, "step": 38500 },
    { "epoch": 4.64, "learning_rate": 1.8539833531510107e-05, "loss": 0.5481, "step": 39000 },
    { "epoch": 4.7, "learning_rate": 1.877764565992866e-05, "loss": 0.5534, "step": 39500 },
    { "epoch": 4.76, "learning_rate": 1.901498216409037e-05, "loss": 0.5456, "step": 40000 },
    { "epoch": 4.82, "learning_rate": 1.925279429250892e-05, "loss": 0.5543, "step": 40500 },
    { "epoch": 4.88, "learning_rate": 1.949060642092747e-05, "loss": 0.5462, "step": 41000 },
    { "epoch": 4.93, "learning_rate": 1.9728418549346018e-05, "loss": 0.5443, "step": 41500 },
    { "epoch": 4.99, "learning_rate": 1.9966230677764567e-05, "loss": 0.5429, "step": 42000 },
    { "epoch": 5.05, "learning_rate": 1.9977328577090766e-05, "loss": 0.5261, "step": 42500 },
    { "epoch": 5.11, "learning_rate": 1.9950905007266485e-05, "loss": 0.521, "step": 43000 },
    { "epoch": 5.17, "learning_rate": 1.99244814374422e-05, "loss": 0.5263, "step": 43500 },
    { "epoch": 5.23, "learning_rate": 1.9898057867617915e-05, "loss": 0.5268, "step": 44000 },
    { "epoch": 5.29, "learning_rate": 1.9871634297793634e-05, "loss": 0.5232, "step": 44500 },
    { "epoch": 5.35, "learning_rate": 1.9845263575108998e-05, "loss": 0.5304, "step": 45000 },
    { "epoch": 5.41, "learning_rate": 1.9818840005284717e-05, "loss": 0.52, "step": 45500 },
    { "epoch": 5.47, "learning_rate": 1.9792416435460432e-05, "loss": 0.5266, "step": 46000 },
    { "epoch": 5.53, "learning_rate": 1.976599286563615e-05, "loss": 0.5311, "step": 46500 },
    { "epoch": 5.59, "learning_rate": 1.9739622142951514e-05, "loss": 0.526, "step": 47000 },
    { "epoch": 5.65, "learning_rate": 1.971319857312723e-05, "loss": 0.5272, "step": 47500 },
    { "epoch": 5.71, "learning_rate": 1.9686880697582245e-05, "loss": 0.5259, "step": 48000 },
    { "epoch": 5.77, "learning_rate": 1.966045712775796e-05, "loss": 0.5324, "step": 48500 },
    { "epoch": 5.83, "learning_rate": 1.963403355793368e-05, "loss": 0.532, "step": 49000 },
    { "epoch": 5.89, "learning_rate": 1.9607609988109395e-05, "loss": 0.525, "step": 49500 },
    { "epoch": 5.95, "learning_rate": 1.9581186418285113e-05, "loss": 0.528, "step": 50000 },
    { "epoch": 6.0, "learning_rate": 1.955476284846083e-05, "loss": 0.5291, "step": 50500 },
    { "epoch": 6.06, "learning_rate": 1.9528339278636547e-05, "loss": 0.5051, "step": 51000 },
    { "epoch": 6.12, "learning_rate": 1.9501915708812262e-05, "loss": 0.497, "step": 51500 },
    { "epoch": 6.18, "learning_rate": 1.9475492138987978e-05, "loss": 0.5067, "step": 52000 },
    { "epoch": 6.24, "learning_rate": 1.9449068569163696e-05, "loss": 0.5062, "step": 52500 },
    { "epoch": 6.3, "learning_rate": 1.942264499933941e-05, "loss": 0.5046, "step": 53000 },
    { "epoch": 6.36, "learning_rate": 1.939622142951513e-05, "loss": 0.5094, "step": 53500 },
    { "epoch": 6.42, "learning_rate": 1.9369797859690846e-05, "loss": 0.5061, "step": 54000 },
    { "epoch": 6.48, "learning_rate": 1.9343374289866564e-05, "loss": 0.512, "step": 54500 },
    { "epoch": 6.54, "learning_rate": 1.931695072004228e-05, "loss": 0.5048, "step": 55000 },
    { "epoch": 6.6, "learning_rate": 1.9290579997357643e-05, "loss": 0.5083, "step": 55500 },
    { "epoch": 6.66, "learning_rate": 1.9264156427533362e-05, "loss": 0.5021, "step": 56000 },
    { "epoch": 6.72, "learning_rate": 1.9237732857709077e-05, "loss": 0.5067, "step": 56500 },
    { "epoch": 6.78, "learning_rate": 1.9211309287884793e-05, "loss": 0.5107, "step": 57000 },
    { "epoch": 6.84, "learning_rate": 1.918488571806051e-05, "loss": 0.5121, "step": 57500 },
    { "epoch": 6.9, "learning_rate": 1.9158462148236226e-05, "loss": 0.5092, "step": 58000 },
    { "epoch": 6.96, "learning_rate": 1.9132091425551594e-05, "loss": 0.5106, "step": 58500 },
    { "epoch": 7.02, "learning_rate": 1.910566785572731e-05, "loss": 0.5081, "step": 59000 },
    { "epoch": 7.07, "learning_rate": 1.9079244285903028e-05, "loss": 0.4826, "step": 59500 },
    { "epoch": 7.13, "learning_rate": 1.905287356321839e-05, "loss": 0.4854, "step": 60000 },
    { "epoch": 7.19, "learning_rate": 1.902644999339411e-05, "loss": 0.48, "step": 60500 },
    { "epoch": 7.25, "learning_rate": 1.9000026423569825e-05, "loss": 0.4918, "step": 61000 },
    { "epoch": 7.31, "learning_rate": 1.8973602853745544e-05, "loss": 0.4867, "step": 61500 },
    { "epoch": 7.37, "learning_rate": 1.8947232131060908e-05, "loss": 0.4935, "step": 62000 },
    { "epoch": 7.43, "learning_rate": 1.8920808561236626e-05, "loss": 0.4855, "step": 62500 },
    { "epoch": 7.49, "learning_rate": 1.8894384991412342e-05, "loss": 0.4907, "step": 63000 },
    { "epoch": 7.55, "learning_rate": 1.886796142158806e-05, "loss": 0.4913, "step": 63500 },
    { "epoch": 7.61, "learning_rate": 1.8841537851763776e-05, "loss": 0.4914, "step": 64000 },
    { "epoch": 7.67, "learning_rate": 1.881511428193949e-05, "loss": 0.4962, "step": 64500 },
    { "epoch": 7.73, "learning_rate": 1.8788690712115206e-05, "loss": 0.4904, "step": 65000 },
    { "epoch": 7.79, "learning_rate": 1.8762267142290925e-05, "loss": 0.4927, "step": 65500 },
    { "epoch": 7.85, "learning_rate": 1.873584357246664e-05, "loss": 0.4934, "step": 66000 },
    { "epoch": 7.91, "learning_rate": 1.8709472849782007e-05, "loss": 0.4926, "step": 66500 },
    { "epoch": 7.97, "learning_rate": 1.8683049279957723e-05, "loss": 0.4965, "step": 67000 },
    { "epoch": 8.03, "learning_rate": 1.865662571013344e-05, "loss": 0.4783, "step": 67500 },
    { "epoch": 8.09, "learning_rate": 1.8630202140309157e-05, "loss": 0.4617, "step": 68000 },
    { "epoch": 8.15, "learning_rate": 1.8603831417624524e-05, "loss": 0.4649, "step": 68500 },
    { "epoch": 8.2, "learning_rate": 1.857740784780024e-05, "loss": 0.4696, "step": 69000 },
    { "epoch": 8.26, "learning_rate": 1.8550984277975958e-05, "loss": 0.467, "step": 69500 },
    { "epoch": 8.32, "learning_rate": 1.8524560708151673e-05, "loss": 0.4689, "step": 70000 },
    { "epoch": 8.38, "learning_rate": 1.849813713832739e-05, "loss": 0.4686, "step": 70500 },
    { "epoch": 8.44, "learning_rate": 1.8471766415642755e-05, "loss": 0.476, "step": 71000 },
    { "epoch": 8.5, "learning_rate": 1.8445342845818474e-05, "loss": 0.4721, "step": 71500 },
    { "epoch": 8.56, "learning_rate": 1.841891927599419e-05, "loss": 0.483, "step": 72000 },
    { "epoch": 8.62, "learning_rate": 1.8392495706169905e-05, "loss": 0.4728, "step": 72500 },
    { "epoch": 8.68, "learning_rate": 1.836607213634562e-05, "loss": 0.4742, "step": 73000 },
    { "epoch": 8.74, "learning_rate": 1.833964856652134e-05, "loss": 0.4774, "step": 73500 },
    { "epoch": 8.8, "learning_rate": 1.8313277843836702e-05, "loss": 0.4801, "step": 74000 },
    { "epoch": 8.86, "learning_rate": 1.828690712115207e-05, "loss": 0.4777, "step": 74500 },
    { "epoch": 8.92, "learning_rate": 1.8260483551327785e-05, "loss": 0.4803, "step": 75000 },
    { "epoch": 8.98, "learning_rate": 1.8234059981503504e-05, "loss": 0.4783, "step": 75500 },
    { "epoch": 9.04, "learning_rate": 1.820763641167922e-05, "loss": 0.4596, "step": 76000 },
    { "epoch": 9.1, "learning_rate": 1.8181212841854937e-05, "loss": 0.4442, "step": 76500 },
    { "epoch": 9.16, "learning_rate": 1.8154789272030653e-05, "loss": 0.46, "step": 77000 },
    { "epoch": 9.22, "learning_rate": 1.812836570220637e-05, "loss": 0.4597, "step": 77500 },
    { "epoch": 9.27, "learning_rate": 1.8101942132382087e-05, "loss": 0.4522, "step": 78000 },
    { "epoch": 9.33, "learning_rate": 1.8075518562557802e-05, "loss": 0.4592, "step": 78500 },
    { "epoch": 9.39, "learning_rate": 1.804914783987317e-05, "loss": 0.4617, "step": 79000 },
    { "epoch": 9.45, "learning_rate": 1.8022724270048884e-05, "loss": 0.4601, "step": 79500 },
    { "epoch": 9.51, "learning_rate": 1.7996300700224603e-05, "loss": 0.4606, "step": 80000 },
    { "epoch": 9.57, "learning_rate": 1.7969877130400318e-05, "loss": 0.4616, "step": 80500 },
    { "epoch": 9.63, "learning_rate": 1.7943506407715682e-05, "loss": 0.4566, "step": 81000 },
    { "epoch": 9.69, "learning_rate": 1.79170828378914e-05, "loss": 0.4638, "step": 81500 },
    { "epoch": 9.75, "learning_rate": 1.7890659268067116e-05, "loss": 0.467, "step": 82000 },
    { "epoch": 9.81, "learning_rate": 1.7864235698242835e-05, "loss": 0.4662, "step": 82500 },
    { "epoch": 9.87, "learning_rate": 1.783781212841855e-05, "loss": 0.4636, "step": 83000 },
    { "epoch": 9.93, "learning_rate": 1.781138855859427e-05, "loss": 0.4641, "step": 83500 },
    { "epoch": 9.99, "learning_rate": 1.7784964988769984e-05, "loss": 0.461, "step": 84000 },
    { "epoch": 10.05, "learning_rate": 1.7758594266085348e-05, "loss": 0.4463, "step": 84500 },
    { "epoch": 10.11, "learning_rate": 1.7732223543400715e-05, "loss": 0.4379, "step": 85000 },
    { "epoch": 10.17, "learning_rate": 1.770579997357643e-05, "loss": 0.4418, "step": 85500 },
    { "epoch": 10.23, "learning_rate": 1.767937640375215e-05, "loss": 0.45, "step": 86000 },
    { "epoch": 10.29, "learning_rate": 1.7652952833927864e-05, "loss": 0.4463, "step": 86500 },
    { "epoch": 10.34, "learning_rate": 1.7626529264103583e-05, "loss": 0.4453, "step": 87000 },
    { "epoch": 10.4, "learning_rate": 1.7600105694279298e-05, "loss": 0.4443, "step": 87500 },
    { "epoch": 10.46, "learning_rate": 1.7573682124455017e-05, "loss": 0.4465, "step": 88000 },
    { "epoch": 10.52, "learning_rate": 1.7547258554630732e-05, "loss": 0.4548, "step": 88500 },
    { "epoch": 10.58, "learning_rate": 1.752083498480645e-05, "loss": 0.4485, "step": 89000 },
    { "epoch": 10.64, "learning_rate": 1.7494411414982166e-05, "loss": 0.4437, "step": 89500 },
    { "epoch": 10.7, "learning_rate": 1.7467987845157885e-05, "loss": 0.4508, "step": 90000 },
    { "epoch": 10.76, "learning_rate": 1.74415642753336e-05, "loss": 0.4519, "step": 90500 },
    { "epoch": 10.82, "learning_rate": 1.7415140705509315e-05, "loss": 0.4532, "step": 91000 },
    { "epoch": 10.88, "learning_rate": 1.738876998282468e-05, "loss": 0.4536, "step": 91500 },
    { "epoch": 10.94, "learning_rate": 1.7362399260140046e-05, "loss": 0.4571, "step": 92000 },
    { "epoch": 11.0, "learning_rate": 1.733597569031576e-05, "loss": 0.4471, "step": 92500 },
    { "epoch": 11.06, "learning_rate": 1.730955212049148e-05, "loss": 0.432, "step": 93000 },
    { "epoch": 11.12, "learning_rate": 1.7283128550667195e-05, "loss": 0.4303, "step": 93500 },
    { "epoch": 11.18, "learning_rate": 1.7256757827982563e-05, "loss": 0.4307, "step": 94000 },
    { "epoch": 11.24, "learning_rate": 1.7230334258158278e-05, "loss": 0.4373, "step": 94500 },
    { "epoch": 11.3, "learning_rate": 1.7203910688333996e-05, "loss": 0.4307, "step": 95000 },
    { "epoch": 11.36, "learning_rate": 1.7177487118509712e-05, "loss": 0.4339, "step": 95500 },
    { "epoch": 11.41, "learning_rate": 1.7151169242964728e-05, "loss": 0.4376, "step": 96000 },
    { "epoch": 11.47, "learning_rate": 1.7124745673140443e-05, "loss": 0.4383, "step": 96500 },
    { "epoch": 11.53, "learning_rate": 1.7098322103316158e-05, "loss": 0.4421, "step": 97000 },
    { "epoch": 11.59, "learning_rate": 1.7071898533491877e-05, "loss": 0.4377, "step": 97500 },
    { "epoch": 11.65, "learning_rate": 1.7045474963667592e-05, "loss": 0.4446, "step": 98000 },
    { "epoch": 11.71, "learning_rate": 1.7019051393843307e-05, "loss": 0.4359, "step": 98500 },
    { "epoch": 11.77, "learning_rate": 1.6992627824019026e-05, "loss": 0.4377, "step": 99000 },
    { "epoch": 11.83, "learning_rate": 1.696620425419474e-05, "loss": 0.4412, "step": 99500 },
    { "epoch": 11.89, "learning_rate": 1.693978068437046e-05, "loss": 0.4405, "step": 100000 },
    { "epoch": 11.95, "learning_rate": 1.6913357114546175e-05, "loss": 0.4423, "step": 100500 },
    { "epoch": 12.01, "learning_rate": 1.6886933544721894e-05, "loss": 0.4408, "step": 101000 },
    { "epoch": 12.07, "learning_rate": 1.686050997489761e-05, "loss": 0.417, "step": 101500 },
    { "epoch": 12.13, "learning_rate": 1.6834086405073328e-05, "loss": 0.4241, "step": 102000 },
    { "epoch": 12.19, "learning_rate": 1.680771568238869e-05, "loss": 0.4251, "step": 102500 },
    { "epoch": 12.25, "learning_rate": 1.678129211256441e-05, "loss": 0.4168, "step": 103000 },
    { "epoch": 12.31, "learning_rate": 1.6754868542740125e-05, "loss": 0.421, "step": 103500 },
    { "epoch": 12.37, "learning_rate": 1.6728444972915844e-05, "loss": 0.4247, "step": 104000 },
    { "epoch": 12.43, "learning_rate": 1.6702074250231208e-05, "loss": 0.4226, "step": 104500 },
    { "epoch": 12.49, "learning_rate": 1.6675650680406927e-05, "loss": 0.4232, "step": 105000 },
    { "epoch": 12.54, "learning_rate": 1.6649227110582642e-05, "loss": 0.4287, "step": 105500 },
    { "epoch": 12.6, "learning_rate": 1.6622803540758357e-05, "loss": 0.4273, "step": 106000 },
    { "epoch": 12.66, "learning_rate": 1.6596379970934076e-05, "loss": 0.4346, "step": 106500 },
    { "epoch": 12.72, "learning_rate": 1.656995640110979e-05, "loss": 0.4287, "step": 107000 },
    { "epoch": 12.78, "learning_rate": 1.6543532831285506e-05, "loss": 0.429, "step": 107500 },
    { "epoch": 12.84, "learning_rate": 1.6517162108600874e-05, "loss": 0.4254, "step": 108000 },
    { "epoch": 12.9, "learning_rate": 1.649073853877659e-05, "loss": 0.427, "step": 108500 },
    { "epoch": 12.96, "learning_rate": 1.6464314968952307e-05, "loss": 0.4303, "step": 109000 },
    { "epoch": 13.02, "learning_rate": 1.6437891399128023e-05, "loss": 0.4239, "step": 109500 },
    { "epoch": 13.08, "learning_rate": 1.641146782930374e-05, "loss": 0.401, "step": 110000 },
    { "epoch": 13.14, "learning_rate": 1.6385097106619105e-05, "loss": 0.4092, "step": 110500 },
    { "epoch": 13.2, "learning_rate": 1.6358726383934472e-05, "loss": 0.4076, "step": 111000 },
    { "epoch": 13.26, "learning_rate": 1.6332355661249836e-05, "loss": 0.4118, "step": 111500 },
    { "epoch": 13.32, "learning_rate": 1.6305932091425555e-05, "loss": 0.4145, "step": 112000 },
    { "epoch": 13.38, "learning_rate": 1.627950852160127e-05, "loss": 0.4132, "step": 112500 },
    { "epoch": 13.44, "learning_rate": 1.625308495177699e-05, "loss": 0.416, "step": 113000 },
    { "epoch": 13.5, "learning_rate": 1.6226661381952704e-05, "loss": 0.416, "step": 113500 },
    { "epoch": 13.56, "learning_rate": 1.620023781212842e-05, "loss": 0.4126, "step": 114000 },
    { "epoch": 13.61, "learning_rate": 1.6173814242304135e-05, "loss": 0.4179, "step": 114500 },
    { "epoch": 13.67, "learning_rate": 1.6147443519619502e-05, "loss": 0.4242, "step": 115000 },
    { "epoch": 13.73, "learning_rate": 1.6121019949795217e-05, "loss": 0.418, "step": 115500 },
    { "epoch": 13.79, "learning_rate": 1.6094596379970936e-05, "loss": 0.4205, "step": 116000 },
    { "epoch": 13.85, "learning_rate": 1.606817281014665e-05, "loss": 0.422, "step": 116500 },
    { "epoch": 13.91, "learning_rate": 1.604174924032237e-05, "loss": 0.4257, "step": 117000 },
    { "epoch": 13.97, "learning_rate": 1.6015325670498085e-05, "loss": 0.4146, "step": 117500 },
    { "epoch": 14.03, "learning_rate": 1.5988902100673804e-05, "loss": 0.4144, "step": 118000 },
    { "epoch": 14.09, "learning_rate": 1.596247853084952e-05, "loss": 0.4009, "step": 118500 },
    { "epoch": 14.15, "learning_rate": 1.5936054961025234e-05, "loss": 0.4037, "step": 119000 },
    { "epoch": 14.21, "learning_rate": 1.59096842383406e-05, "loss": 0.4011, "step": 119500 },
    { "epoch": 14.27, "learning_rate": 1.5883260668516317e-05, "loss": 0.4045, "step": 120000 },
    { "epoch": 14.33, "learning_rate": 1.5856837098692035e-05, "loss": 0.4127, "step": 120500 },
    { "epoch": 14.39, "learning_rate": 1.583041352886775e-05, "loss": 0.4051, "step": 121000 },
    { "epoch": 14.45, "learning_rate": 1.580398995904347e-05, "loss": 0.4064, "step": 121500 },
    { "epoch": 14.51, "learning_rate": 1.5777566389219184e-05, "loss": 0.4046, "step": 122000 },
    { "epoch": 14.57, "learning_rate": 1.5751142819394903e-05, "loss": 0.4041, "step": 122500 },
    { "epoch": 14.63, "learning_rate": 1.572471924957062e-05, "loss": 0.4116, "step": 123000 },
    { "epoch": 14.68, "learning_rate": 1.5698295679746337e-05, "loss": 0.4061, "step": 123500 },
    { "epoch": 14.74, "learning_rate": 1.5671872109922052e-05, "loss": 0.4119, "step": 124000 },
    { "epoch": 14.8, "learning_rate": 1.564544854009777e-05, "loss": 0.4101, "step": 124500 },
    { "epoch": 14.86, "learning_rate": 1.5619024970273483e-05, "loss": 0.412, "step": 125000 },
    { "epoch": 14.92, "learning_rate": 1.55926014004492e-05, "loss": 0.4089, "step": 125500 },
    { "epoch": 14.98, "learning_rate": 1.5566230677764565e-05, "loss": 0.4119, "step": 126000 },
    { "epoch": 15.04, "learning_rate": 1.5539807107940284e-05, "loss": 0.3987, "step": 126500 },
    { "epoch": 15.1, "learning_rate": 1.5513383538116e-05, "loss": 0.3884, "step": 127000 },
    { "epoch": 15.16, "learning_rate": 1.5486959968291718e-05, "loss": 0.3889, "step": 127500 },
    { "epoch": 15.22, "learning_rate": 1.5460589245607082e-05, "loss": 0.3947, "step": 128000 },
    { "epoch": 15.28, "learning_rate": 1.543421852292245e-05, "loss": 0.3915, "step": 128500 },
    { "epoch": 15.34, "learning_rate": 1.5407794953098164e-05, "loss": 0.3933, "step": 129000 },
    { "epoch": 15.4, "learning_rate": 1.5381371383273883e-05, "loss": 0.3884, "step": 129500 },
    { "epoch": 15.46, "learning_rate": 1.5354947813449598e-05, "loss": 0.3966, "step": 130000 },
    { "epoch": 15.52, "learning_rate": 1.5328524243625317e-05, "loss": 0.4029, "step": 130500 },
    { "epoch": 15.58, "learning_rate": 1.5302100673801032e-05, "loss": 0.394, "step": 131000 },
    { "epoch": 15.64, "learning_rate": 1.527567710397675e-05, "loss": 0.3999, "step": 131500 },
    { "epoch": 15.7, "learning_rate": 1.5249253534152466e-05, "loss": 0.4, "step": 132000 },
    { "epoch": 15.75, "learning_rate": 1.522288281146783e-05, "loss": 0.4006, "step": 132500 },
    { "epoch": 15.81, "learning_rate": 1.5196459241643547e-05, "loss": 0.4007, "step": 133000 },
    { "epoch": 15.87, "learning_rate": 1.5170035671819264e-05, "loss": 0.4058, "step": 133500 },
    { "epoch": 15.93, "learning_rate": 1.514361210199498e-05, "loss": 0.4032, "step": 134000 },
    { "epoch": 15.99, "learning_rate": 1.5117188532170698e-05, "loss": 0.4027, "step": 134500 },
    { "epoch": 16.05, "learning_rate": 1.5090817809486063e-05, "loss": 0.3827, "step": 135000 },
    { "epoch": 16.11, "learning_rate": 1.5064394239661778e-05, "loss": 0.3889, "step": 135500 },
    { "epoch": 16.17, "learning_rate": 1.5037970669837495e-05, "loss": 0.3876, "step": 136000 },
    { "epoch": 16.23, "learning_rate": 1.5011547100013212e-05, "loss": 0.3825, "step": 136500 },
    { "epoch": 16.29, "learning_rate": 1.498512353018893e-05, "loss": 0.3872, "step": 137000 },
    { "epoch": 16.35, "learning_rate": 1.4958699960364646e-05, "loss": 0.3895, "step": 137500 },
    { "epoch": 16.41, "learning_rate": 1.4932276390540363e-05, "loss": 0.3884, "step": 138000 },
    { "epoch": 16.47, "learning_rate": 1.490585282071608e-05, "loss": 0.3863, "step": 138500 },
    { "epoch": 16.53, "learning_rate": 1.4879482098031446e-05, "loss": 0.3914, "step": 139000 },
    { "epoch": 16.59, "learning_rate": 1.4853111375346811e-05, "loss": 0.3982, "step": 139500 },
    { "epoch": 16.65, "learning_rate": 1.4826687805522528e-05, "loss": 0.3949, "step": 140000 },
    { "epoch": 16.71, "learning_rate": 1.4800264235698245e-05, "loss": 0.3924, "step": 140500 },
    { "epoch": 16.77, "learning_rate": 1.4773893513013609e-05, "loss": 0.3933, "step": 141000 },
    { "epoch": 16.83, "learning_rate": 1.4747469943189326e-05, "loss": 0.394, "step": 141500 },
    { "epoch": 16.88, "learning_rate": 1.4721046373365043e-05, "loss": 0.3907, "step": 142000 },
    { "epoch": 16.94, "learning_rate": 1.4694622803540758e-05, "loss": 0.3955, "step": 142500 },
    { "epoch": 17.0, "learning_rate": 1.4668199233716475e-05, "loss": 0.3903, "step": 143000 },
    { "epoch": 17.06, "learning_rate": 1.4641775663892192e-05, "loss": 0.3735, "step": 143500 },
    { "epoch": 17.12, "learning_rate": 1.4615352094067909e-05, "loss": 0.379, "step": 144000 },
    { "epoch": 17.18, "learning_rate": 1.4589034218522923e-05, "loss": 0.3799, "step": 144500 },
    { "epoch": 17.24, "learning_rate": 1.456261064869864e-05, "loss": 0.3772, "step": 145000 },
    { "epoch": 17.3, "learning_rate": 1.4536187078874357e-05, "loss": 0.3806, "step": 145500 },
    { "epoch": 17.36, "learning_rate": 1.4509763509050074e-05, "loss": 0.3775, "step": 146000 },
    { "epoch": 17.42, "learning_rate": 1.4483339939225791e-05, "loss": 0.3799, "step": 146500 },
    { "epoch": 17.48, "learning_rate": 1.4456916369401508e-05, "loss": 0.3835, "step": 147000 },
    { "epoch": 17.54, "learning_rate": 1.4430492799577225e-05, "loss": 0.3806, "step": 147500 },
    { "epoch": 17.6, "learning_rate": 1.4404069229752942e-05, "loss": 0.3825, "step": 148000 },
    { "epoch": 17.66, "learning_rate": 1.4377645659928659e-05, "loss": 0.3873, "step": 148500 },
    { "epoch": 17.72, "learning_rate": 1.4351222090104376e-05, "loss": 0.3889, "step": 149000 },
    { "epoch": 17.78, "learning_rate": 1.4324798520280093e-05, "loss": 0.3855, "step": 149500 },
    { "epoch": 17.84, "learning_rate": 1.4298374950455806e-05, "loss": 0.3866, "step": 150000 },
    { "epoch": 17.9, "learning_rate": 1.4271951380631523e-05, "loss": 0.385, "step": 150500 },
    { "epoch": 17.95, "learning_rate": 1.424552781080724e-05, "loss": 0.391, "step": 151000 },
    { "epoch": 18.01, "learning_rate": 1.4219104240982957e-05, "loss": 0.379, "step": 151500 },
    { "epoch": 18.07, "learning_rate": 1.4192680671158674e-05, "loss": 0.369, "step": 152000 },
    { "epoch": 18.13, "learning_rate": 1.416630994847404e-05, "loss": 0.3725, "step": 152500 },
    { "epoch": 18.19, "learning_rate": 1.4139886378649757e-05, "loss": 0.3731, "step": 153000 },
    { "epoch": 18.25, "learning_rate": 1.4113462808825474e-05, "loss": 0.3704, "step": 153500 },
    { "epoch": 18.31, "learning_rate": 1.408703923900119e-05, "loss": 0.3696, "step": 154000 },
    { "epoch": 18.37, "learning_rate": 1.4060668516316556e-05, "loss": 0.3682, "step": 154500 },
    { "epoch": 18.43, "learning_rate": 1.4034244946492273e-05, "loss": 0.3772, "step": 155000 },
    { "epoch": 18.49, "learning_rate": 1.400782137666799e-05, "loss": 0.3794, "step": 155500 },
    { "epoch": 18.55, "learning_rate": 1.3981397806843705e-05, "loss": 0.3799, "step": 156000 },
    { "epoch": 18.61, "learning_rate": 1.3954974237019422e-05, "loss": 0.3695, "step": 156500 },
    { "epoch": 18.67, "learning_rate": 1.392855066719514e-05, "loss": 0.3768, "step": 157000 },
    { "epoch": 18.73, "learning_rate": 1.3902179944510505e-05, "loss": 0.381, "step": 157500 },
    { "epoch": 18.79, "learning_rate": 1.3875756374686222e-05, "loss": 0.375, "step": 158000 },
    { "epoch": 18.85, "learning_rate": 1.3849332804861937e-05, "loss": 0.377, "step": 158500 },
    { "epoch": 18.91, "learning_rate": 1.3822909235037654e-05, "loss": 0.3853, "step": 159000 },
    { "epoch": 18.97, "learning_rate": 1.3796485665213371e-05, "loss": 0.3778, "step": 159500 },
    { "epoch": 19.02, "learning_rate": 1.3770062095389088e-05, "loss": 0.3688, "step": 160000 },
    { "epoch": 19.08, "learning_rate": 1.3743691372704453e-05, "loss": 0.3653, "step": 160500 },
    { "epoch": 19.14, "learning_rate": 1.371726780288017e-05, "loss": 0.355, "step": 161000 },
    { "epoch": 19.2, "learning_rate": 1.3690844233055887e-05, "loss": 0.3644, "step": 161500 },
    { "epoch": 19.26, "learning_rate": 1.3664420663231603e-05, "loss": 0.3673, "step": 162000 },
    { "epoch": 19.32, "learning_rate": 1.363799709340732e-05, "loss": 0.3691, "step": 162500 },
    { "epoch": 19.38, "learning_rate": 1.3611626370722685e-05, "loss": 0.3544, "step": 163000 },
    { "epoch": 19.44, "learning_rate": 1.3585202800898402e-05, "loss": 0.368, "step": 163500 },
    { "epoch": 19.5, "learning_rate": 1.3558779231074119e-05, "loss": 0.3691, "step": 164000 },
    { "epoch": 19.56, "learning_rate": 1.3532355661249836e-05, "loss": 0.3655, "step": 164500 },
    { "epoch": 19.62, "learning_rate": 1.3505932091425553e-05, "loss": 0.3692, "step": 165000 },
    { "epoch": 19.68, "learning_rate": 1.3479561368740918e-05, "loss": 0.3782, "step": 165500 },
    { "epoch": 19.74, "learning_rate": 1.3453137798916635e-05, "loss": 0.3708, "step": 166000 },
    { "epoch": 19.8, "learning_rate": 1.3426714229092352e-05, "loss": 0.3673, "step": 166500 },
    { "epoch": 19.86, "learning_rate": 1.3400343506407716e-05, "loss": 0.3678, "step": 167000 },
    { "epoch": 19.92, "learning_rate": 1.3373919936583433e-05, "loss": 0.3721, "step": 167500 },
    { "epoch": 19.98, "learning_rate": 1.334749636675915e-05, "loss": 0.3741, "step": 168000 }
  ],
  "max_steps": 420500,
  "num_train_epochs": 50,
  "total_flos": 7.76486352033908e+19,
  "trial_name": null,
  "trial_params": null
}