{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 11.0,
  "eval_steps": 500,
  "global_step": 374517,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.981643022880136e-05,
      "loss": 6.6691,
      "step": 500
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.963286045760273e-05,
      "loss": 6.0224,
      "step": 1000
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.944929068640409e-05,
      "loss": 5.9452,
      "step": 1500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.926572091520545e-05,
      "loss": 5.8693,
      "step": 2000
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.908215114400682e-05,
      "loss": 5.7055,
      "step": 2500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.8898581372808176e-05,
      "loss": 5.3394,
      "step": 3000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.871501160160954e-05,
      "loss": 4.8459,
      "step": 3500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.853144183041091e-05,
      "loss": 4.4414,
      "step": 4000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.834787205921227e-05,
      "loss": 4.1545,
      "step": 4500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.816430228801363e-05,
      "loss": 3.9574,
      "step": 5000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.798073251681499e-05,
      "loss": 3.8106,
      "step": 5500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.779716274561636e-05,
      "loss": 3.6914,
      "step": 6000
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.761359297441772e-05,
      "loss": 3.5966,
      "step": 6500
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.743002320321908e-05,
      "loss": 3.5248,
      "step": 7000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.724645343202044e-05,
      "loss": 3.4544,
      "step": 7500
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.706288366082181e-05,
      "loss": 3.3976,
      "step": 8000
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.687931388962317e-05,
      "loss": 3.3415,
      "step": 8500
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.669574411842453e-05,
      "loss": 3.2958,
      "step": 9000
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.65121743472259e-05,
      "loss": 3.2533,
      "step": 9500
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.6328604576027256e-05,
      "loss": 3.2139,
      "step": 10000
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.614503480482862e-05,
      "loss": 3.1807,
      "step": 10500
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.596146503362999e-05,
      "loss": 3.1433,
      "step": 11000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.5777895262431346e-05,
      "loss": 3.1161,
      "step": 11500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.559432549123271e-05,
      "loss": 3.0855,
      "step": 12000
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.541075572003407e-05,
      "loss": 3.0553,
      "step": 12500
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.522718594883544e-05,
      "loss": 3.0269,
      "step": 13000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.50436161776368e-05,
      "loss": 3.0031,
      "step": 13500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.486004640643816e-05,
      "loss": 2.986,
      "step": 14000
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.467647663523952e-05,
      "loss": 2.9609,
      "step": 14500
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.4492906864040886e-05,
      "loss": 2.9432,
      "step": 15000
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.430933709284225e-05,
      "loss": 2.9238,
      "step": 15500
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.412576732164361e-05,
      "loss": 2.9081,
      "step": 16000
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.394219755044498e-05,
      "loss": 2.8925,
      "step": 16500
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.3758627779246336e-05,
      "loss": 2.8701,
      "step": 17000
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.35750580080477e-05,
      "loss": 2.856,
      "step": 17500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.339148823684907e-05,
      "loss": 2.8412,
      "step": 18000
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.3207918465650426e-05,
      "loss": 2.8267,
      "step": 18500
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.3024348694451785e-05,
      "loss": 2.8136,
      "step": 19000
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.284077892325315e-05,
      "loss": 2.8015,
      "step": 19500
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.2657209152054517e-05,
      "loss": 2.7876,
      "step": 20000
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.2473639380855875e-05,
      "loss": 2.7741,
      "step": 20500
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.229006960965724e-05,
      "loss": 2.7578,
      "step": 21000
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.21064998384586e-05,
      "loss": 2.7494,
      "step": 21500
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.1922930067259966e-05,
      "loss": 2.7375,
      "step": 22000
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.173936029606133e-05,
      "loss": 2.724,
      "step": 22500
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.155579052486269e-05,
      "loss": 2.718,
      "step": 23000
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.1372220753664056e-05,
      "loss": 2.7104,
      "step": 23500
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.1188650982465415e-05,
      "loss": 2.6971,
      "step": 24000
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.100508121126678e-05,
      "loss": 2.6908,
      "step": 24500
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.082151144006815e-05,
      "loss": 2.6824,
      "step": 25000
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.0637941668869506e-05,
      "loss": 2.6755,
      "step": 25500
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.0454371897670865e-05,
      "loss": 2.6597,
      "step": 26000
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.027080212647223e-05,
      "loss": 2.6489,
      "step": 26500
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.0087232355273596e-05,
      "loss": 2.6471,
      "step": 27000
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.9903662584074955e-05,
      "loss": 2.6413,
      "step": 27500
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.972009281287632e-05,
      "loss": 2.6255,
      "step": 28000
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.953652304167768e-05,
      "loss": 2.6253,
      "step": 28500
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.9352953270479045e-05,
      "loss": 2.6137,
      "step": 29000
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.916938349928041e-05,
      "loss": 2.6076,
      "step": 29500
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.898581372808177e-05,
      "loss": 2.5993,
      "step": 30000
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.8802243956883136e-05,
      "loss": 2.5967,
      "step": 30500
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.8618674185684495e-05,
      "loss": 2.5867,
      "step": 31000
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.843510441448586e-05,
      "loss": 2.5784,
      "step": 31500
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.8251534643287226e-05,
      "loss": 2.5768,
      "step": 32000
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.8067964872088585e-05,
      "loss": 2.5698,
      "step": 32500
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.7884395100889944e-05,
      "loss": 2.5589,
      "step": 33000
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.770082532969131e-05,
      "loss": 2.5576,
      "step": 33500
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.7517255558492676e-05,
      "loss": 2.5536,
      "step": 34000
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.4933474314917615e-05,
      "loss": 2.5485,
      "step": 34500
    },
    {
      "epoch": 1.03,
      "learning_rate": 4.486004640643816e-05,
      "loss": 2.538,
      "step": 35000
    },
    {
      "epoch": 1.04,
      "learning_rate": 4.47866184979587e-05,
      "loss": 2.5377,
      "step": 35500
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.4713190589479254e-05,
      "loss": 2.5319,
      "step": 36000
    },
    {
      "epoch": 1.07,
      "learning_rate": 4.4639762680999794e-05,
      "loss": 2.5271,
      "step": 36500
    },
    {
      "epoch": 1.09,
      "learning_rate": 4.456633477252034e-05,
      "loss": 2.5166,
      "step": 37000
    },
    {
      "epoch": 1.1,
      "learning_rate": 4.4492906864040886e-05,
      "loss": 2.5167,
      "step": 37500
    },
    {
      "epoch": 1.12,
      "learning_rate": 4.441947895556143e-05,
      "loss": 2.5056,
      "step": 38000
    },
    {
      "epoch": 1.13,
      "learning_rate": 4.434605104708198e-05,
      "loss": 2.5006,
      "step": 38500
    },
    {
      "epoch": 1.15,
      "learning_rate": 4.427262313860252e-05,
      "loss": 2.4967,
      "step": 39000
    },
    {
      "epoch": 1.16,
      "learning_rate": 4.419919523012307e-05,
      "loss": 2.4878,
      "step": 39500
    },
    {
      "epoch": 1.17,
      "learning_rate": 4.412576732164361e-05,
      "loss": 2.4895,
      "step": 40000
    },
    {
      "epoch": 1.19,
      "learning_rate": 4.405233941316416e-05,
      "loss": 2.4795,
      "step": 40500
    },
    {
      "epoch": 1.2,
      "learning_rate": 4.39789115046847e-05,
      "loss": 2.4732,
      "step": 41000
    },
    {
      "epoch": 1.22,
      "learning_rate": 4.390548359620525e-05,
      "loss": 2.4701,
      "step": 41500
    },
    {
      "epoch": 1.23,
      "learning_rate": 4.383205568772579e-05,
      "loss": 2.4666,
      "step": 42000
    },
    {
      "epoch": 1.25,
      "learning_rate": 4.3758627779246336e-05,
      "loss": 2.46,
      "step": 42500
    },
    {
      "epoch": 1.26,
      "learning_rate": 4.368519987076688e-05,
      "loss": 2.4594,
      "step": 43000
    },
    {
      "epoch": 1.28,
      "learning_rate": 4.361177196228743e-05,
      "loss": 2.4494,
      "step": 43500
    },
    {
      "epoch": 1.29,
      "learning_rate": 4.3538344053807975e-05,
      "loss": 2.4459,
      "step": 44000
    },
    {
      "epoch": 1.31,
      "learning_rate": 4.3464916145328514e-05,
      "loss": 2.4392,
      "step": 44500
    },
    {
      "epoch": 1.32,
      "learning_rate": 4.339148823684907e-05,
      "loss": 2.4388,
      "step": 45000
    },
    {
      "epoch": 1.34,
      "learning_rate": 4.331806032836961e-05,
      "loss": 2.4341,
      "step": 45500
    },
    {
      "epoch": 1.35,
      "learning_rate": 4.324463241989015e-05,
      "loss": 2.4285,
      "step": 46000
    },
    {
      "epoch": 1.37,
      "learning_rate": 4.31712045114107e-05,
      "loss": 2.4294,
      "step": 46500
    },
    {
      "epoch": 1.38,
      "learning_rate": 4.3097776602931246e-05,
      "loss": 2.4258,
      "step": 47000
    },
    {
      "epoch": 1.4,
      "learning_rate": 4.3024348694451785e-05,
      "loss": 2.4172,
      "step": 47500
    },
    {
      "epoch": 1.41,
      "learning_rate": 4.295092078597233e-05,
      "loss": 2.414,
      "step": 48000
    },
    {
      "epoch": 1.42,
      "learning_rate": 4.287749287749288e-05,
      "loss": 2.4119,
      "step": 48500
    },
    {
      "epoch": 1.44,
      "learning_rate": 4.2804064969013424e-05,
      "loss": 2.4052,
      "step": 49000
    },
    {
      "epoch": 1.45,
      "learning_rate": 4.273063706053397e-05,
      "loss": 2.4068,
      "step": 49500
    },
    {
      "epoch": 1.47,
      "learning_rate": 4.2657209152054517e-05,
      "loss": 2.3989,
      "step": 50000
    },
    {
      "epoch": 1.48,
      "learning_rate": 4.258378124357506e-05,
      "loss": 2.395,
      "step": 50500
    },
    {
      "epoch": 1.5,
      "learning_rate": 4.25103533350956e-05,
      "loss": 2.3924,
      "step": 51000
    },
    {
      "epoch": 1.51,
      "learning_rate": 4.243692542661615e-05,
      "loss": 2.3903,
      "step": 51500
    },
    {
      "epoch": 1.53,
      "learning_rate": 4.2363497518136695e-05,
      "loss": 2.3871,
      "step": 52000
    },
    {
      "epoch": 1.54,
      "learning_rate": 4.229006960965724e-05,
      "loss": 2.3795,
      "step": 52500
    },
    {
      "epoch": 1.56,
      "learning_rate": 4.221664170117778e-05,
      "loss": 2.3813,
      "step": 53000
    },
    {
      "epoch": 1.57,
      "learning_rate": 4.2143213792698334e-05,
      "loss": 2.3751,
      "step": 53500
    },
    {
      "epoch": 1.59,
      "learning_rate": 4.206978588421887e-05,
      "loss": 2.3729,
      "step": 54000
    },
    {
      "epoch": 1.6,
      "learning_rate": 4.199635797573942e-05,
      "loss": 2.3667,
      "step": 54500
    },
    {
      "epoch": 1.62,
      "learning_rate": 4.1922930067259966e-05,
      "loss": 2.3627,
      "step": 55000
    },
    {
      "epoch": 1.63,
      "learning_rate": 4.184950215878051e-05,
      "loss": 2.3608,
      "step": 55500
    },
    {
      "epoch": 1.64,
      "learning_rate": 4.177607425030106e-05,
      "loss": 2.3626,
      "step": 56000
    },
    {
      "epoch": 1.66,
      "learning_rate": 4.17026463418216e-05,
      "loss": 2.3538,
      "step": 56500
    },
    {
      "epoch": 1.67,
      "learning_rate": 4.162921843334215e-05,
      "loss": 2.3539,
      "step": 57000
    },
    {
      "epoch": 1.69,
      "learning_rate": 4.155579052486269e-05,
      "loss": 2.3496,
      "step": 57500
    },
    {
      "epoch": 1.7,
      "learning_rate": 4.148236261638324e-05,
      "loss": 2.3445,
      "step": 58000
    },
    {
      "epoch": 1.72,
      "learning_rate": 4.140893470790378e-05,
      "loss": 2.3473,
      "step": 58500
    },
    {
      "epoch": 1.73,
      "learning_rate": 4.133550679942433e-05,
      "loss": 2.342,
      "step": 59000
    },
    {
      "epoch": 1.75,
      "learning_rate": 4.126207889094487e-05,
      "loss": 2.3426,
      "step": 59500
    },
    {
      "epoch": 1.76,
      "learning_rate": 4.1188650982465415e-05,
      "loss": 2.3373,
      "step": 60000
    },
    {
      "epoch": 1.78,
      "learning_rate": 4.111522307398596e-05,
      "loss": 2.3299,
      "step": 60500
    },
    {
      "epoch": 1.79,
      "learning_rate": 4.104179516550651e-05,
      "loss": 2.3306,
      "step": 61000
    },
    {
      "epoch": 1.81,
      "learning_rate": 4.0968367257027054e-05,
      "loss": 2.3281,
      "step": 61500
    },
    {
      "epoch": 1.82,
      "learning_rate": 4.08949393485476e-05,
      "loss": 2.326,
      "step": 62000
    },
    {
      "epoch": 1.84,
      "learning_rate": 4.082151144006815e-05,
      "loss": 2.3243,
      "step": 62500
    },
    {
      "epoch": 1.85,
      "learning_rate": 4.0748083531588686e-05,
      "loss": 2.3241,
      "step": 63000
    },
    {
      "epoch": 1.87,
      "learning_rate": 4.067465562310923e-05,
      "loss": 2.3178,
      "step": 63500
    },
    {
      "epoch": 1.88,
      "learning_rate": 4.060122771462978e-05,
      "loss": 2.3177,
      "step": 64000
    },
    {
      "epoch": 1.89,
      "learning_rate": 4.0527799806150325e-05,
      "loss": 2.3128,
      "step": 64500
    },
    {
      "epoch": 1.91,
      "learning_rate": 4.0454371897670865e-05,
      "loss": 2.3109,
      "step": 65000
    },
    {
      "epoch": 1.92,
      "learning_rate": 4.038094398919142e-05,
      "loss": 2.3021,
      "step": 65500
    },
    {
      "epoch": 1.94,
      "learning_rate": 4.030751608071196e-05,
      "loss": 2.305,
      "step": 66000
    },
    {
      "epoch": 1.95,
      "learning_rate": 4.0234088172232504e-05,
      "loss": 2.3041,
      "step": 66500
    },
    {
      "epoch": 1.97,
      "learning_rate": 4.016066026375305e-05,
      "loss": 2.3022,
      "step": 67000
    },
    {
      "epoch": 1.98,
      "learning_rate": 4.0087232355273596e-05,
      "loss": 2.3009,
      "step": 67500
    },
    {
      "epoch": 2.0,
      "learning_rate": 4.001380444679414e-05,
      "loss": 2.2957,
      "step": 68000
    },
    {
      "epoch": 2.01,
      "learning_rate": 3.994037653831468e-05,
      "loss": 2.2897,
      "step": 68500
    },
    {
      "epoch": 2.03,
      "learning_rate": 3.9866948629835235e-05,
      "loss": 2.2909,
      "step": 69000
    },
    {
      "epoch": 2.04,
      "learning_rate": 3.9793520721355775e-05,
      "loss": 2.2868,
      "step": 69500
    },
    {
      "epoch": 2.06,
      "learning_rate": 3.972009281287632e-05,
      "loss": 2.2869,
      "step": 70000
    },
    {
      "epoch": 2.07,
      "learning_rate": 3.964666490439686e-05,
      "loss": 2.2871,
      "step": 70500
    },
    {
      "epoch": 2.09,
      "learning_rate": 3.9573236995917413e-05,
      "loss": 2.2812,
      "step": 71000
    },
    {
      "epoch": 2.1,
      "learning_rate": 3.949980908743795e-05,
      "loss": 2.2839,
      "step": 71500
    },
    {
      "epoch": 2.11,
      "learning_rate": 3.94263811789585e-05,
      "loss": 2.278,
      "step": 72000
    },
    {
      "epoch": 2.13,
      "learning_rate": 3.9352953270479045e-05,
      "loss": 2.2762,
      "step": 72500
    },
    {
      "epoch": 2.14,
      "learning_rate": 3.927952536199959e-05,
      "loss": 2.2747,
      "step": 73000
    },
    {
      "epoch": 2.16,
      "learning_rate": 3.920609745352014e-05,
      "loss": 2.2709,
      "step": 73500
    },
    {
      "epoch": 2.17,
      "learning_rate": 3.913266954504068e-05,
      "loss": 2.2703,
      "step": 74000
    },
    {
      "epoch": 2.19,
      "learning_rate": 3.905924163656123e-05,
      "loss": 2.2653,
      "step": 74500
    },
    {
      "epoch": 2.2,
      "learning_rate": 3.898581372808177e-05,
      "loss": 2.2686,
      "step": 75000
    },
    {
      "epoch": 2.22,
      "learning_rate": 3.8912385819602316e-05,
      "loss": 2.2631,
      "step": 75500
    },
    {
      "epoch": 2.23,
      "learning_rate": 3.883895791112286e-05,
      "loss": 2.2648,
      "step": 76000
    },
    {
      "epoch": 2.25,
      "learning_rate": 3.876553000264341e-05,
      "loss": 2.2639,
      "step": 76500
    },
    {
      "epoch": 2.26,
      "learning_rate": 3.869210209416395e-05,
      "loss": 2.2578,
      "step": 77000
    },
    {
      "epoch": 2.28,
      "learning_rate": 3.8618674185684495e-05,
      "loss": 2.2586,
      "step": 77500
    },
    {
      "epoch": 2.29,
      "learning_rate": 3.854524627720504e-05,
      "loss": 2.2527,
      "step": 78000
    },
    {
      "epoch": 2.31,
      "learning_rate": 3.847181836872559e-05,
      "loss": 2.2518,
      "step": 78500
    },
    {
      "epoch": 2.32,
      "learning_rate": 3.8398390460246134e-05,
      "loss": 2.2538,
      "step": 79000
    },
    {
      "epoch": 2.34,
      "learning_rate": 3.832496255176668e-05,
      "loss": 2.2495,
      "step": 79500
    },
    {
      "epoch": 2.35,
      "learning_rate": 3.8251534643287226e-05,
      "loss": 2.2501,
      "step": 80000
    },
    {
      "epoch": 2.36,
      "learning_rate": 3.8178106734807766e-05,
      "loss": 2.2466,
      "step": 80500
    },
    {
      "epoch": 2.38,
      "learning_rate": 3.810467882632831e-05,
      "loss": 2.249,
      "step": 81000
    },
    {
      "epoch": 2.39,
      "learning_rate": 3.803125091784886e-05,
      "loss": 2.2462,
      "step": 81500
    },
    {
      "epoch": 2.41,
      "learning_rate": 3.7957823009369405e-05,
      "loss": 2.2452,
      "step": 82000
    },
    {
      "epoch": 2.42,
      "learning_rate": 3.7884395100889944e-05,
      "loss": 2.2402,
      "step": 82500
    },
    {
      "epoch": 2.44,
      "learning_rate": 3.78109671924105e-05,
      "loss": 2.2379,
      "step": 83000
    },
    {
      "epoch": 2.45,
      "learning_rate": 3.773753928393104e-05,
      "loss": 2.2356,
      "step": 83500
    },
    {
      "epoch": 2.47,
      "learning_rate": 3.766411137545158e-05,
      "loss": 2.2364,
      "step": 84000
    },
    {
      "epoch": 2.48,
      "learning_rate": 3.759068346697213e-05,
      "loss": 2.2305,
      "step": 84500
    },
    {
      "epoch": 2.5,
      "learning_rate": 3.7517255558492676e-05,
      "loss": 2.2286,
      "step": 85000
    },
    {
      "epoch": 2.51,
      "learning_rate": 3.744382765001322e-05,
      "loss": 2.2339,
      "step": 85500
    },
    {
      "epoch": 2.53,
      "learning_rate": 3.737039974153376e-05,
      "loss": 2.2304,
      "step": 86000
    },
    {
      "epoch": 2.54,
      "learning_rate": 3.7296971833054315e-05,
      "loss": 2.2274,
      "step": 86500
    },
    {
      "epoch": 2.56,
      "learning_rate": 3.7223543924574854e-05,
      "loss": 2.2248,
      "step": 87000
    },
    {
      "epoch": 2.57,
      "learning_rate": 3.71501160160954e-05,
      "loss": 2.2275,
      "step": 87500
    },
    {
      "epoch": 2.58,
      "learning_rate": 3.707668810761594e-05,
      "loss": 2.2245,
      "step": 88000
    },
    {
      "epoch": 2.6,
      "learning_rate": 3.700326019913649e-05,
      "loss": 2.2249,
      "step": 88500
    },
    {
      "epoch": 2.61,
      "learning_rate": 3.692983229065703e-05,
      "loss": 2.2238,
      "step": 89000
    },
    {
      "epoch": 2.63,
      "learning_rate": 3.685640438217758e-05,
      "loss": 2.2197,
      "step": 89500
    },
    {
      "epoch": 2.64,
      "learning_rate": 3.6782976473698125e-05,
      "loss": 2.2214,
      "step": 90000
    },
    {
      "epoch": 2.66,
      "learning_rate": 3.670954856521867e-05,
      "loss": 2.2199,
      "step": 90500
    },
    {
      "epoch": 2.67,
      "learning_rate": 3.663612065673922e-05,
      "loss": 2.2156,
      "step": 91000
    },
    {
      "epoch": 2.69,
      "learning_rate": 3.656269274825976e-05,
      "loss": 2.2159,
      "step": 91500
    },
    {
      "epoch": 2.7,
      "learning_rate": 3.648926483978031e-05,
      "loss": 2.217,
      "step": 92000
    },
    {
      "epoch": 2.72,
      "learning_rate": 3.641583693130085e-05,
      "loss": 2.2145,
      "step": 92500
    },
    {
      "epoch": 2.73,
      "learning_rate": 3.6342409022821396e-05,
      "loss": 2.2096,
      "step": 93000
    },
    {
      "epoch": 2.75,
      "learning_rate": 3.626898111434194e-05,
      "loss": 2.2078,
      "step": 93500
    },
    {
      "epoch": 2.76,
      "learning_rate": 3.619555320586249e-05,
      "loss": 2.2061,
      "step": 94000
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.612212529738303e-05,
      "loss": 2.203,
      "step": 94500
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.6048697388903574e-05,
      "loss": 2.2053,
      "step": 95000
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.597526948042412e-05,
      "loss": 2.2017,
      "step": 95500
    },
    {
      "epoch": 2.82,
      "learning_rate": 3.590184157194467e-05,
      "loss": 2.1999,
      "step": 96000
    },
    {
      "epoch": 2.83,
      "learning_rate": 3.582841366346521e-05,
      "loss": 2.2005,
      "step": 96500
    },
    {
      "epoch": 2.85,
      "learning_rate": 3.575498575498576e-05,
      "loss": 2.1985,
      "step": 97000
    },
    {
      "epoch": 2.86,
      "learning_rate": 3.5681557846506306e-05,
      "loss": 2.1972,
      "step": 97500
    },
    {
      "epoch": 2.88,
      "learning_rate": 3.5608129938026845e-05,
      "loss": 2.1985,
      "step": 98000
    },
    {
      "epoch": 2.89,
      "learning_rate": 3.553470202954739e-05,
      "loss": 2.1961,
      "step": 98500
    },
    {
      "epoch": 2.91,
      "learning_rate": 3.546127412106794e-05,
      "loss": 2.1909,
      "step": 99000
    },
    {
      "epoch": 2.92,
      "learning_rate": 3.5387846212588484e-05,
      "loss": 2.1899,
      "step": 99500
    },
    {
      "epoch": 2.94,
      "learning_rate": 3.5314418304109024e-05,
      "loss": 2.1927,
      "step": 100000
    },
    {
      "epoch": 2.95,
      "learning_rate": 3.524099039562958e-05,
      "loss": 2.1916,
      "step": 100500
    },
    {
      "epoch": 2.97,
      "learning_rate": 3.5167562487150116e-05,
      "loss": 2.1902,
      "step": 101000
    },
    {
      "epoch": 2.98,
      "learning_rate": 3.509413457867066e-05,
      "loss": 2.19,
      "step": 101500
    },
    {
      "epoch": 3.0,
      "learning_rate": 3.502070667019121e-05,
      "loss": 2.1792,
      "step": 102000
    },
    {
      "epoch": 3.01,
      "learning_rate": 3.4947278761711755e-05,
      "loss": 2.1871,
      "step": 102500
    },
    {
      "epoch": 3.03,
      "learning_rate": 3.48738508532323e-05,
      "loss": 2.183,
      "step": 103000
    },
    {
      "epoch": 3.04,
      "learning_rate": 3.480042294475284e-05,
      "loss": 2.1846,
      "step": 103500
    },
    {
      "epoch": 3.05,
      "learning_rate": 3.4726995036273394e-05,
      "loss": 2.1777,
      "step": 104000
    },
    {
      "epoch": 3.07,
      "learning_rate": 3.4653567127793934e-05,
      "loss": 2.1802,
      "step": 104500
    },
    {
      "epoch": 3.08,
      "learning_rate": 3.458013921931448e-05,
      "loss": 2.1812,
      "step": 105000
    },
    {
      "epoch": 3.1,
      "learning_rate": 3.450671131083502e-05,
      "loss": 2.1771,
      "step": 105500
    },
    {
      "epoch": 3.11,
      "learning_rate": 3.443328340235557e-05,
      "loss": 2.1741,
      "step": 106000
    },
    {
      "epoch": 3.13,
      "learning_rate": 3.435985549387611e-05,
      "loss": 2.1775,
      "step": 106500
    },
    {
      "epoch": 3.14,
      "learning_rate": 3.428642758539666e-05,
      "loss": 2.1723,
      "step": 107000
    },
    {
      "epoch": 3.16,
      "learning_rate": 3.4212999676917205e-05,
      "loss": 2.1743,
      "step": 107500
    },
    {
      "epoch": 3.17,
      "learning_rate": 3.413957176843775e-05,
      "loss": 2.1738,
      "step": 108000
    },
    {
      "epoch": 3.19,
      "learning_rate": 3.406614385995829e-05,
      "loss": 2.1689,
      "step": 108500
    },
    {
      "epoch": 3.2,
      "learning_rate": 3.399271595147884e-05,
      "loss": 2.169,
      "step": 109000
    },
    {
      "epoch": 3.22,
      "learning_rate": 3.391928804299938e-05,
      "loss": 2.1672,
      "step": 109500
    },
    {
      "epoch": 3.23,
      "learning_rate": 3.384586013451993e-05,
      "loss": 2.1705,
      "step": 110000
    },
    {
      "epoch": 3.25,
      "learning_rate": 3.3772432226040476e-05,
      "loss": 2.1644,
      "step": 110500
    },
    {
      "epoch": 3.26,
      "learning_rate": 3.369900431756102e-05,
      "loss": 2.1651,
      "step": 111000
    },
    {
      "epoch": 3.27,
      "learning_rate": 3.362557640908157e-05,
      "loss": 2.165,
      "step": 111500
    },
    {
      "epoch": 3.29,
      "learning_rate": 3.355214850060211e-05,
      "loss": 2.1651,
      "step": 112000
    },
    {
      "epoch": 3.3,
      "learning_rate": 3.3478720592122654e-05,
      "loss": 2.161,
      "step": 112500
    },
    {
      "epoch": 3.32,
      "learning_rate": 3.34052926836432e-05,
      "loss": 2.1635,
      "step": 113000
    },
    {
      "epoch": 3.33,
      "learning_rate": 3.333186477516375e-05,
      "loss": 2.1632,
      "step": 113500
    },
    {
      "epoch": 3.35,
      "learning_rate": 3.3258436866684286e-05,
      "loss": 2.1634,
      "step": 114000
    },
    {
      "epoch": 3.36,
      "learning_rate": 3.318500895820484e-05,
      "loss": 2.158,
      "step": 114500
    },
    {
      "epoch": 3.38,
      "learning_rate": 3.311158104972538e-05,
      "loss": 2.1601,
      "step": 115000
    },
    {
      "epoch": 3.39,
      "learning_rate": 3.3038153141245925e-05,
      "loss": 2.1577,
      "step": 115500
    },
    {
      "epoch": 3.41,
      "learning_rate": 3.296472523276647e-05,
      "loss": 2.1578,
      "step": 116000
    },
    {
      "epoch": 3.42,
      "learning_rate": 3.289129732428702e-05,
      "loss": 2.1565,
      "step": 116500
    },
    {
      "epoch": 3.44,
      "learning_rate": 3.2817869415807564e-05,
      "loss": 2.1551,
      "step": 117000
    },
    {
      "epoch": 3.45,
      "learning_rate": 3.2744441507328103e-05,
      "loss": 2.1556,
      "step": 117500
    },
    {
      "epoch": 3.47,
      "learning_rate": 3.2671013598848657e-05,
      "loss": 2.1545,
      "step": 118000
    },
    {
      "epoch": 3.48,
      "learning_rate": 3.2597585690369196e-05,
      "loss": 2.1515,
      "step": 118500
    },
    {
      "epoch": 3.5,
      "learning_rate": 3.252415778188974e-05,
      "loss": 2.1498,
      "step": 119000
    },
    {
      "epoch": 3.51,
      "learning_rate": 3.245072987341028e-05,
      "loss": 2.1519,
      "step": 119500
    },
    {
      "epoch": 3.52,
      "learning_rate": 3.2377301964930835e-05,
      "loss": 2.1493,
      "step": 120000
    },
    {
      "epoch": 3.54,
      "learning_rate": 3.2303874056451374e-05,
      "loss": 2.149,
      "step": 120500
    },
    {
      "epoch": 3.55,
      "learning_rate": 3.223044614797192e-05,
      "loss": 2.1475,
      "step": 121000
    },
    {
      "epoch": 3.57,
      "learning_rate": 3.215701823949247e-05,
      "loss": 2.148,
      "step": 121500
    },
    {
      "epoch": 3.58,
      "learning_rate": 3.208359033101301e-05,
      "loss": 2.1456,
      "step": 122000
    },
    {
      "epoch": 3.6,
      "learning_rate": 3.201016242253356e-05,
      "loss": 2.1439,
      "step": 122500
    },
    {
      "epoch": 3.61,
      "learning_rate": 3.19367345140541e-05,
      "loss": 2.1442,
      "step": 123000
    },
    {
      "epoch": 3.63,
      "learning_rate": 3.186330660557465e-05,
      "loss": 2.1449,
      "step": 123500
    },
    {
      "epoch": 3.64,
      "learning_rate": 3.178987869709519e-05,
      "loss": 2.1429,
      "step": 124000
    },
    {
      "epoch": 3.66,
      "learning_rate": 3.171645078861574e-05,
      "loss": 2.1402,
      "step": 124500
    },
    {
      "epoch": 3.67,
      "learning_rate": 3.1643022880136284e-05,
      "loss": 2.1378,
      "step": 125000
    },
    {
      "epoch": 3.69,
      "learning_rate": 3.156959497165683e-05,
      "loss": 2.1401,
      "step": 125500
    },
    {
      "epoch": 3.7,
      "learning_rate": 3.149616706317737e-05,
      "loss": 2.1387,
      "step": 126000
    },
    {
      "epoch": 3.72,
      "learning_rate": 3.1422739154697916e-05,
      "loss": 2.1334,
      "step": 126500
    },
    {
      "epoch": 3.73,
      "learning_rate": 3.134931124621846e-05,
      "loss": 2.139,
      "step": 127000
    },
    {
      "epoch": 3.74,
      "learning_rate": 3.127588333773901e-05,
      "loss": 2.1368,
      "step": 127500
    },
    {
      "epoch": 3.76,
      "learning_rate": 3.1202455429259555e-05,
      "loss": 2.1346,
      "step": 128000
    },
    {
      "epoch": 3.77,
      "learning_rate": 3.11290275207801e-05,
      "loss": 2.135,
      "step": 128500
    },
    {
      "epoch": 3.79,
      "learning_rate": 3.105559961230065e-05,
      "loss": 2.1325,
      "step": 129000
    },
    {
      "epoch": 3.8,
      "learning_rate": 3.098217170382119e-05,
      "loss": 2.1329,
      "step": 129500
    },
    {
      "epoch": 3.82,
      "learning_rate": 3.0908743795341734e-05,
      "loss": 2.1353,
      "step": 130000
    },
    {
      "epoch": 3.83,
      "learning_rate": 3.083531588686228e-05,
      "loss": 2.127,
      "step": 130500
    },
    {
      "epoch": 3.85,
      "learning_rate": 3.0761887978382826e-05,
      "loss": 2.134,
      "step": 131000
    },
    {
      "epoch": 3.86,
      "learning_rate": 3.0688460069903366e-05,
      "loss": 2.1304,
      "step": 131500
    },
    {
      "epoch": 3.88,
      "learning_rate": 3.061503216142392e-05,
      "loss": 2.1294,
      "step": 132000
    },
    {
      "epoch": 3.89,
      "learning_rate": 3.054160425294446e-05,
      "loss": 2.1291,
      "step": 132500
    },
    {
      "epoch": 3.91,
      "learning_rate": 3.0468176344465005e-05,
      "loss": 2.1284,
      "step": 133000
    },
    {
      "epoch": 3.92,
      "learning_rate": 3.0394748435985548e-05,
      "loss": 2.128,
      "step": 133500
    },
    {
      "epoch": 3.94,
      "learning_rate": 3.0321320527506097e-05,
      "loss": 2.1242,
      "step": 134000
    },
    {
      "epoch": 3.95,
      "learning_rate": 3.024789261902664e-05,
      "loss": 2.1246,
      "step": 134500
    },
    {
      "epoch": 3.97,
      "learning_rate": 3.0174464710547183e-05,
      "loss": 2.1272,
      "step": 135000
    },
    {
      "epoch": 3.98,
      "learning_rate": 3.0101036802067733e-05,
      "loss": 2.1204,
      "step": 135500
    },
    {
      "epoch": 3.99,
      "learning_rate": 3.0027608893588276e-05,
      "loss": 2.1224,
      "step": 136000
    },
    {
      "epoch": 4.01,
      "learning_rate": 2.9954180985108822e-05,
      "loss": 2.1176,
      "step": 136500
    },
    {
      "epoch": 4.02,
      "learning_rate": 2.9880753076629365e-05,
      "loss": 2.1229,
      "step": 137000
    },
    {
      "epoch": 4.04,
      "learning_rate": 2.9807325168149915e-05,
      "loss": 2.1228,
      "step": 137500
    },
    {
      "epoch": 4.05,
      "learning_rate": 2.9733897259670457e-05,
      "loss": 2.1189,
      "step": 138000
    },
    {
      "epoch": 4.07,
      "learning_rate": 2.9660469351191e-05,
      "loss": 2.1208,
      "step": 138500
    },
    {
      "epoch": 4.08,
      "learning_rate": 2.958704144271155e-05,
      "loss": 2.1152,
      "step": 139000
    },
    {
      "epoch": 4.1,
      "learning_rate": 2.9513613534232093e-05,
      "loss": 2.1164,
      "step": 139500
    },
    {
      "epoch": 4.11,
      "learning_rate": 2.9440185625752636e-05,
      "loss": 2.1179,
      "step": 140000
    },
    {
      "epoch": 4.13,
      "learning_rate": 2.936675771727318e-05,
      "loss": 2.1168,
      "step": 140500
    },
    {
      "epoch": 4.14,
      "learning_rate": 2.929332980879373e-05,
      "loss": 2.1188,
      "step": 141000
    },
    {
      "epoch": 4.16,
      "learning_rate": 2.921990190031427e-05,
      "loss": 2.1162,
      "step": 141500
    },
    {
      "epoch": 4.17,
      "learning_rate": 2.9146473991834818e-05,
      "loss": 2.1141,
      "step": 142000
    },
    {
      "epoch": 4.19,
      "learning_rate": 2.9073046083355364e-05,
      "loss": 2.1142,
      "step": 142500
    },
    {
      "epoch": 4.2,
      "learning_rate": 2.899961817487591e-05,
      "loss": 2.1142,
      "step": 143000
    },
    {
      "epoch": 4.21,
      "learning_rate": 2.8926190266396453e-05,
      "loss": 2.1116,
      "step": 143500
    },
    {
      "epoch": 4.23,
      "learning_rate": 2.8852762357916996e-05,
      "loss": 2.1097,
      "step": 144000
    },
    {
      "epoch": 4.24,
      "learning_rate": 2.8779334449437546e-05,
      "loss": 2.1107,
      "step": 144500
    },
    {
      "epoch": 4.26,
      "learning_rate": 2.870590654095809e-05,
      "loss": 2.1115,
      "step": 145000
    },
    {
      "epoch": 4.27,
      "learning_rate": 2.863247863247863e-05,
      "loss": 2.1115,
      "step": 145500
    },
    {
      "epoch": 4.29,
      "learning_rate": 2.855905072399918e-05,
      "loss": 2.1084,
      "step": 146000
    },
    {
      "epoch": 4.3,
      "learning_rate": 2.8485622815519724e-05,
      "loss": 2.109,
      "step": 146500
    },
    {
      "epoch": 4.32,
      "learning_rate": 2.8412194907040267e-05,
      "loss": 2.1096,
      "step": 147000
    },
    {
      "epoch": 4.33,
      "learning_rate": 2.8338766998560813e-05,
      "loss": 2.1089,
      "step": 147500
    },
    {
      "epoch": 4.35,
      "learning_rate": 2.826533909008136e-05,
      "loss": 2.1094,
      "step": 148000
    },
    {
      "epoch": 4.36,
      "learning_rate": 2.8191911181601906e-05,
      "loss": 2.106,
      "step": 148500
    },
    {
      "epoch": 4.38,
      "learning_rate": 2.811848327312245e-05,
      "loss": 2.107,
      "step": 149000
    },
    {
      "epoch": 4.39,
      "learning_rate": 2.8045055364643e-05,
      "loss": 2.1048,
      "step": 149500
    },
    {
      "epoch": 4.41,
      "learning_rate": 2.797162745616354e-05,
      "loss": 2.1028,
      "step": 150000
    },
    {
      "epoch": 4.42,
      "learning_rate": 2.7898199547684084e-05,
      "loss": 2.1013,
      "step": 150500
    },
    {
      "epoch": 4.44,
      "learning_rate": 2.7824771639204627e-05,
      "loss": 2.1015,
      "step": 151000
    },
    {
      "epoch": 4.45,
      "learning_rate": 2.7751343730725177e-05,
      "loss": 2.105,
      "step": 151500
    },
    {
      "epoch": 4.46,
      "learning_rate": 2.767791582224572e-05,
      "loss": 2.1019,
      "step": 152000
    },
    {
      "epoch": 4.48,
      "learning_rate": 2.7604487913766263e-05,
      "loss": 2.102,
      "step": 152500
    },
    {
      "epoch": 4.49,
      "learning_rate": 2.7531060005286812e-05,
      "loss": 2.0979,
      "step": 153000
    },
    {
      "epoch": 4.51,
      "learning_rate": 2.7457632096807355e-05,
      "loss": 2.0946,
      "step": 153500
    },
    {
      "epoch": 4.52,
      "learning_rate": 2.73842041883279e-05,
      "loss": 2.0981,
      "step": 154000
    },
    {
      "epoch": 4.54,
      "learning_rate": 2.7310776279848444e-05,
      "loss": 2.0971,
      "step": 154500
    },
    {
      "epoch": 4.55,
      "learning_rate": 2.7237348371368994e-05,
      "loss": 2.0979,
      "step": 155000
    },
    {
      "epoch": 4.57,
      "learning_rate": 2.7163920462889537e-05,
      "loss": 2.0975,
      "step": 155500
    },
    {
      "epoch": 4.58,
      "learning_rate": 2.709049255441008e-05,
      "loss": 2.0993,
      "step": 156000
    },
    {
      "epoch": 4.6,
      "learning_rate": 2.701706464593063e-05,
      "loss": 2.0952,
      "step": 156500
    },
    {
      "epoch": 4.61,
      "learning_rate": 2.6943636737451173e-05,
      "loss": 2.0926,
      "step": 157000
    },
    {
      "epoch": 4.63,
      "learning_rate": 2.6870208828971715e-05,
      "loss": 2.095,
      "step": 157500
    },
    {
      "epoch": 4.64,
      "learning_rate": 2.679678092049226e-05,
      "loss": 2.0936,
      "step": 158000
    },
    {
      "epoch": 4.66,
      "learning_rate": 2.6723353012012808e-05,
      "loss": 2.0977,
      "step": 158500
    },
    {
      "epoch": 4.67,
      "learning_rate": 2.664992510353335e-05,
      "loss": 2.0946,
      "step": 159000
    },
    {
      "epoch": 4.68,
      "learning_rate": 2.6576497195053894e-05,
      "loss": 2.0962,
      "step": 159500
    },
    {
      "epoch": 4.7,
      "learning_rate": 2.6503069286574444e-05,
      "loss": 2.0941,
      "step": 160000
    },
    {
      "epoch": 4.71,
      "learning_rate": 2.6429641378094986e-05,
      "loss": 2.0908,
      "step": 160500
    },
    {
      "epoch": 4.73,
      "learning_rate": 2.6356213469615533e-05,
      "loss": 2.095,
      "step": 161000
    },
    {
      "epoch": 4.74,
      "learning_rate": 2.6282785561136076e-05,
      "loss": 2.091,
      "step": 161500
    },
    {
      "epoch": 4.76,
      "learning_rate": 2.6209357652656625e-05,
      "loss": 2.0873,
      "step": 162000
    },
    {
      "epoch": 4.77,
      "learning_rate": 2.6135929744177168e-05,
      "loss": 2.0896,
      "step": 162500
    },
    {
      "epoch": 4.79,
      "learning_rate": 2.606250183569771e-05,
      "loss": 2.0887,
      "step": 163000
    },
    {
      "epoch": 4.8,
      "learning_rate": 2.598907392721826e-05,
      "loss": 2.0914,
      "step": 163500
    },
    {
      "epoch": 4.82,
      "learning_rate": 2.5915646018738804e-05,
      "loss": 2.09,
      "step": 164000
    },
    {
      "epoch": 4.83,
      "learning_rate": 2.5842218110259347e-05,
      "loss": 2.0878,
      "step": 164500
    },
    {
      "epoch": 4.85,
      "learning_rate": 2.576879020177989e-05,
      "loss": 2.0878,
      "step": 165000
    },
    {
      "epoch": 4.86,
      "learning_rate": 2.569536229330044e-05,
      "loss": 2.0865,
      "step": 165500
    },
    {
      "epoch": 4.88,
      "learning_rate": 2.5621934384820982e-05,
      "loss": 2.088,
      "step": 166000
    },
    {
      "epoch": 4.89,
      "learning_rate": 2.554850647634153e-05,
      "loss": 2.0856,
      "step": 166500
    },
    {
      "epoch": 4.9,
      "learning_rate": 2.5475078567862075e-05,
      "loss": 2.0872,
      "step": 167000
    },
    {
      "epoch": 4.92,
      "learning_rate": 2.540165065938262e-05,
      "loss": 2.0864,
      "step": 167500
    },
    {
      "epoch": 4.93,
      "learning_rate": 2.5328222750903164e-05,
      "loss": 2.0822,
      "step": 168000
    },
    {
      "epoch": 4.95,
      "learning_rate": 2.5254794842423707e-05,
      "loss": 2.0795,
      "step": 168500
    },
    {
      "epoch": 4.96,
      "learning_rate": 2.5181366933944256e-05,
      "loss": 2.0851,
      "step": 169000
    },
    {
      "epoch": 4.98,
      "learning_rate": 2.51079390254648e-05,
      "loss": 2.081,
      "step": 169500
    },
    {
      "epoch": 4.99,
      "learning_rate": 2.5034511116985342e-05,
      "loss": 2.0813,
      "step": 170000
    },
    {
      "epoch": 5.01,
      "learning_rate": 2.496108320850589e-05,
      "loss": 2.0794,
      "step": 170500
    },
    {
      "epoch": 5.02,
      "learning_rate": 2.4887655300026435e-05,
      "loss": 2.0816,
      "step": 171000
    },
    {
      "epoch": 5.04,
      "learning_rate": 2.4814227391546978e-05,
      "loss": 2.0745,
      "step": 171500
    },
    {
      "epoch": 5.05,
      "learning_rate": 2.4740799483067524e-05,
      "loss": 2.0825,
      "step": 172000
    },
    {
      "epoch": 5.07,
      "learning_rate": 2.466737157458807e-05,
      "loss": 2.0782,
      "step": 172500
    },
    {
      "epoch": 5.08,
      "learning_rate": 2.4593943666108617e-05,
      "loss": 2.0795,
      "step": 173000
    },
    {
      "epoch": 5.1,
      "learning_rate": 2.4520515757629163e-05,
      "loss": 2.0787,
      "step": 173500
    },
    {
      "epoch": 5.11,
      "learning_rate": 2.4447087849149706e-05,
      "loss": 2.0749,
      "step": 174000
    },
    {
      "epoch": 5.13,
      "learning_rate": 2.4373659940670252e-05,
      "loss": 2.0762,
      "step": 174500
    },
    {
      "epoch": 5.14,
      "learning_rate": 2.4300232032190795e-05,
      "loss": 2.0763,
      "step": 175000
    },
    {
      "epoch": 5.15,
      "learning_rate": 2.422680412371134e-05,
      "loss": 2.0729,
      "step": 175500
    },
    {
      "epoch": 5.17,
      "learning_rate": 2.4153376215231888e-05,
      "loss": 2.0748,
      "step": 176000
    },
    {
      "epoch": 5.18,
      "learning_rate": 2.407994830675243e-05,
      "loss": 2.0762,
      "step": 176500
    },
    {
      "epoch": 5.2,
      "learning_rate": 2.4006520398272977e-05,
      "loss": 2.0761,
      "step": 177000
    },
    {
      "epoch": 5.21,
      "learning_rate": 2.393309248979352e-05,
      "loss": 2.0739,
      "step": 177500
    },
    {
      "epoch": 5.23,
      "learning_rate": 2.3859664581314066e-05,
      "loss": 2.073,
      "step": 178000
    },
    {
      "epoch": 5.24,
      "learning_rate": 2.3786236672834612e-05,
      "loss": 2.0691,
      "step": 178500
    },
    {
      "epoch": 5.26,
      "learning_rate": 2.371280876435516e-05,
      "loss": 2.0735,
      "step": 179000
    },
    {
      "epoch": 5.27,
      "learning_rate": 2.3639380855875705e-05,
      "loss": 2.0725,
      "step": 179500
    },
    {
      "epoch": 5.29,
      "learning_rate": 2.3565952947396248e-05,
      "loss": 2.0716,
      "step": 180000
    },
    {
      "epoch": 5.3,
      "learning_rate": 2.3492525038916794e-05,
      "loss": 2.0694,
      "step": 180500
    },
    {
      "epoch": 5.32,
      "learning_rate": 2.3419097130437337e-05,
      "loss": 2.0728,
      "step": 181000
    },
    {
      "epoch": 5.33,
      "learning_rate": 2.3345669221957883e-05,
      "loss": 2.0681,
      "step": 181500
    },
    {
      "epoch": 5.35,
      "learning_rate": 2.3272241313478426e-05,
      "loss": 2.0706,
      "step": 182000
    },
    {
      "epoch": 5.36,
      "learning_rate": 2.3198813404998972e-05,
      "loss": 2.069,
      "step": 182500
    },
    {
      "epoch": 5.37,
      "learning_rate": 2.312538549651952e-05,
      "loss": 2.0667,
      "step": 183000
    },
    {
      "epoch": 5.39,
      "learning_rate": 2.305195758804006e-05,
      "loss": 2.0717,
      "step": 183500
    },
    {
      "epoch": 5.4,
      "learning_rate": 2.2978529679560608e-05,
      "loss": 2.0686,
      "step": 184000
    },
    {
      "epoch": 5.42,
      "learning_rate": 2.2905101771081154e-05,
      "loss": 2.0671,
      "step": 184500
    },
    {
      "epoch": 5.43,
      "learning_rate": 2.28316738626017e-05,
      "loss": 2.0689,
      "step": 185000
    },
    {
      "epoch": 5.45,
      "learning_rate": 2.2758245954122243e-05,
      "loss": 2.0687,
      "step": 185500
    },
    {
      "epoch": 5.46,
      "learning_rate": 2.268481804564279e-05,
      "loss": 2.066,
      "step": 186000
    },
    {
      "epoch": 5.48,
      "learning_rate": 2.2611390137163336e-05,
      "loss": 2.0663,
      "step": 186500
    },
    {
      "epoch": 5.49,
      "learning_rate": 2.253796222868388e-05,
      "loss": 2.0619,
      "step": 187000
    },
    {
      "epoch": 5.51,
      "learning_rate": 2.2464534320204425e-05,
      "loss": 2.0682,
      "step": 187500
    },
    {
      "epoch": 5.52,
      "learning_rate": 2.2391106411724968e-05,
      "loss": 2.0619,
      "step": 188000
    },
    {
      "epoch": 5.54,
      "learning_rate": 2.2317678503245514e-05,
      "loss": 2.066,
      "step": 188500
    },
    {
      "epoch": 5.55,
      "learning_rate": 2.2244250594766057e-05,
      "loss": 2.0625,
      "step": 189000
    },
    {
      "epoch": 5.57,
      "learning_rate": 2.2170822686286604e-05,
      "loss": 2.0639,
      "step": 189500
    },
    {
      "epoch": 5.58,
      "learning_rate": 2.209739477780715e-05,
      "loss": 2.0648,
      "step": 190000
    },
    {
      "epoch": 5.6,
      "learning_rate": 2.2023966869327696e-05,
      "loss": 2.0607,
      "step": 190500
    },
    {
      "epoch": 5.61,
      "learning_rate": 2.1950538960848243e-05,
      "loss": 2.0634,
      "step": 191000
    },
    {
      "epoch": 5.62,
      "learning_rate": 2.1877111052368785e-05,
      "loss": 2.0638,
      "step": 191500
    },
    {
      "epoch": 5.64,
      "learning_rate": 2.1803683143889332e-05,
      "loss": 2.0605,
      "step": 192000
    },
    {
      "epoch": 5.65,
      "learning_rate": 2.1730255235409875e-05,
      "loss": 2.0614,
      "step": 192500
    },
    {
      "epoch": 5.67,
      "learning_rate": 2.165682732693042e-05,
      "loss": 2.06,
      "step": 193000
    },
    {
      "epoch": 5.68,
      "learning_rate": 2.1583399418450967e-05,
      "loss": 2.0619,
      "step": 193500
    },
    {
      "epoch": 5.7,
      "learning_rate": 2.150997150997151e-05,
      "loss": 2.0581,
      "step": 194000
    },
    {
      "epoch": 5.71,
      "learning_rate": 2.1436543601492056e-05,
      "loss": 2.0587,
      "step": 194500
    },
    {
      "epoch": 5.73,
      "learning_rate": 2.13631156930126e-05,
      "loss": 2.0588,
      "step": 195000
    },
    {
      "epoch": 5.74,
      "learning_rate": 2.1289687784533146e-05,
      "loss": 2.0554,
      "step": 195500
    },
    {
      "epoch": 5.76,
      "learning_rate": 2.121625987605369e-05,
      "loss": 2.0568,
      "step": 196000
    },
    {
      "epoch": 5.77,
      "learning_rate": 2.1142831967574235e-05,
      "loss": 2.0614,
      "step": 196500
    },
    {
      "epoch": 5.79,
      "learning_rate": 2.106940405909478e-05,
      "loss": 2.0553,
      "step": 197000
    },
    {
      "epoch": 5.8,
      "learning_rate": 2.0995976150615327e-05,
      "loss": 2.0561,
      "step": 197500
    },
    {
      "epoch": 5.82,
      "learning_rate": 2.0922548242135874e-05,
      "loss": 2.0576,
      "step": 198000
    },
    {
      "epoch": 5.83,
      "learning_rate": 2.0849120333656417e-05,
      "loss": 2.0562,
      "step": 198500
    },
    {
      "epoch": 5.84,
      "learning_rate": 2.0775692425176963e-05,
      "loss": 2.0573,
      "step": 199000
    },
    {
      "epoch": 5.86,
      "learning_rate": 2.0702264516697506e-05,
      "loss": 2.0578,
      "step": 199500
    },
    {
      "epoch": 5.87,
      "learning_rate": 2.0628836608218052e-05,
      "loss": 2.0563,
      "step": 200000
    },
    {
      "epoch": 5.89,
      "learning_rate": 2.05554086997386e-05,
      "loss": 2.056,
      "step": 200500
    },
    {
      "epoch": 5.9,
      "learning_rate": 2.048198079125914e-05,
      "loss": 2.0541,
      "step": 201000
    },
    {
      "epoch": 5.92,
      "learning_rate": 2.0408552882779688e-05,
      "loss": 2.0552,
      "step": 201500
    },
    {
      "epoch": 5.93,
      "learning_rate": 2.033512497430023e-05,
      "loss": 2.0548,
      "step": 202000
    },
    {
      "epoch": 5.95,
      "learning_rate": 2.0261697065820777e-05,
      "loss": 2.0565,
      "step": 202500
    },
    {
      "epoch": 5.96,
      "learning_rate": 2.0188269157341323e-05,
      "loss": 2.0527,
      "step": 203000
    },
    {
      "epoch": 5.98,
      "learning_rate": 2.011484124886187e-05,
      "loss": 2.0531,
      "step": 203500
    },
    {
      "epoch": 5.99,
      "learning_rate": 2.0041413340382416e-05,
      "loss": 2.0519,
      "step": 204000
    },
    {
      "epoch": 6.01,
      "learning_rate": 1.996798543190296e-05,
      "loss": 2.0501,
      "step": 204500
    },
    {
      "epoch": 6.02,
      "learning_rate": 1.9894557523423505e-05,
      "loss": 2.0527,
      "step": 205000
    },
    {
      "epoch": 6.04,
      "learning_rate": 1.9821129614944048e-05,
      "loss": 2.0484,
      "step": 205500
    },
    {
      "epoch": 6.05,
      "learning_rate": 1.9747701706464594e-05,
      "loss": 2.0485,
      "step": 206000
    },
    {
      "epoch": 6.07,
      "learning_rate": 1.967427379798514e-05,
      "loss": 2.0467,
      "step": 206500
    },
    {
      "epoch": 6.08,
      "learning_rate": 1.9600845889505683e-05,
      "loss": 2.0513,
      "step": 207000
    },
    {
      "epoch": 6.09,
      "learning_rate": 1.952741798102623e-05,
      "loss": 2.0488,
      "step": 207500
    },
    {
      "epoch": 6.11,
      "learning_rate": 1.9453990072546772e-05,
      "loss": 2.0513,
      "step": 208000
    },
    {
      "epoch": 6.12,
      "learning_rate": 1.938056216406732e-05,
      "loss": 2.0463,
      "step": 208500
    },
    {
      "epoch": 6.14,
      "learning_rate": 1.9307134255587865e-05,
      "loss": 2.0496,
      "step": 209000
    },
    {
      "epoch": 6.15,
      "learning_rate": 1.923370634710841e-05,
      "loss": 2.0492,
      "step": 209500
    },
    {
      "epoch": 6.17,
      "learning_rate": 1.9160278438628958e-05,
      "loss": 2.0437,
      "step": 210000
    },
    {
      "epoch": 6.18,
      "learning_rate": 1.90868505301495e-05,
      "loss": 2.0473,
      "step": 210500
    },
    {
      "epoch": 6.2,
      "learning_rate": 1.9013422621670047e-05,
      "loss": 2.0482,
      "step": 211000
    },
    {
      "epoch": 6.21,
      "learning_rate": 1.893999471319059e-05,
      "loss": 2.0478,
      "step": 211500
    },
    {
      "epoch": 6.23,
      "learning_rate": 1.8866566804711136e-05,
      "loss": 2.0463,
      "step": 212000
    },
    {
      "epoch": 6.24,
      "learning_rate": 1.879313889623168e-05,
      "loss": 2.0443,
      "step": 212500
    },
    {
      "epoch": 6.26,
      "learning_rate": 1.8719710987752225e-05,
      "loss": 2.0445,
      "step": 213000
    },
    {
      "epoch": 6.27,
      "learning_rate": 1.864628307927277e-05,
      "loss": 2.0427,
      "step": 213500
    },
    {
      "epoch": 6.29,
      "learning_rate": 1.8572855170793314e-05,
      "loss": 2.0404,
      "step": 214000
    },
    {
      "epoch": 6.3,
      "learning_rate": 1.849942726231386e-05,
      "loss": 2.046,
      "step": 214500
    },
    {
      "epoch": 6.31,
      "learning_rate": 1.8425999353834407e-05,
      "loss": 2.0438,
      "step": 215000
    },
    {
      "epoch": 6.33,
      "learning_rate": 1.8352571445354953e-05,
      "loss": 2.044,
      "step": 215500
    },
    {
      "epoch": 6.34,
      "learning_rate": 1.8279143536875496e-05,
      "loss": 2.0435,
      "step": 216000
    },
    {
      "epoch": 6.36,
      "learning_rate": 1.8205715628396043e-05,
      "loss": 2.0432,
      "step": 216500
    },
    {
      "epoch": 6.37,
      "learning_rate": 1.813228771991659e-05,
      "loss": 2.0431,
      "step": 217000
    },
    {
      "epoch": 6.39,
      "learning_rate": 1.8058859811437132e-05,
      "loss": 2.0393,
      "step": 217500
    },
    {
      "epoch": 6.4,
      "learning_rate": 1.7985431902957678e-05,
      "loss": 2.042,
      "step": 218000
    },
    {
      "epoch": 6.42,
      "learning_rate": 1.791200399447822e-05,
      "loss": 2.0402,
      "step": 218500
    },
    {
      "epoch": 6.43,
      "learning_rate": 1.7838576085998767e-05,
      "loss": 2.0404,
      "step": 219000
    },
    {
      "epoch": 6.45,
      "learning_rate": 1.776514817751931e-05,
      "loss": 2.0432,
      "step": 219500
    },
    {
      "epoch": 6.46,
      "learning_rate": 1.7691720269039856e-05,
      "loss": 2.0411,
      "step": 220000
    },
    {
      "epoch": 6.48,
      "learning_rate": 1.7618292360560403e-05,
      "loss": 2.0408,
      "step": 220500
    },
    {
      "epoch": 6.49,
      "learning_rate": 1.754486445208095e-05,
      "loss": 2.0426,
      "step": 221000
    },
    {
      "epoch": 6.51,
      "learning_rate": 1.7471436543601495e-05,
      "loss": 2.0408,
      "step": 221500
    },
    {
      "epoch": 6.52,
      "learning_rate": 1.7398008635122038e-05,
      "loss": 2.0406,
      "step": 222000
    },
    {
      "epoch": 6.54,
      "learning_rate": 1.7324580726642584e-05,
      "loss": 2.0391,
      "step": 222500
    },
    {
      "epoch": 6.55,
      "learning_rate": 1.7251152818163127e-05,
      "loss": 2.0374,
      "step": 223000
    },
    {
      "epoch": 6.56,
      "learning_rate": 1.7177724909683674e-05,
      "loss": 2.0374,
      "step": 223500
    },
    {
      "epoch": 6.58,
      "learning_rate": 1.710429700120422e-05,
      "loss": 2.0373,
      "step": 224000
    },
    {
      "epoch": 6.59,
      "learning_rate": 1.7030869092724763e-05,
      "loss": 2.0347,
      "step": 224500
    },
    {
      "epoch": 6.61,
      "learning_rate": 1.695744118424531e-05,
      "loss": 2.0356,
      "step": 225000
    },
    {
      "epoch": 6.62,
      "learning_rate": 1.6884013275765852e-05,
      "loss": 2.0407,
      "step": 225500
    },
    {
      "epoch": 6.64,
      "learning_rate": 1.68105853672864e-05,
      "loss": 2.0389,
      "step": 226000
    },
    {
      "epoch": 6.65,
      "learning_rate": 1.6737157458806945e-05,
      "loss": 2.0347,
      "step": 226500
    },
    {
      "epoch": 6.67,
      "learning_rate": 1.666372955032749e-05,
      "loss": 2.0397,
      "step": 227000
    },
    {
      "epoch": 6.68,
      "learning_rate": 1.6590301641848037e-05,
      "loss": 2.0348,
      "step": 227500
    },
    {
      "epoch": 6.7,
      "learning_rate": 1.651687373336858e-05,
      "loss": 2.0386,
      "step": 228000
    },
    {
      "epoch": 6.71,
      "learning_rate": 1.6443445824889126e-05,
      "loss": 2.0351,
      "step": 228500
    },
    {
      "epoch": 6.73,
      "learning_rate": 1.637001791640967e-05,
      "loss": 2.0363,
      "step": 229000
    },
    {
      "epoch": 6.74,
      "learning_rate": 1.6296590007930216e-05,
      "loss": 2.034,
      "step": 229500
    },
    {
      "epoch": 6.76,
      "learning_rate": 1.622316209945076e-05,
      "loss": 2.0386,
      "step": 230000
    },
    {
      "epoch": 6.77,
      "learning_rate": 1.6149734190971305e-05,
      "loss": 2.0334,
      "step": 230500
    },
    {
      "epoch": 6.78,
      "learning_rate": 1.607630628249185e-05,
      "loss": 2.0346,
      "step": 231000
    },
    {
      "epoch": 6.8,
      "learning_rate": 1.6002878374012394e-05,
      "loss": 2.0294,
      "step": 231500
    },
    {
      "epoch": 6.81,
      "learning_rate": 1.592945046553294e-05,
      "loss": 2.0342,
      "step": 232000
    },
    {
      "epoch": 6.83,
      "learning_rate": 1.5856022557053483e-05,
      "loss": 2.0332,
      "step": 232500
    },
    {
      "epoch": 6.84,
      "learning_rate": 1.578259464857403e-05,
      "loss": 2.0337,
      "step": 233000
    },
    {
      "epoch": 6.86,
      "learning_rate": 1.5709166740094576e-05,
      "loss": 2.0323,
      "step": 233500
    },
    {
      "epoch": 6.87,
      "learning_rate": 1.5635738831615122e-05,
      "loss": 2.0329,
      "step": 234000
    },
    {
      "epoch": 6.89,
      "learning_rate": 1.556231092313567e-05,
      "loss": 2.0314,
      "step": 234500
    },
    {
      "epoch": 6.9,
      "learning_rate": 1.548888301465621e-05,
      "loss": 2.0294,
      "step": 235000
    },
    {
      "epoch": 6.92,
      "learning_rate": 1.5415455106176758e-05,
      "loss": 2.0318,
      "step": 235500
    },
    {
      "epoch": 6.93,
      "learning_rate": 1.53420271976973e-05,
      "loss": 2.03,
      "step": 236000
    },
    {
      "epoch": 6.95,
      "learning_rate": 1.5268599289217847e-05,
      "loss": 2.0342,
      "step": 236500
    },
    {
      "epoch": 6.96,
      "learning_rate": 1.519517138073839e-05,
      "loss": 2.0324,
      "step": 237000
    },
    {
      "epoch": 6.98,
      "learning_rate": 1.5121743472258936e-05,
      "loss": 2.0318,
      "step": 237500
| }, | |
| { | |
| "epoch": 6.99, | |
| "learning_rate": 1.5048315563779484e-05, | |
| "loss": 2.0309, | |
| "step": 238000 | |
| }, | |
| { | |
| "epoch": 7.01, | |
| "learning_rate": 1.4974887655300027e-05, | |
| "loss": 2.0298, | |
| "step": 238500 | |
| }, | |
| { | |
| "epoch": 7.02, | |
| "learning_rate": 1.4901459746820573e-05, | |
| "loss": 2.0298, | |
| "step": 239000 | |
| }, | |
| { | |
| "epoch": 7.03, | |
| "learning_rate": 1.4828031838341116e-05, | |
| "loss": 2.0289, | |
| "step": 239500 | |
| }, | |
| { | |
| "epoch": 7.05, | |
| "learning_rate": 1.4754603929861662e-05, | |
| "loss": 2.0261, | |
| "step": 240000 | |
| }, | |
| { | |
| "epoch": 7.06, | |
| "learning_rate": 1.4681176021382207e-05, | |
| "loss": 2.0322, | |
| "step": 240500 | |
| }, | |
| { | |
| "epoch": 7.08, | |
| "learning_rate": 1.4607748112902753e-05, | |
| "loss": 2.0266, | |
| "step": 241000 | |
| }, | |
| { | |
| "epoch": 7.09, | |
| "learning_rate": 1.45343202044233e-05, | |
| "loss": 2.0277, | |
| "step": 241500 | |
| }, | |
| { | |
| "epoch": 7.11, | |
| "learning_rate": 1.4460892295943842e-05, | |
| "loss": 2.025, | |
| "step": 242000 | |
| }, | |
| { | |
| "epoch": 7.12, | |
| "learning_rate": 1.4387464387464389e-05, | |
| "loss": 2.0253, | |
| "step": 242500 | |
| }, | |
| { | |
| "epoch": 7.14, | |
| "learning_rate": 1.4314036478984932e-05, | |
| "loss": 2.0283, | |
| "step": 243000 | |
| }, | |
| { | |
| "epoch": 7.15, | |
| "learning_rate": 1.4240608570505478e-05, | |
| "loss": 2.0273, | |
| "step": 243500 | |
| }, | |
| { | |
| "epoch": 7.17, | |
| "learning_rate": 1.4167180662026023e-05, | |
| "loss": 2.0276, | |
| "step": 244000 | |
| }, | |
| { | |
| "epoch": 7.18, | |
| "learning_rate": 1.4093752753546569e-05, | |
| "loss": 2.0274, | |
| "step": 244500 | |
| }, | |
| { | |
| "epoch": 7.2, | |
| "learning_rate": 1.4020324845067115e-05, | |
| "loss": 2.0284, | |
| "step": 245000 | |
| }, | |
| { | |
| "epoch": 7.21, | |
| "learning_rate": 1.3946896936587658e-05, | |
| "loss": 2.027, | |
| "step": 245500 | |
| }, | |
| { | |
| "epoch": 7.23, | |
| "learning_rate": 1.3873469028108204e-05, | |
| "loss": 2.0236, | |
| "step": 246000 | |
| }, | |
| { | |
| "epoch": 7.24, | |
| "learning_rate": 1.3800041119628749e-05, | |
| "loss": 2.0289, | |
| "step": 246500 | |
| }, | |
| { | |
| "epoch": 7.25, | |
| "learning_rate": 1.3726613211149295e-05, | |
| "loss": 2.0229, | |
| "step": 247000 | |
| }, | |
| { | |
| "epoch": 7.27, | |
| "learning_rate": 1.3653185302669838e-05, | |
| "loss": 2.0246, | |
| "step": 247500 | |
| }, | |
| { | |
| "epoch": 7.28, | |
| "learning_rate": 1.3579757394190384e-05, | |
| "loss": 2.0221, | |
| "step": 248000 | |
| }, | |
| { | |
| "epoch": 7.3, | |
| "learning_rate": 1.350632948571093e-05, | |
| "loss": 2.0237, | |
| "step": 248500 | |
| }, | |
| { | |
| "epoch": 7.31, | |
| "learning_rate": 1.3432901577231474e-05, | |
| "loss": 2.0274, | |
| "step": 249000 | |
| }, | |
| { | |
| "epoch": 7.33, | |
| "learning_rate": 1.335947366875202e-05, | |
| "loss": 2.0228, | |
| "step": 249500 | |
| }, | |
| { | |
| "epoch": 7.34, | |
| "learning_rate": 1.3286045760272565e-05, | |
| "loss": 2.0241, | |
| "step": 250000 | |
| }, | |
| { | |
| "epoch": 7.36, | |
| "learning_rate": 1.321261785179311e-05, | |
| "loss": 2.0262, | |
| "step": 250500 | |
| }, | |
| { | |
| "epoch": 7.37, | |
| "learning_rate": 1.3139189943313654e-05, | |
| "loss": 2.0219, | |
| "step": 251000 | |
| }, | |
| { | |
| "epoch": 7.39, | |
| "learning_rate": 1.30657620348342e-05, | |
| "loss": 2.023, | |
| "step": 251500 | |
| }, | |
| { | |
| "epoch": 7.4, | |
| "learning_rate": 1.2992334126354746e-05, | |
| "loss": 2.0245, | |
| "step": 252000 | |
| }, | |
| { | |
| "epoch": 7.42, | |
| "learning_rate": 1.2918906217875291e-05, | |
| "loss": 2.0245, | |
| "step": 252500 | |
| }, | |
| { | |
| "epoch": 7.43, | |
| "learning_rate": 1.2845478309395837e-05, | |
| "loss": 2.0265, | |
| "step": 253000 | |
| }, | |
| { | |
| "epoch": 7.45, | |
| "learning_rate": 1.277205040091638e-05, | |
| "loss": 2.0244, | |
| "step": 253500 | |
| }, | |
| { | |
| "epoch": 7.46, | |
| "learning_rate": 1.2698622492436926e-05, | |
| "loss": 2.0212, | |
| "step": 254000 | |
| }, | |
| { | |
| "epoch": 7.47, | |
| "learning_rate": 1.262519458395747e-05, | |
| "loss": 2.0212, | |
| "step": 254500 | |
| }, | |
| { | |
| "epoch": 7.49, | |
| "learning_rate": 1.2551766675478016e-05, | |
| "loss": 2.02, | |
| "step": 255000 | |
| }, | |
| { | |
| "epoch": 7.5, | |
| "learning_rate": 1.247833876699856e-05, | |
| "loss": 2.0223, | |
| "step": 255500 | |
| }, | |
| { | |
| "epoch": 7.52, | |
| "learning_rate": 1.2404910858519106e-05, | |
| "loss": 2.0222, | |
| "step": 256000 | |
| }, | |
| { | |
| "epoch": 7.53, | |
| "learning_rate": 1.2331482950039653e-05, | |
| "loss": 2.0217, | |
| "step": 256500 | |
| }, | |
| { | |
| "epoch": 7.55, | |
| "learning_rate": 1.2258055041560197e-05, | |
| "loss": 2.0179, | |
| "step": 257000 | |
| }, | |
| { | |
| "epoch": 7.56, | |
| "learning_rate": 1.2184627133080742e-05, | |
| "loss": 2.0218, | |
| "step": 257500 | |
| }, | |
| { | |
| "epoch": 7.58, | |
| "learning_rate": 1.2111199224601287e-05, | |
| "loss": 2.0214, | |
| "step": 258000 | |
| }, | |
| { | |
| "epoch": 7.59, | |
| "learning_rate": 1.2037771316121831e-05, | |
| "loss": 2.0184, | |
| "step": 258500 | |
| }, | |
| { | |
| "epoch": 7.61, | |
| "learning_rate": 1.1964343407642377e-05, | |
| "loss": 2.0222, | |
| "step": 259000 | |
| }, | |
| { | |
| "epoch": 7.62, | |
| "learning_rate": 1.1890915499162922e-05, | |
| "loss": 2.0169, | |
| "step": 259500 | |
| }, | |
| { | |
| "epoch": 7.64, | |
| "learning_rate": 1.1817487590683468e-05, | |
| "loss": 2.0164, | |
| "step": 260000 | |
| }, | |
| { | |
| "epoch": 7.65, | |
| "learning_rate": 1.1744059682204013e-05, | |
| "loss": 2.0156, | |
| "step": 260500 | |
| }, | |
| { | |
| "epoch": 7.67, | |
| "learning_rate": 1.1670631773724558e-05, | |
| "loss": 2.017, | |
| "step": 261000 | |
| }, | |
| { | |
| "epoch": 7.68, | |
| "learning_rate": 1.1597203865245102e-05, | |
| "loss": 2.0181, | |
| "step": 261500 | |
| }, | |
| { | |
| "epoch": 7.7, | |
| "learning_rate": 1.1523775956765648e-05, | |
| "loss": 2.0174, | |
| "step": 262000 | |
| }, | |
| { | |
| "epoch": 7.71, | |
| "learning_rate": 1.1450348048286193e-05, | |
| "loss": 2.0194, | |
| "step": 262500 | |
| }, | |
| { | |
| "epoch": 7.72, | |
| "learning_rate": 1.1376920139806738e-05, | |
| "loss": 2.0185, | |
| "step": 263000 | |
| }, | |
| { | |
| "epoch": 7.74, | |
| "learning_rate": 1.1303492231327284e-05, | |
| "loss": 2.019, | |
| "step": 263500 | |
| }, | |
| { | |
| "epoch": 7.75, | |
| "learning_rate": 1.1230064322847829e-05, | |
| "loss": 2.0173, | |
| "step": 264000 | |
| }, | |
| { | |
| "epoch": 7.77, | |
| "learning_rate": 1.1156636414368373e-05, | |
| "loss": 2.0181, | |
| "step": 264500 | |
| }, | |
| { | |
| "epoch": 7.78, | |
| "learning_rate": 1.108320850588892e-05, | |
| "loss": 2.0172, | |
| "step": 265000 | |
| }, | |
| { | |
| "epoch": 7.8, | |
| "learning_rate": 1.1009780597409464e-05, | |
| "loss": 2.0168, | |
| "step": 265500 | |
| }, | |
| { | |
| "epoch": 7.81, | |
| "learning_rate": 1.0936352688930009e-05, | |
| "loss": 2.0191, | |
| "step": 266000 | |
| }, | |
| { | |
| "epoch": 7.83, | |
| "learning_rate": 1.0862924780450553e-05, | |
| "loss": 2.0172, | |
| "step": 266500 | |
| }, | |
| { | |
| "epoch": 7.84, | |
| "learning_rate": 1.07894968719711e-05, | |
| "loss": 2.0167, | |
| "step": 267000 | |
| }, | |
| { | |
| "epoch": 7.86, | |
| "learning_rate": 1.0716068963491644e-05, | |
| "loss": 2.0157, | |
| "step": 267500 | |
| }, | |
| { | |
| "epoch": 7.87, | |
| "learning_rate": 1.064264105501219e-05, | |
| "loss": 2.0151, | |
| "step": 268000 | |
| }, | |
| { | |
| "epoch": 7.89, | |
| "learning_rate": 1.0569213146532735e-05, | |
| "loss": 2.0165, | |
| "step": 268500 | |
| }, | |
| { | |
| "epoch": 7.9, | |
| "learning_rate": 1.049578523805328e-05, | |
| "loss": 2.0135, | |
| "step": 269000 | |
| }, | |
| { | |
| "epoch": 7.92, | |
| "learning_rate": 1.0422357329573824e-05, | |
| "loss": 2.0153, | |
| "step": 269500 | |
| }, | |
| { | |
| "epoch": 7.93, | |
| "learning_rate": 1.0348929421094369e-05, | |
| "loss": 2.0145, | |
| "step": 270000 | |
| }, | |
| { | |
| "epoch": 7.94, | |
| "learning_rate": 1.0275501512614915e-05, | |
| "loss": 2.0175, | |
| "step": 270500 | |
| }, | |
| { | |
| "epoch": 7.96, | |
| "learning_rate": 1.0202073604135461e-05, | |
| "loss": 2.0149, | |
| "step": 271000 | |
| }, | |
| { | |
| "epoch": 7.97, | |
| "learning_rate": 1.0128645695656006e-05, | |
| "loss": 2.0184, | |
| "step": 271500 | |
| }, | |
| { | |
| "epoch": 7.99, | |
| "learning_rate": 1.005521778717655e-05, | |
| "loss": 2.0161, | |
| "step": 272000 | |
| }, | |
| { | |
| "epoch": 8.0, | |
| "learning_rate": 9.981789878697095e-06, | |
| "loss": 2.015, | |
| "step": 272500 | |
| }, | |
| { | |
| "epoch": 8.02, | |
| "learning_rate": 9.90836197021764e-06, | |
| "loss": 2.0154, | |
| "step": 273000 | |
| }, | |
| { | |
| "epoch": 8.03, | |
| "learning_rate": 9.834934061738186e-06, | |
| "loss": 2.0158, | |
| "step": 273500 | |
| }, | |
| { | |
| "epoch": 8.05, | |
| "learning_rate": 9.76150615325873e-06, | |
| "loss": 2.0139, | |
| "step": 274000 | |
| }, | |
| { | |
| "epoch": 8.06, | |
| "learning_rate": 9.688078244779277e-06, | |
| "loss": 2.0107, | |
| "step": 274500 | |
| }, | |
| { | |
| "epoch": 8.08, | |
| "learning_rate": 9.614650336299822e-06, | |
| "loss": 2.0124, | |
| "step": 275000 | |
| }, | |
| { | |
| "epoch": 8.09, | |
| "learning_rate": 9.541222427820366e-06, | |
| "loss": 2.0123, | |
| "step": 275500 | |
| }, | |
| { | |
| "epoch": 8.11, | |
| "learning_rate": 9.46779451934091e-06, | |
| "loss": 2.0131, | |
| "step": 276000 | |
| }, | |
| { | |
| "epoch": 8.12, | |
| "learning_rate": 9.394366610861455e-06, | |
| "loss": 2.0113, | |
| "step": 276500 | |
| }, | |
| { | |
| "epoch": 8.14, | |
| "learning_rate": 9.320938702382002e-06, | |
| "loss": 2.0085, | |
| "step": 277000 | |
| }, | |
| { | |
| "epoch": 8.15, | |
| "learning_rate": 9.247510793902548e-06, | |
| "loss": 2.0122, | |
| "step": 277500 | |
| }, | |
| { | |
| "epoch": 8.17, | |
| "learning_rate": 9.174082885423093e-06, | |
| "loss": 2.0124, | |
| "step": 278000 | |
| }, | |
| { | |
| "epoch": 8.18, | |
| "learning_rate": 9.100654976943637e-06, | |
| "loss": 2.0118, | |
| "step": 278500 | |
| }, | |
| { | |
| "epoch": 8.19, | |
| "learning_rate": 9.027227068464182e-06, | |
| "loss": 2.0106, | |
| "step": 279000 | |
| }, | |
| { | |
| "epoch": 8.21, | |
| "learning_rate": 8.953799159984726e-06, | |
| "loss": 2.0123, | |
| "step": 279500 | |
| }, | |
| { | |
| "epoch": 8.22, | |
| "learning_rate": 8.880371251505273e-06, | |
| "loss": 2.0113, | |
| "step": 280000 | |
| }, | |
| { | |
| "epoch": 8.24, | |
| "learning_rate": 8.806943343025819e-06, | |
| "loss": 2.0126, | |
| "step": 280500 | |
| }, | |
| { | |
| "epoch": 8.25, | |
| "learning_rate": 8.733515434546364e-06, | |
| "loss": 2.0123, | |
| "step": 281000 | |
| }, | |
| { | |
| "epoch": 8.27, | |
| "learning_rate": 8.660087526066908e-06, | |
| "loss": 2.011, | |
| "step": 281500 | |
| }, | |
| { | |
| "epoch": 8.28, | |
| "learning_rate": 8.586659617587453e-06, | |
| "loss": 2.0082, | |
| "step": 282000 | |
| }, | |
| { | |
| "epoch": 8.3, | |
| "learning_rate": 8.513231709107997e-06, | |
| "loss": 2.0084, | |
| "step": 282500 | |
| }, | |
| { | |
| "epoch": 8.31, | |
| "learning_rate": 8.439803800628544e-06, | |
| "loss": 2.0106, | |
| "step": 283000 | |
| }, | |
| { | |
| "epoch": 8.33, | |
| "learning_rate": 8.366375892149088e-06, | |
| "loss": 2.0123, | |
| "step": 283500 | |
| }, | |
| { | |
| "epoch": 8.34, | |
| "learning_rate": 8.292947983669635e-06, | |
| "loss": 2.0069, | |
| "step": 284000 | |
| }, | |
| { | |
| "epoch": 8.36, | |
| "learning_rate": 8.219520075190179e-06, | |
| "loss": 2.0082, | |
| "step": 284500 | |
| }, | |
| { | |
| "epoch": 8.37, | |
| "learning_rate": 8.146092166710724e-06, | |
| "loss": 2.0102, | |
| "step": 285000 | |
| }, | |
| { | |
| "epoch": 8.39, | |
| "learning_rate": 8.072664258231268e-06, | |
| "loss": 2.0099, | |
| "step": 285500 | |
| }, | |
| { | |
| "epoch": 8.4, | |
| "learning_rate": 7.999236349751815e-06, | |
| "loss": 2.0047, | |
| "step": 286000 | |
| }, | |
| { | |
| "epoch": 8.41, | |
| "learning_rate": 7.92580844127236e-06, | |
| "loss": 2.0122, | |
| "step": 286500 | |
| }, | |
| { | |
| "epoch": 8.43, | |
| "learning_rate": 7.852380532792904e-06, | |
| "loss": 2.01, | |
| "step": 287000 | |
| }, | |
| { | |
| "epoch": 8.44, | |
| "learning_rate": 7.77895262431345e-06, | |
| "loss": 2.0081, | |
| "step": 287500 | |
| }, | |
| { | |
| "epoch": 8.46, | |
| "learning_rate": 7.705524715833995e-06, | |
| "loss": 2.007, | |
| "step": 288000 | |
| }, | |
| { | |
| "epoch": 8.47, | |
| "learning_rate": 7.63209680735454e-06, | |
| "loss": 2.005, | |
| "step": 288500 | |
| }, | |
| { | |
| "epoch": 8.49, | |
| "learning_rate": 7.558668898875085e-06, | |
| "loss": 2.0107, | |
| "step": 289000 | |
| }, | |
| { | |
| "epoch": 8.5, | |
| "learning_rate": 7.485240990395629e-06, | |
| "loss": 2.0109, | |
| "step": 289500 | |
| }, | |
| { | |
| "epoch": 8.52, | |
| "learning_rate": 7.411813081916175e-06, | |
| "loss": 2.0084, | |
| "step": 290000 | |
| }, | |
| { | |
| "epoch": 8.53, | |
| "learning_rate": 7.338385173436719e-06, | |
| "loss": 2.009, | |
| "step": 290500 | |
| }, | |
| { | |
| "epoch": 8.55, | |
| "learning_rate": 7.264957264957266e-06, | |
| "loss": 2.0065, | |
| "step": 291000 | |
| }, | |
| { | |
| "epoch": 8.56, | |
| "learning_rate": 7.191529356477811e-06, | |
| "loss": 2.0098, | |
| "step": 291500 | |
| }, | |
| { | |
| "epoch": 8.58, | |
| "learning_rate": 7.118101447998356e-06, | |
| "loss": 2.0087, | |
| "step": 292000 | |
| }, | |
| { | |
| "epoch": 8.59, | |
| "learning_rate": 7.0446735395189e-06, | |
| "loss": 2.0073, | |
| "step": 292500 | |
| }, | |
| { | |
| "epoch": 8.61, | |
| "learning_rate": 6.971245631039446e-06, | |
| "loss": 2.01, | |
| "step": 293000 | |
| }, | |
| { | |
| "epoch": 8.62, | |
| "learning_rate": 6.89781772255999e-06, | |
| "loss": 2.0087, | |
| "step": 293500 | |
| }, | |
| { | |
| "epoch": 8.64, | |
| "learning_rate": 6.824389814080535e-06, | |
| "loss": 2.0061, | |
| "step": 294000 | |
| }, | |
| { | |
| "epoch": 8.65, | |
| "learning_rate": 6.750961905601082e-06, | |
| "loss": 2.0119, | |
| "step": 294500 | |
| }, | |
| { | |
| "epoch": 8.66, | |
| "learning_rate": 6.677533997121627e-06, | |
| "loss": 2.0071, | |
| "step": 295000 | |
| }, | |
| { | |
| "epoch": 8.68, | |
| "learning_rate": 6.604106088642171e-06, | |
| "loss": 2.0115, | |
| "step": 295500 | |
| }, | |
| { | |
| "epoch": 8.69, | |
| "learning_rate": 6.530678180162717e-06, | |
| "loss": 2.0021, | |
| "step": 296000 | |
| }, | |
| { | |
| "epoch": 8.71, | |
| "learning_rate": 6.457250271683261e-06, | |
| "loss": 2.0075, | |
| "step": 296500 | |
| }, | |
| { | |
| "epoch": 8.72, | |
| "learning_rate": 6.383822363203806e-06, | |
| "loss": 2.0059, | |
| "step": 297000 | |
| }, | |
| { | |
| "epoch": 8.74, | |
| "learning_rate": 6.310394454724351e-06, | |
| "loss": 2.006, | |
| "step": 297500 | |
| }, | |
| { | |
| "epoch": 8.75, | |
| "learning_rate": 6.236966546244897e-06, | |
| "loss": 2.0052, | |
| "step": 298000 | |
| }, | |
| { | |
| "epoch": 8.77, | |
| "learning_rate": 6.163538637765442e-06, | |
| "loss": 2.0064, | |
| "step": 298500 | |
| }, | |
| { | |
| "epoch": 8.78, | |
| "learning_rate": 6.090110729285988e-06, | |
| "loss": 2.0103, | |
| "step": 299000 | |
| }, | |
| { | |
| "epoch": 8.8, | |
| "learning_rate": 6.016682820806532e-06, | |
| "loss": 2.004, | |
| "step": 299500 | |
| }, | |
| { | |
| "epoch": 8.81, | |
| "learning_rate": 5.943254912327077e-06, | |
| "loss": 2.0042, | |
| "step": 300000 | |
| }, | |
| { | |
| "epoch": 8.83, | |
| "learning_rate": 5.869827003847623e-06, | |
| "loss": 2.0072, | |
| "step": 300500 | |
| }, | |
| { | |
| "epoch": 8.84, | |
| "learning_rate": 5.796399095368168e-06, | |
| "loss": 2.0051, | |
| "step": 301000 | |
| }, | |
| { | |
| "epoch": 8.86, | |
| "learning_rate": 5.7229711868887124e-06, | |
| "loss": 2.0044, | |
| "step": 301500 | |
| }, | |
| { | |
| "epoch": 8.87, | |
| "learning_rate": 5.649543278409259e-06, | |
| "loss": 2.0032, | |
| "step": 302000 | |
| }, | |
| { | |
| "epoch": 8.88, | |
| "learning_rate": 5.576115369929803e-06, | |
| "loss": 2.0039, | |
| "step": 302500 | |
| }, | |
| { | |
| "epoch": 8.9, | |
| "learning_rate": 5.502687461450348e-06, | |
| "loss": 2.0084, | |
| "step": 303000 | |
| }, | |
| { | |
| "epoch": 8.91, | |
| "learning_rate": 5.429259552970893e-06, | |
| "loss": 2.0048, | |
| "step": 303500 | |
| }, | |
| { | |
| "epoch": 8.93, | |
| "learning_rate": 5.355831644491439e-06, | |
| "loss": 2.0011, | |
| "step": 304000 | |
| }, | |
| { | |
| "epoch": 8.94, | |
| "learning_rate": 5.2824037360119834e-06, | |
| "loss": 2.0066, | |
| "step": 304500 | |
| }, | |
| { | |
| "epoch": 8.96, | |
| "learning_rate": 5.208975827532529e-06, | |
| "loss": 2.005, | |
| "step": 305000 | |
| }, | |
| { | |
| "epoch": 8.97, | |
| "learning_rate": 5.135547919053074e-06, | |
| "loss": 2.0046, | |
| "step": 305500 | |
| }, | |
| { | |
| "epoch": 8.99, | |
| "learning_rate": 5.062120010573619e-06, | |
| "loss": 2.0027, | |
| "step": 306000 | |
| }, | |
| { | |
| "epoch": 9.0, | |
| "learning_rate": 4.988692102094164e-06, | |
| "loss": 2.0056, | |
| "step": 306500 | |
| }, | |
| { | |
| "epoch": 9.02, | |
| "learning_rate": 4.915264193614709e-06, | |
| "loss": 2.0008, | |
| "step": 307000 | |
| }, | |
| { | |
| "epoch": 9.03, | |
| "learning_rate": 4.841836285135254e-06, | |
| "loss": 2.0039, | |
| "step": 307500 | |
| }, | |
| { | |
| "epoch": 9.05, | |
| "learning_rate": 4.7684083766558e-06, | |
| "loss": 2.0003, | |
| "step": 308000 | |
| }, | |
| { | |
| "epoch": 9.06, | |
| "learning_rate": 4.6949804681763445e-06, | |
| "loss": 2.0001, | |
| "step": 308500 | |
| }, | |
| { | |
| "epoch": 9.08, | |
| "learning_rate": 4.62155255969689e-06, | |
| "loss": 2.003, | |
| "step": 309000 | |
| }, | |
| { | |
| "epoch": 9.09, | |
| "learning_rate": 4.548124651217435e-06, | |
| "loss": 2.0019, | |
| "step": 309500 | |
| }, | |
| { | |
| "epoch": 9.11, | |
| "learning_rate": 4.47469674273798e-06, | |
| "loss": 2.0012, | |
| "step": 310000 | |
| }, | |
| { | |
| "epoch": 9.12, | |
| "learning_rate": 4.401268834258525e-06, | |
| "loss": 2.0018, | |
| "step": 310500 | |
| }, | |
| { | |
| "epoch": 9.13, | |
| "learning_rate": 4.327840925779071e-06, | |
| "loss": 2.0018, | |
| "step": 311000 | |
| }, | |
| { | |
| "epoch": 9.15, | |
| "learning_rate": 4.2544130172996154e-06, | |
| "loss": 2.0024, | |
| "step": 311500 | |
| }, | |
| { | |
| "epoch": 9.16, | |
| "learning_rate": 4.18098510882016e-06, | |
| "loss": 1.9982, | |
| "step": 312000 | |
| }, | |
| { | |
| "epoch": 9.18, | |
| "learning_rate": 4.1075572003407055e-06, | |
| "loss": 2.0031, | |
| "step": 312500 | |
| }, | |
| { | |
| "epoch": 9.19, | |
| "learning_rate": 4.034129291861251e-06, | |
| "loss": 2.0048, | |
| "step": 313000 | |
| }, | |
| { | |
| "epoch": 9.21, | |
| "learning_rate": 3.9607013833817955e-06, | |
| "loss": 2.004, | |
| "step": 313500 | |
| }, | |
| { | |
| "epoch": 9.22, | |
| "learning_rate": 3.887273474902341e-06, | |
| "loss": 2.0027, | |
| "step": 314000 | |
| }, | |
| { | |
| "epoch": 9.24, | |
| "learning_rate": 3.8138455664228864e-06, | |
| "loss": 2.0035, | |
| "step": 314500 | |
| }, | |
| { | |
| "epoch": 9.25, | |
| "learning_rate": 3.740417657943431e-06, | |
| "loss": 2.0036, | |
| "step": 315000 | |
| }, | |
| { | |
| "epoch": 9.27, | |
| "learning_rate": 3.666989749463976e-06, | |
| "loss": 1.9977, | |
| "step": 315500 | |
| }, | |
| { | |
| "epoch": 9.28, | |
| "learning_rate": 3.593561840984522e-06, | |
| "loss": 2.0023, | |
| "step": 316000 | |
| }, | |
| { | |
| "epoch": 9.3, | |
| "learning_rate": 3.5201339325050665e-06, | |
| "loss": 2.0017, | |
| "step": 316500 | |
| }, | |
| { | |
| "epoch": 9.31, | |
| "learning_rate": 3.4467060240256115e-06, | |
| "loss": 2.0015, | |
| "step": 317000 | |
| }, | |
| { | |
| "epoch": 9.33, | |
| "learning_rate": 3.3732781155461574e-06, | |
| "loss": 2.0024, | |
| "step": 317500 | |
| }, | |
| { | |
| "epoch": 9.34, | |
| "learning_rate": 3.299850207066702e-06, | |
| "loss": 2.0012, | |
| "step": 318000 | |
| }, | |
| { | |
| "epoch": 9.35, | |
| "learning_rate": 3.226422298587247e-06, | |
| "loss": 2.0004, | |
| "step": 318500 | |
| }, | |
| { | |
| "epoch": 9.37, | |
| "learning_rate": 3.152994390107792e-06, | |
| "loss": 2.0048, | |
| "step": 319000 | |
| }, | |
| { | |
| "epoch": 9.38, | |
| "learning_rate": 3.0795664816283375e-06, | |
| "loss": 2.0005, | |
| "step": 319500 | |
| }, | |
| { | |
| "epoch": 9.4, | |
| "learning_rate": 3.0061385731488825e-06, | |
| "loss": 2.0022, | |
| "step": 320000 | |
| }, | |
| { | |
| "epoch": 9.41, | |
| "learning_rate": 2.932710664669428e-06, | |
| "loss": 2.0022, | |
| "step": 320500 | |
| }, | |
| { | |
| "epoch": 9.43, | |
| "learning_rate": 2.8592827561899726e-06, | |
| "loss": 1.9982, | |
| "step": 321000 | |
| }, | |
| { | |
| "epoch": 9.44, | |
| "learning_rate": 2.785854847710518e-06, | |
| "loss": 2.0, | |
| "step": 321500 | |
| }, | |
| { | |
| "epoch": 9.46, | |
| "learning_rate": 2.712426939231063e-06, | |
| "loss": 1.9994, | |
| "step": 322000 | |
| }, | |
| { | |
| "epoch": 9.47, | |
| "learning_rate": 2.638999030751608e-06, | |
| "loss": 1.9991, | |
| "step": 322500 | |
| }, | |
| { | |
| "epoch": 9.49, | |
| "learning_rate": 2.5655711222721535e-06, | |
| "loss": 1.9989, | |
| "step": 323000 | |
| }, | |
| { | |
| "epoch": 9.5, | |
| "learning_rate": 2.4921432137926985e-06, | |
| "loss": 2.0017, | |
| "step": 323500 | |
| }, | |
| { | |
| "epoch": 9.52, | |
| "learning_rate": 2.4187153053132435e-06, | |
| "loss": 1.9981, | |
| "step": 324000 | |
| }, | |
| { | |
| "epoch": 9.53, | |
| "learning_rate": 2.3452873968337886e-06, | |
| "loss": 1.9998, | |
| "step": 324500 | |
| }, | |
| { | |
| "epoch": 9.55, | |
| "learning_rate": 2.271859488354334e-06, | |
| "loss": 1.9978, | |
| "step": 325000 | |
| }, | |
| { | |
| "epoch": 9.56, | |
| "learning_rate": 2.198431579874879e-06, | |
| "loss": 1.9981, | |
| "step": 325500 | |
| }, | |
| { | |
| "epoch": 9.57, | |
| "learning_rate": 2.125003671395424e-06, | |
| "loss": 2.001, | |
| "step": 326000 | |
| }, | |
| { | |
| "epoch": 9.59, | |
| "learning_rate": 2.0515757629159695e-06, | |
| "loss": 2.0024, | |
| "step": 326500 | |
| }, | |
| { | |
| "epoch": 9.6, | |
| "learning_rate": 1.978147854436514e-06, | |
| "loss": 1.9995, | |
| "step": 327000 | |
| }, | |
| { | |
| "epoch": 9.62, | |
| "learning_rate": 1.9047199459570595e-06, | |
| "loss": 1.9989, | |
| "step": 327500 | |
| }, | |
| { | |
| "epoch": 9.63, | |
| "learning_rate": 1.8312920374776044e-06, | |
| "loss": 2.0, | |
| "step": 328000 | |
| }, | |
| { | |
| "epoch": 9.65, | |
| "learning_rate": 1.7578641289981498e-06, | |
| "loss": 1.9958, | |
| "step": 328500 | |
| }, | |
| { | |
| "epoch": 9.66, | |
| "learning_rate": 1.684436220518695e-06, | |
| "loss": 1.9983, | |
| "step": 329000 | |
| }, | |
| { | |
| "epoch": 9.68, | |
| "learning_rate": 1.6110083120392398e-06, | |
| "loss": 1.9986, | |
| "step": 329500 | |
| }, | |
| { | |
| "epoch": 9.69, | |
| "learning_rate": 1.537580403559785e-06, | |
| "loss": 2.0003, | |
| "step": 330000 | |
| }, | |
| { | |
| "epoch": 9.71, | |
| "learning_rate": 1.4641524950803303e-06, | |
| "loss": 1.9978, | |
| "step": 330500 | |
| }, | |
| { | |
| "epoch": 9.72, | |
| "learning_rate": 1.3907245866008753e-06, | |
| "loss": 2.0016, | |
| "step": 331000 | |
| }, | |
| { | |
| "epoch": 9.74, | |
| "learning_rate": 1.3172966781214204e-06, | |
| "loss": 2.0, | |
| "step": 331500 | |
| }, | |
| { | |
| "epoch": 9.75, | |
| "learning_rate": 1.2438687696419656e-06, | |
| "loss": 1.9973, | |
| "step": 332000 | |
| }, | |
| { | |
| "epoch": 9.77, | |
| "learning_rate": 1.1704408611625106e-06, | |
| "loss": 2.0012, | |
| "step": 332500 | |
| }, | |
| { | |
| "epoch": 9.78, | |
| "learning_rate": 1.0970129526830556e-06, | |
| "loss": 2.0012, | |
| "step": 333000 | |
| }, | |
| { | |
| "epoch": 9.8, | |
| "learning_rate": 1.023585044203601e-06, | |
| "loss": 1.9966, | |
| "step": 333500 | |
| }, | |
| { | |
| "epoch": 9.81, | |
| "learning_rate": 9.501571357241461e-07, | |
| "loss": 2.0031, | |
| "step": 334000 | |
| }, | |
| { | |
| "epoch": 9.82, | |
| "learning_rate": 8.767292272446912e-07, | |
| "loss": 1.9996, | |
| "step": 334500 | |
| }, | |
| { | |
| "epoch": 9.84, | |
| "learning_rate": 8.033013187652363e-07, | |
| "loss": 1.9967, | |
| "step": 335000 | |
| }, | |
| { | |
| "epoch": 9.85, | |
| "learning_rate": 7.298734102857815e-07, | |
| "loss": 2.0005, | |
| "step": 335500 | |
| }, | |
| { | |
| "epoch": 9.87, | |
| "learning_rate": 6.564455018063266e-07, | |
| "loss": 1.9987, | |
| "step": 336000 | |
| }, | |
| { | |
| "epoch": 9.88, | |
| "learning_rate": 5.830175933268716e-07, | |
| "loss": 1.9945, | |
| "step": 336500 | |
| }, | |
| { | |
| "epoch": 9.9, | |
| "learning_rate": 5.095896848474169e-07, | |
| "loss": 1.9971, | |
| "step": 337000 | |
| }, | |
| { | |
| "epoch": 9.91, | |
| "learning_rate": 4.3616177636796196e-07, | |
| "loss": 1.9995, | |
| "step": 337500 | |
| }, | |
| { | |
| "epoch": 9.93, | |
| "learning_rate": 3.627338678885071e-07, | |
| "loss": 1.9997, | |
| "step": 338000 | |
| }, | |
| { | |
| "epoch": 9.94, | |
| "learning_rate": 2.893059594090522e-07, | |
| "loss": 1.9993, | |
| "step": 338500 | |
| }, | |
| { | |
| "epoch": 9.96, | |
| "learning_rate": 2.1587805092959734e-07, | |
| "loss": 1.9961, | |
| "step": 339000 | |
| }, | |
| { | |
| "epoch": 9.97, | |
| "learning_rate": 1.4245014245014247e-07, | |
| "loss": 1.9989, | |
| "step": 339500 | |
| }, | |
| { | |
| "epoch": 9.99, | |
| "learning_rate": 6.902223397068759e-08, | |
| "loss": 1.9942, | |
| "step": 340000 | |
| }, | |
| { | |
| "epoch": 10.0, | |
| "learning_rate": 4.541449386810212e-06, | |
| "loss": 2.0143, | |
| "step": 340500 | |
| }, | |
| { | |
| "epoch": 10.02, | |
| "learning_rate": 4.47469674273798e-06, | |
| "loss": 1.9987, | |
| "step": 341000 | |
| }, | |
| { | |
| "epoch": 10.03, | |
| "learning_rate": 4.407944098665749e-06, | |
| "loss": 2.0004, | |
| "step": 341500 | |
| }, | |
| { | |
| "epoch": 10.04, | |
| "learning_rate": 4.341191454593517e-06, | |
| "loss": 2.0011, | |
| "step": 342000 | |
| }, | |
| { | |
| "epoch": 10.06, | |
| "learning_rate": 4.2744388105212845e-06, | |
| "loss": 1.9983, | |
| "step": 342500 | |
| }, | |
| { | |
| "epoch": 10.07, | |
| "learning_rate": 4.207686166449053e-06, | |
| "loss": 1.9996, | |
| "step": 343000 | |
| }, | |
| { | |
| "epoch": 10.09, | |
| "learning_rate": 4.140933522376821e-06, | |
| "loss": 1.9999, | |
| "step": 343500 | |
| }, | |
| { | |
| "epoch": 10.1, | |
| "learning_rate": 4.07418087830459e-06, | |
| "loss": 1.9965, | |
| "step": 344000 | |
| }, | |
| { | |
| "epoch": 10.12, | |
| "learning_rate": 4.007428234232359e-06, | |
| "loss": 1.9985, | |
| "step": 344500 | |
| }, | |
| { | |
| "epoch": 10.13, | |
| "learning_rate": 3.9406755901601265e-06, | |
| "loss": 1.996, | |
| "step": 345000 | |
| }, | |
| { | |
| "epoch": 10.15, | |
| "learning_rate": 3.873922946087894e-06, | |
| "loss": 2.0013, | |
| "step": 345500 | |
| }, | |
| { | |
| "epoch": 10.16, | |
| "learning_rate": 3.8071703020156635e-06, | |
| "loss": 1.9996, | |
| "step": 346000 | |
| }, | |
| { | |
| "epoch": 10.18, | |
| "learning_rate": 3.740417657943431e-06, | |
| "loss": 1.998, | |
| "step": 346500 | |
| }, | |
| { | |
| "epoch": 10.19, | |
| "learning_rate": 3.6736650138711998e-06, | |
| "loss": 1.9969, | |
| "step": 347000 | |
| }, | |
| { | |
| "epoch": 10.21, | |
| "learning_rate": 3.6069123697989677e-06, | |
| "loss": 2.0031, | |
| "step": 347500 | |
| }, | |
| { | |
| "epoch": 10.22, | |
| "learning_rate": 3.540159725726736e-06, | |
| "loss": 1.9984, | |
| "step": 348000 | |
| }, | |
| { | |
| "epoch": 10.24, | |
| "learning_rate": 3.4734070816545047e-06, | |
| "loss": 1.9997, | |
| "step": 348500 | |
| }, | |
| { | |
| "epoch": 10.25, | |
| "learning_rate": 3.4066544375822726e-06, | |
| "loss": 2.0024, | |
| "step": 349000 | |
| }, | |
| { | |
| "epoch": 10.27, | |
| "learning_rate": 3.3399017935100413e-06, | |
| "loss": 1.9929, | |
| "step": 349500 | |
| }, | |
| { | |
| "epoch": 10.28, | |
| "learning_rate": 3.2731491494378097e-06, | |
| "loss": 1.9966, | |
| "step": 350000 | |
| }, | |
| { | |
| "epoch": 10.29, | |
| "learning_rate": 3.2063965053655776e-06, | |
| "loss": 1.9985, | |
| "step": 350500 | |
| }, | |
| { | |
| "epoch": 10.31, | |
| "learning_rate": 3.1396438612933463e-06, | |
| "loss": 1.996, | |
| "step": 351000 | |
| }, | |
| { | |
| "epoch": 10.32, | |
| "learning_rate": 3.072891217221114e-06, | |
| "loss": 2.0028, | |
| "step": 351500 | |
| }, | |
| { | |
| "epoch": 10.34, | |
| "learning_rate": 3.0061385731488825e-06, | |
| "loss": 1.9982, | |
| "step": 352000 | |
| }, | |
| { | |
| "epoch": 10.35, | |
| "learning_rate": 2.939385929076651e-06, | |
| "loss": 1.9988, | |
| "step": 352500 | |
| }, | |
| { | |
| "epoch": 10.37, | |
| "learning_rate": 2.872633285004419e-06, | |
| "loss": 2.0011, | |
| "step": 353000 | |
| }, | |
| { | |
| "epoch": 10.38, | |
| "learning_rate": 2.8058806409321875e-06, | |
| "loss": 1.9968, | |
| "step": 353500 | |
| }, | |
| { | |
| "epoch": 10.4, | |
| "learning_rate": 2.7391279968599558e-06, | |
| "loss": 1.9984, | |
| "step": 354000 | |
| }, | |
| { | |
| "epoch": 10.41, | |
| "learning_rate": 2.672375352787724e-06, | |
| "loss": 1.9961, | |
| "step": 354500 | |
| }, | |
| { | |
| "epoch": 10.43, | |
| "learning_rate": 2.6056227087154924e-06, | |
| "loss": 1.9976, | |
| "step": 355000 | |
| }, | |
| { | |
| "epoch": 10.44, | |
| "learning_rate": 2.5388700646432607e-06, | |
| "loss": 1.996, | |
| "step": 355500 | |
| }, | |
| { | |
| "epoch": 10.46, | |
| "learning_rate": 2.472117420571029e-06, | |
| "loss": 2.0002, | |
| "step": 356000 | |
| }, | |
| { | |
| "epoch": 10.47, | |
| "learning_rate": 2.405364776498797e-06, | |
| "loss": 1.9951, | |
| "step": 356500 | |
| }, | |
| { | |
| "epoch": 10.49, | |
| "learning_rate": 2.3386121324265657e-06, | |
| "loss": 1.9955, | |
| "step": 357000 | |
| }, | |
| { | |
| "epoch": 10.5, | |
| "learning_rate": 2.271859488354334e-06, | |
| "loss": 1.9992, | |
| "step": 357500 | |
| }, | |
| { | |
| "epoch": 10.51, | |
| "learning_rate": 2.205106844282102e-06, | |
| "loss": 1.9965, | |
| "step": 358000 | |
| }, | |
| { | |
| "epoch": 10.53, | |
| "learning_rate": 2.1383542002098702e-06, | |
| "loss": 1.9992, | |
| "step": 358500 | |
| }, | |
| { | |
| "epoch": 10.54, | |
| "learning_rate": 2.071601556137639e-06, | |
| "loss": 1.9951, | |
| "step": 359000 | |
| }, | |
| { | |
| "epoch": 10.56, | |
| "learning_rate": 2.004848912065407e-06, | |
| "loss": 1.9972, | |
| "step": 359500 | |
| }, | |
| { | |
| "epoch": 10.57, | |
| "learning_rate": 1.938096267993175e-06, | |
| "loss": 1.9929, | |
| "step": 360000 | |
| }, | |
| { | |
| "epoch": 10.59, | |
| "learning_rate": 1.8713436239209435e-06, | |
| "loss": 2.0001, | |
| "step": 360500 | |
| }, | |
| { | |
| "epoch": 10.6, | |
| "learning_rate": 1.804590979848712e-06, | |
| "loss": 1.9963, | |
| "step": 361000 | |
| }, | |
| { | |
| "epoch": 10.62, | |
| "learning_rate": 1.7378383357764801e-06, | |
| "loss": 1.9988, | |
| "step": 361500 | |
| }, | |
| { | |
| "epoch": 10.63, | |
| "learning_rate": 1.6710856917042484e-06, | |
| "loss": 1.9949, | |
| "step": 362000 | |
| }, | |
| { | |
| "epoch": 10.65, | |
| "learning_rate": 1.6043330476320166e-06, | |
| "loss": 1.9968, | |
| "step": 362500 | |
| }, | |
| { | |
| "epoch": 10.66, | |
| "learning_rate": 1.537580403559785e-06, | |
| "loss": 1.9943, | |
| "step": 363000 | |
| }, | |
| { | |
| "epoch": 10.68, | |
| "learning_rate": 1.4708277594875534e-06, | |
| "loss": 1.9962, | |
| "step": 363500 | |
| }, | |
| { | |
| "epoch": 10.69, | |
| "learning_rate": 1.4040751154153217e-06, | |
| "loss": 1.9959, | |
| "step": 364000 | |
| }, | |
| { | |
| "epoch": 10.71, | |
| "learning_rate": 1.33732247134309e-06, | |
| "loss": 1.9948, | |
| "step": 364500 | |
| }, | |
| { | |
| "epoch": 10.72, | |
| "learning_rate": 1.2705698272708584e-06, | |
| "loss": 1.9995, | |
| "step": 365000 | |
| }, | |
| { | |
| "epoch": 10.74, | |
| "learning_rate": 1.2038171831986265e-06, | |
| "loss": 1.9945, | |
| "step": 365500 | |
| }, | |
| { | |
| "epoch": 10.75, | |
| "learning_rate": 1.137064539126395e-06, | |
| "loss": 1.9973, | |
| "step": 366000 | |
| }, | |
| { | |
| "epoch": 10.76, | |
| "learning_rate": 1.070311895054163e-06, | |
| "loss": 1.9957, | |
| "step": 366500 | |
| }, | |
| { | |
| "epoch": 10.78, | |
| "learning_rate": 1.0035592509819314e-06, | |
| "loss": 1.9979, | |
| "step": 367000 | |
| }, | |
| { | |
| "epoch": 10.79, | |
| "learning_rate": 9.368066069096997e-07, | |
| "loss": 1.9988, | |
| "step": 367500 | |
| }, | |
| { | |
| "epoch": 10.81, | |
| "learning_rate": 8.70053962837468e-07, | |
| "loss": 1.9961, | |
| "step": 368000 | |
| }, | |
| { | |
| "epoch": 10.82, | |
| "learning_rate": 8.033013187652363e-07, | |
| "loss": 1.9977, | |
| "step": 368500 | |
| }, | |
| { | |
| "epoch": 10.84, | |
| "learning_rate": 7.365486746930046e-07, | |
| "loss": 1.9967, | |
| "step": 369000 | |
| }, | |
| { | |
| "epoch": 10.85, | |
| "learning_rate": 6.697960306207729e-07, | |
| "loss": 1.9953, | |
| "step": 369500 | |
| }, | |
| { | |
| "epoch": 10.87, | |
| "learning_rate": 6.030433865485412e-07, | |
| "loss": 1.9931, | |
| "step": 370000 | |
| }, | |
| { | |
| "epoch": 10.88, | |
| "learning_rate": 5.362907424763095e-07, | |
| "loss": 1.9968, | |
| "step": 370500 | |
| }, | |
| { | |
| "epoch": 10.9, | |
| "learning_rate": 4.695380984040778e-07, | |
| "loss": 1.9948, | |
| "step": 371000 | |
| }, | |
| { | |
| "epoch": 10.91, | |
| "learning_rate": 4.0278545433184606e-07, | |
| "loss": 1.9966, | |
| "step": 371500 | |
| }, | |
| { | |
| "epoch": 10.93, | |
| "learning_rate": 3.360328102596144e-07, | |
| "loss": 1.9981, | |
| "step": 372000 | |
| }, | |
| { | |
| "epoch": 10.94, | |
| "learning_rate": 2.692801661873827e-07, | |
| "loss": 1.9984, | |
| "step": 372500 | |
| }, | |
| { | |
| "epoch": 10.96, | |
| "learning_rate": 2.02527522115151e-07, | |
| "loss": 2.0005, | |
| "step": 373000 | |
| }, | |
| { | |
| "epoch": 10.97, | |
| "learning_rate": 1.3577487804291928e-07, | |
| "loss": 1.9957, | |
| "step": 373500 | |
| }, | |
| { | |
| "epoch": 10.98, | |
| "learning_rate": 6.902223397068759e-08, | |
| "loss": 1.9957, | |
| "step": 374000 | |
| }, | |
| { | |
| "epoch": 11.0, | |
| "learning_rate": 2.2695898984558777e-09, | |
| "loss": 1.9939, | |
| "step": 374500 | |
| }, | |
| { | |
| "epoch": 11.0, | |
| "step": 374517, | |
| "total_flos": 9.502046248868659e+17, | |
| "train_loss": 0.1815926024128583, | |
| "train_runtime": 12938.8886, | |
| "train_samples_per_second": 7409.842, | |
| "train_steps_per_second": 28.945 | |
| } | |
| ], | |
| "logging_steps": 500, | |
| "max_steps": 374517, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 11, | |
| "save_steps": 100000.0, | |
| "total_flos": 9.502046248868659e+17, | |
| "train_batch_size": 256, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
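
The record above is the raw `trainer_state.json` emitted by the Hugging Face `Trainer`. Two features of its tail are worth noting when reading it: the learning rate falls to ~6.9e-08 by step 340,000 and then jumps back up to ~4.54e-06 at step 340,500 (epoch 10.0), consistent with training having been resumed and extended past an original 10-epoch schedule with a fresh linear decay; and the summary `train_loss` of ~0.182 sits far below the ~2.0 losses logged throughout, consistent with the loss being summed only over the steps actually run in the final resumed session while being divided by all 374,517 global steps (2.0 × 34,017 / 374,517 ≈ 0.182). The `train_runtime` and throughput figures would be skewed by the same resume effect.

A minimal sketch for inspecting such a file, assuming it is saved locally as `trainer_state.json` (the filename and output path are illustrative, not part of the log):

```python
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the periodic logging records; the final summary record
# carries "train_loss"/"train_runtime" instead of "loss"/"learning_rate",
# so filtering on key presence drops it.
logs = [e for e in state["log_history"] if "loss" in e and "learning_rate" in e]

steps = [e["step"] for e in logs]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True, figsize=(8, 6))
ax_loss.plot(steps, [e["loss"] for e in logs])
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, [e["learning_rate"] for e in logs])
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("global step")
fig.tight_layout()
fig.savefig("training_curves.png")
```

Plotted this way, the schedule restart at step 340,500 and the flat ~2.0 loss plateau over the final epochs are immediately visible.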