{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 287076,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 4.991291504688654e-05, "loss": 5.9932, "step": 500 },
    { "epoch": 0.01, "learning_rate": 4.982583009377308e-05, "loss": 4.5752, "step": 1000 },
    { "epoch": 0.02, "learning_rate": 4.973874514065962e-05, "loss": 3.9864, "step": 1500 },
    { "epoch": 0.02, "learning_rate": 4.9651660187546154e-05, "loss": 3.7187, "step": 2000 },
    { "epoch": 0.03, "learning_rate": 4.95645752344327e-05, "loss": 3.5442, "step": 2500 },
    { "epoch": 0.03, "learning_rate": 4.947749028131923e-05, "loss": 3.378, "step": 3000 },
    { "epoch": 0.04, "learning_rate": 4.939040532820577e-05, "loss": 3.2869, "step": 3500 },
    { "epoch": 0.04, "learning_rate": 4.930332037509231e-05, "loss": 3.2232, "step": 4000 },
    { "epoch": 0.05, "learning_rate": 4.921623542197885e-05, "loss": 3.1339, "step": 4500 },
    { "epoch": 0.05, "learning_rate": 4.912915046886539e-05, "loss": 3.1246, "step": 5000 },
    { "epoch": 0.06, "learning_rate": 4.904206551575193e-05, "loss": 3.0439, "step": 5500 },
    { "epoch": 0.06, "learning_rate": 4.895498056263847e-05, "loss": 2.9875, "step": 6000 },
    { "epoch": 0.07, "learning_rate": 4.8867895609525004e-05, "loss": 2.9797, "step": 6500 },
    { "epoch": 0.07, "learning_rate": 4.878081065641154e-05, "loss": 2.9155, "step": 7000 },
    { "epoch": 0.08, "learning_rate": 4.869372570329808e-05, "loss": 2.9334, "step": 7500 },
    { "epoch": 0.08, "learning_rate": 4.860664075018462e-05, "loss": 2.8763, "step": 8000 },
    { "epoch": 0.09, "learning_rate": 4.851955579707116e-05, "loss": 2.8501, "step": 8500 },
    { "epoch": 0.09, "learning_rate": 4.84324708439577e-05, "loss": 2.825, "step": 9000 },
    { "epoch": 0.1, "learning_rate": 4.834538589084424e-05, "loss": 2.8267, "step": 9500 },
    { "epoch": 0.1, "learning_rate": 4.8258300937730774e-05, "loss": 2.7971, "step": 10000 },
    { "epoch": 0.11, "learning_rate": 4.817121598461732e-05, "loss": 2.7697, "step": 10500 },
    { "epoch": 0.11, "learning_rate": 4.808413103150385e-05, "loss": 2.7516, "step": 11000 },
    { "epoch": 0.12, "learning_rate": 4.799704607839039e-05, "loss": 2.721, "step": 11500 },
    { "epoch": 0.13, "learning_rate": 4.790996112527693e-05, "loss": 2.6979, "step": 12000 },
    { "epoch": 0.13, "learning_rate": 4.782287617216347e-05, "loss": 2.6872, "step": 12500 },
    { "epoch": 0.14, "learning_rate": 4.7735791219050005e-05, "loss": 2.6909, "step": 13000 },
    { "epoch": 0.14, "learning_rate": 4.764870626593655e-05, "loss": 2.664, "step": 13500 },
    { "epoch": 0.15, "learning_rate": 4.7561621312823084e-05, "loss": 2.6515, "step": 14000 },
    { "epoch": 0.15, "learning_rate": 4.747453635970962e-05, "loss": 2.655, "step": 14500 },
    { "epoch": 0.16, "learning_rate": 4.738745140659617e-05, "loss": 2.6327, "step": 15000 },
    { "epoch": 0.16, "learning_rate": 4.73003664534827e-05, "loss": 2.6356, "step": 15500 },
    { "epoch": 0.17, "learning_rate": 4.721328150036924e-05, "loss": 2.6308, "step": 16000 },
    { "epoch": 0.17, "learning_rate": 4.712619654725578e-05, "loss": 2.6302, "step": 16500 },
    { "epoch": 0.18, "learning_rate": 4.703911159414232e-05, "loss": 2.6208, "step": 17000 },
    { "epoch": 0.18, "learning_rate": 4.6952026641028854e-05, "loss": 2.6159, "step": 17500 },
    { "epoch": 0.19, "learning_rate": 4.68649416879154e-05, "loss": 2.5992, "step": 18000 },
    { "epoch": 0.19, "learning_rate": 4.677785673480193e-05, "loss": 2.5901, "step": 18500 },
    { "epoch": 0.2, "learning_rate": 4.669077178168847e-05, "loss": 2.5911, "step": 19000 },
    { "epoch": 0.2, "learning_rate": 4.660368682857502e-05, "loss": 2.5278, "step": 19500 },
    { "epoch": 0.21, "learning_rate": 4.651660187546155e-05, "loss": 2.5569, "step": 20000 },
    { "epoch": 0.21, "learning_rate": 4.642951692234809e-05, "loss": 2.5522, "step": 20500 },
    { "epoch": 0.22, "learning_rate": 4.634243196923463e-05, "loss": 2.5293, "step": 21000 },
    { "epoch": 0.22, "learning_rate": 4.625534701612117e-05, "loss": 2.5006, "step": 21500 },
    { "epoch": 0.23, "learning_rate": 4.6168262063007704e-05, "loss": 2.5361, "step": 22000 },
    { "epoch": 0.24, "learning_rate": 4.608117710989425e-05, "loss": 2.5116, "step": 22500 },
    { "epoch": 0.24, "learning_rate": 4.599409215678078e-05, "loss": 2.4744, "step": 23000 },
    { "epoch": 0.25, "learning_rate": 4.590700720366732e-05, "loss": 2.5232, "step": 23500 },
    { "epoch": 0.25, "learning_rate": 4.581992225055386e-05, "loss": 2.4808, "step": 24000 },
    { "epoch": 0.26, "learning_rate": 4.57328372974404e-05, "loss": 2.4899, "step": 24500 },
    { "epoch": 0.26, "learning_rate": 4.564575234432694e-05, "loss": 2.4656, "step": 25000 },
    { "epoch": 0.27, "learning_rate": 4.555866739121348e-05, "loss": 2.4816, "step": 25500 },
    { "epoch": 0.27, "learning_rate": 4.547158243810002e-05, "loss": 2.4736, "step": 26000 },
    { "epoch": 0.28, "learning_rate": 4.538449748498655e-05, "loss": 2.4767, "step": 26500 },
    { "epoch": 0.28, "learning_rate": 4.529741253187309e-05, "loss": 2.4841, "step": 27000 },
    { "epoch": 0.29, "learning_rate": 4.521032757875963e-05, "loss": 2.4883, "step": 27500 },
    { "epoch": 0.29, "learning_rate": 4.512324262564617e-05, "loss": 2.4851, "step": 28000 },
    { "epoch": 0.3, "learning_rate": 4.503615767253271e-05, "loss": 2.4384, "step": 28500 },
    { "epoch": 0.3, "learning_rate": 4.494907271941925e-05, "loss": 2.4828, "step": 29000 },
    { "epoch": 0.31, "learning_rate": 4.486198776630579e-05, "loss": 2.4509, "step": 29500 },
    { "epoch": 0.31, "learning_rate": 4.4774902813192324e-05, "loss": 2.4772, "step": 30000 },
    { "epoch": 0.32, "learning_rate": 4.468781786007887e-05, "loss": 2.4236, "step": 30500 },
    { "epoch": 0.32, "learning_rate": 4.46007329069654e-05, "loss": 2.4359, "step": 31000 },
    { "epoch": 0.33, "learning_rate": 4.451364795385194e-05, "loss": 2.4348, "step": 31500 },
    { "epoch": 0.33, "learning_rate": 4.442656300073848e-05, "loss": 2.4512, "step": 32000 },
    { "epoch": 0.34, "learning_rate": 4.433947804762502e-05, "loss": 2.4132, "step": 32500 },
    { "epoch": 0.34, "learning_rate": 4.425239309451156e-05, "loss": 2.4363, "step": 33000 },
    { "epoch": 0.35, "learning_rate": 4.41653081413981e-05, "loss": 2.4347, "step": 33500 },
    { "epoch": 0.36, "learning_rate": 4.407822318828464e-05, "loss": 2.3936, "step": 34000 },
    { "epoch": 0.36, "learning_rate": 4.399113823517117e-05, "loss": 2.4141, "step": 34500 },
    { "epoch": 0.37, "learning_rate": 4.390405328205772e-05, "loss": 2.4454, "step": 35000 },
    { "epoch": 0.37, "learning_rate": 4.381696832894425e-05, "loss": 2.398, "step": 35500 },
    { "epoch": 0.38, "learning_rate": 4.372988337583079e-05, "loss": 2.4122, "step": 36000 },
    { "epoch": 0.38, "learning_rate": 4.364279842271733e-05, "loss": 2.4089, "step": 36500 },
    { "epoch": 0.39, "learning_rate": 4.355571346960387e-05, "loss": 2.3848, "step": 37000 },
    { "epoch": 0.39, "learning_rate": 4.3468628516490404e-05, "loss": 2.3909, "step": 37500 },
    { "epoch": 0.4, "learning_rate": 4.338154356337695e-05, "loss": 2.4077, "step": 38000 },
    { "epoch": 0.4, "learning_rate": 4.329445861026349e-05, "loss": 2.3876, "step": 38500 },
    { "epoch": 0.41, "learning_rate": 4.320737365715002e-05, "loss": 2.3946, "step": 39000 },
    { "epoch": 0.41, "learning_rate": 4.312028870403656e-05, "loss": 2.3676, "step": 39500 },
    { "epoch": 0.42, "learning_rate": 4.30332037509231e-05, "loss": 2.3862, "step": 40000 },
    { "epoch": 0.42, "learning_rate": 4.294611879780964e-05, "loss": 2.3724, "step": 40500 },
    { "epoch": 0.43, "learning_rate": 4.285903384469618e-05, "loss": 2.3735, "step": 41000 },
    { "epoch": 0.43, "learning_rate": 4.277194889158272e-05, "loss": 2.3679, "step": 41500 },
    { "epoch": 0.44, "learning_rate": 4.268486393846925e-05, "loss": 2.3536, "step": 42000 },
    { "epoch": 0.44, "learning_rate": 4.259777898535579e-05, "loss": 2.3705, "step": 42500 },
    { "epoch": 0.45, "learning_rate": 4.251069403224234e-05, "loss": 2.3386, "step": 43000 },
    { "epoch": 0.45, "learning_rate": 4.242360907912887e-05, "loss": 2.3387, "step": 43500 },
    { "epoch": 0.46, "learning_rate": 4.233652412601541e-05, "loss": 2.3283, "step": 44000 },
    { "epoch": 0.47, "learning_rate": 4.224943917290195e-05, "loss": 2.3456, "step": 44500 },
    { "epoch": 0.47, "learning_rate": 4.216235421978849e-05, "loss": 2.3318, "step": 45000 },
    { "epoch": 0.48, "learning_rate": 4.2075269266675024e-05, "loss": 2.3478, "step": 45500 },
    { "epoch": 0.48, "learning_rate": 4.198818431356157e-05, "loss": 2.3435, "step": 46000 },
    { "epoch": 0.49, "learning_rate": 4.19010993604481e-05, "loss": 2.3509, "step": 46500 },
    { "epoch": 0.49, "learning_rate": 4.181401440733464e-05, "loss": 2.3666, "step": 47000 },
    { "epoch": 0.5, "learning_rate": 4.172692945422119e-05, "loss": 2.3244, "step": 47500 },
    { "epoch": 0.5, "learning_rate": 4.163984450110772e-05, "loss": 2.3507, "step": 48000 },
    { "epoch": 0.51, "learning_rate": 4.155275954799426e-05, "loss": 2.2845, "step": 48500 },
    { "epoch": 0.51, "learning_rate": 4.14656745948808e-05, "loss": 2.3202, "step": 49000 },
    { "epoch": 0.52, "learning_rate": 4.137858964176734e-05, "loss": 2.3355, "step": 49500 },
    { "epoch": 0.52, "learning_rate": 4.129150468865387e-05, "loss": 2.31, "step": 50000 },
    { "epoch": 0.53, "learning_rate": 4.120441973554042e-05, "loss": 2.2929, "step": 50500 },
    { "epoch": 0.53, "learning_rate": 4.111733478242695e-05, "loss": 2.2909, "step": 51000 },
    { "epoch": 0.54, "learning_rate": 4.103024982931349e-05, "loss": 2.2868, "step": 51500 },
    { "epoch": 0.54, "learning_rate": 4.094316487620004e-05, "loss": 2.2951, "step": 52000 },
    { "epoch": 0.55, "learning_rate": 4.085607992308657e-05, "loss": 2.2917, "step": 52500 },
    { "epoch": 0.55, "learning_rate": 4.076899496997311e-05, "loss": 2.3179, "step": 53000 },
    { "epoch": 0.56, "learning_rate": 4.068191001685965e-05, "loss": 2.2887, "step": 53500 },
    { "epoch": 0.56, "learning_rate": 4.059482506374619e-05, "loss": 2.2864, "step": 54000 },
    { "epoch": 0.57, "learning_rate": 4.050774011063272e-05, "loss": 2.2646, "step": 54500 },
    { "epoch": 0.57, "learning_rate": 4.042065515751927e-05, "loss": 2.3021, "step": 55000 },
    { "epoch": 0.58, "learning_rate": 4.03335702044058e-05, "loss": 2.2817, "step": 55500 },
    { "epoch": 0.59, "learning_rate": 4.024648525129234e-05, "loss": 2.2674, "step": 56000 },
    { "epoch": 0.59, "learning_rate": 4.015940029817888e-05, "loss": 2.296, "step": 56500 },
    { "epoch": 0.6, "learning_rate": 4.007231534506542e-05, "loss": 2.2639, "step": 57000 },
    { "epoch": 0.6, "learning_rate": 3.998523039195196e-05, "loss": 2.2967, "step": 57500 },
    { "epoch": 0.61, "learning_rate": 3.98981454388385e-05, "loss": 2.245, "step": 58000 },
    { "epoch": 0.61, "learning_rate": 3.981106048572504e-05, "loss": 2.2482, "step": 58500 },
    { "epoch": 0.62, "learning_rate": 3.972397553261157e-05, "loss": 2.2715, "step": 59000 },
    { "epoch": 0.62, "learning_rate": 3.963689057949811e-05, "loss": 2.251, "step": 59500 },
    { "epoch": 0.63, "learning_rate": 3.954980562638465e-05, "loss": 2.2595, "step": 60000 },
    { "epoch": 0.63, "learning_rate": 3.946272067327119e-05, "loss": 2.2864, "step": 60500 },
    { "epoch": 0.64, "learning_rate": 3.937563572015773e-05, "loss": 2.2546, "step": 61000 },
    { "epoch": 0.64, "learning_rate": 3.928855076704427e-05, "loss": 2.2461, "step": 61500 },
    { "epoch": 0.65, "learning_rate": 3.920146581393081e-05, "loss": 2.2758, "step": 62000 },
    { "epoch": 0.65, "learning_rate": 3.911438086081734e-05, "loss": 2.2608, "step": 62500 },
    { "epoch": 0.66, "learning_rate": 3.902729590770389e-05, "loss": 2.2611, "step": 63000 },
    { "epoch": 0.66, "learning_rate": 3.894021095459042e-05, "loss": 2.252, "step": 63500 },
    { "epoch": 0.67, "learning_rate": 3.885312600147696e-05, "loss": 2.2721, "step": 64000 },
    { "epoch": 0.67, "learning_rate": 3.87660410483635e-05, "loss": 2.2638, "step": 64500 },
    { "epoch": 0.68, "learning_rate": 3.867895609525004e-05, "loss": 2.2493, "step": 65000 },
    { "epoch": 0.68, "learning_rate": 3.859187114213657e-05, "loss": 2.2404, "step": 65500 },
    { "epoch": 0.69, "learning_rate": 3.850478618902312e-05, "loss": 2.2588, "step": 66000 },
    { "epoch": 0.69, "learning_rate": 3.841770123590966e-05, "loss": 2.2567, "step": 66500 },
    { "epoch": 0.7, "learning_rate": 3.833061628279619e-05, "loss": 2.2333, "step": 67000 },
    { "epoch": 0.71, "learning_rate": 3.824353132968274e-05, "loss": 2.2266, "step": 67500 },
    { "epoch": 0.71, "learning_rate": 3.815644637656927e-05, "loss": 2.2122, "step": 68000 },
    { "epoch": 0.72, "learning_rate": 3.806936142345581e-05, "loss": 2.2624, "step": 68500 },
    { "epoch": 0.72, "learning_rate": 3.798227647034235e-05, "loss": 2.1801, "step": 69000 },
    { "epoch": 0.73, "learning_rate": 3.789519151722889e-05, "loss": 2.2352, "step": 69500 },
    { "epoch": 0.73, "learning_rate": 3.780810656411542e-05, "loss": 2.2211, "step": 70000 },
    { "epoch": 0.74, "learning_rate": 3.772102161100197e-05, "loss": 2.2461, "step": 70500 },
    { "epoch": 0.74, "learning_rate": 3.763393665788851e-05, "loss": 2.2082, "step": 71000 },
    { "epoch": 0.75, "learning_rate": 3.754685170477504e-05, "loss": 2.2128, "step": 71500 },
    { "epoch": 0.75, "learning_rate": 3.745976675166159e-05, "loss": 2.2142, "step": 72000 },
    { "epoch": 0.76, "learning_rate": 3.737268179854812e-05, "loss": 2.2365, "step": 72500 },
    { "epoch": 0.76, "learning_rate": 3.728559684543466e-05, "loss": 2.2143, "step": 73000 },
    { "epoch": 0.77, "learning_rate": 3.71985118923212e-05, "loss": 2.2392, "step": 73500 },
    { "epoch": 0.77, "learning_rate": 3.711142693920774e-05, "loss": 2.2237, "step": 74000 },
    { "epoch": 0.78, "learning_rate": 3.702434198609427e-05, "loss": 2.1729, "step": 74500 },
    { "epoch": 0.78, "learning_rate": 3.693725703298082e-05, "loss": 2.2192, "step": 75000 },
    { "epoch": 0.79, "learning_rate": 3.685017207986736e-05, "loss": 2.2138, "step": 75500 },
    { "epoch": 0.79, "learning_rate": 3.676308712675389e-05, "loss": 2.2359, "step": 76000 },
    { "epoch": 0.8, "learning_rate": 3.667600217364043e-05, "loss": 2.1912, "step": 76500 },
    { "epoch": 0.8, "learning_rate": 3.658891722052697e-05, "loss": 2.2056, "step": 77000 },
    { "epoch": 0.81, "learning_rate": 3.650183226741351e-05, "loss": 2.1702, "step": 77500 },
    { "epoch": 0.82, "learning_rate": 3.641474731430004e-05, "loss": 2.1527, "step": 78000 },
    { "epoch": 0.82, "learning_rate": 3.632766236118659e-05, "loss": 2.1872, "step": 78500 },
    { "epoch": 0.83, "learning_rate": 3.624057740807312e-05, "loss": 2.1901, "step": 79000 },
    { "epoch": 0.83, "learning_rate": 3.615349245495966e-05, "loss": 2.1773, "step": 79500 },
    { "epoch": 0.84, "learning_rate": 3.60664075018462e-05, "loss": 2.1708, "step": 80000 },
    { "epoch": 0.84, "learning_rate": 3.597932254873274e-05, "loss": 2.2007, "step": 80500 },
    { "epoch": 0.85, "learning_rate": 3.589223759561928e-05, "loss": 2.1826, "step": 81000 },
    { "epoch": 0.85, "learning_rate": 3.580515264250582e-05, "loss": 2.1822, "step": 81500 },
    { "epoch": 0.86, "learning_rate": 3.571806768939236e-05, "loss": 2.1832, "step": 82000 },
    { "epoch": 0.86, "learning_rate": 3.563098273627889e-05, "loss": 2.1715, "step": 82500 },
    { "epoch": 0.87, "learning_rate": 3.554389778316544e-05, "loss": 2.1889, "step": 83000 },
    { "epoch": 0.87, "learning_rate": 3.545681283005197e-05, "loss": 2.1901, "step": 83500 },
    { "epoch": 0.88, "learning_rate": 3.536972787693851e-05, "loss": 2.1805, "step": 84000 },
    { "epoch": 0.88, "learning_rate": 3.528264292382505e-05, "loss": 2.1482, "step": 84500 },
    { "epoch": 0.89, "learning_rate": 3.519555797071159e-05, "loss": 2.189, "step": 85000 },
    { "epoch": 0.89, "learning_rate": 3.510847301759813e-05, "loss": 2.1871, "step": 85500 },
    { "epoch": 0.9, "learning_rate": 3.502138806448467e-05, "loss": 2.1471, "step": 86000 },
    { "epoch": 0.9, "learning_rate": 3.493430311137121e-05, "loss": 2.1751, "step": 86500 },
    { "epoch": 0.91, "learning_rate": 3.484721815825774e-05, "loss": 2.1785, "step": 87000 },
    { "epoch": 0.91, "learning_rate": 3.476013320514429e-05, "loss": 2.1597, "step": 87500 },
    { "epoch": 0.92, "learning_rate": 3.467304825203082e-05, "loss": 2.1878, "step": 88000 },
    { "epoch": 0.92, "learning_rate": 3.458596329891736e-05, "loss": 2.168, "step": 88500 },
    { "epoch": 0.93, "learning_rate": 3.44988783458039e-05, "loss": 2.1748, "step": 89000 },
    { "epoch": 0.94, "learning_rate": 3.441179339269044e-05, "loss": 2.2079, "step": 89500 },
    { "epoch": 0.94, "learning_rate": 3.432470843957698e-05, "loss": 2.1503, "step": 90000 },
    { "epoch": 0.95, "learning_rate": 3.423762348646352e-05, "loss": 2.1362, "step": 90500 },
    { "epoch": 0.95, "learning_rate": 3.415053853335006e-05, "loss": 2.1269, "step": 91000 },
    { "epoch": 0.96, "learning_rate": 3.406345358023659e-05, "loss": 2.1609, "step": 91500 },
    { "epoch": 0.96, "learning_rate": 3.397636862712313e-05, "loss": 2.1505, "step": 92000 },
    { "epoch": 0.97, "learning_rate": 3.388928367400967e-05, "loss": 2.1131, "step": 92500 },
    { "epoch": 0.97, "learning_rate": 3.380219872089621e-05, "loss": 2.1455, "step": 93000 },
    { "epoch": 0.98, "learning_rate": 3.371511376778275e-05, "loss": 2.1455, "step": 93500 },
    { "epoch": 0.98, "learning_rate": 3.362802881466929e-05, "loss": 2.1504, "step": 94000 },
    { "epoch": 0.99, "learning_rate": 3.354094386155583e-05, "loss": 2.1506, "step": 94500 },
    { "epoch": 0.99, "learning_rate": 3.345385890844236e-05, "loss": 2.1657, "step": 95000 },
    { "epoch": 1.0, "learning_rate": 3.336677395532891e-05, "loss": 2.1465, "step": 95500 },
    { "epoch": 1.0, "learning_rate": 3.327968900221544e-05, "loss": 2.1235, "step": 96000 },
    { "epoch": 1.01, "learning_rate": 3.319260404910198e-05, "loss": 2.101, "step": 96500 },
    { "epoch": 1.01, "learning_rate": 3.310551909598852e-05, "loss": 2.1058, "step": 97000 },
    { "epoch": 1.02, "learning_rate": 3.301843414287506e-05, "loss": 2.0882, "step": 97500 },
    { "epoch": 1.02, "learning_rate": 3.293134918976159e-05, "loss": 2.0591, "step": 98000 },
    { "epoch": 1.03, "learning_rate": 3.284426423664814e-05, "loss": 2.123, "step": 98500 },
    { "epoch": 1.03, "learning_rate": 3.275717928353468e-05, "loss": 2.07, "step": 99000 },
    { "epoch": 1.04, "learning_rate": 3.267009433042121e-05, "loss": 2.1077, "step": 99500 },
    { "epoch": 1.05, "learning_rate": 3.258300937730776e-05, "loss": 2.0761, "step": 100000 },
    { "epoch": 1.05, "learning_rate": 3.249592442419429e-05, "loss": 2.0634, "step": 100500 },
    { "epoch": 1.06, "learning_rate": 3.240883947108083e-05, "loss": 2.1005, "step": 101000 },
    { "epoch": 1.06, "learning_rate": 3.232175451796737e-05, "loss": 2.108, "step": 101500 },
    { "epoch": 1.07, "learning_rate": 3.223466956485391e-05, "loss": 2.0803, "step": 102000 },
    { "epoch": 1.07, "learning_rate": 3.214758461174044e-05, "loss": 2.1421, "step": 102500 },
    { "epoch": 1.08, "learning_rate": 3.206049965862699e-05, "loss": 2.1066, "step": 103000 },
    { "epoch": 1.08, "learning_rate": 3.197341470551352e-05, "loss": 2.0903, "step": 103500 },
    { "epoch": 1.09, "learning_rate": 3.188632975240006e-05, "loss": 2.0872, "step": 104000 },
    { "epoch": 1.09, "learning_rate": 3.179924479928661e-05, "loss": 2.1146, "step": 104500 },
    { "epoch": 1.1, "learning_rate": 3.171215984617314e-05, "loss": 2.1223, "step": 105000 },
    { "epoch": 1.1, "learning_rate": 3.162507489305968e-05, "loss": 2.1103, "step": 105500 },
    { "epoch": 1.11, "learning_rate": 3.153798993994622e-05, "loss": 2.0907, "step": 106000 },
    { "epoch": 1.11, "learning_rate": 3.145090498683276e-05, "loss": 2.0805, "step": 106500 },
    { "epoch": 1.12, "learning_rate": 3.136382003371929e-05, "loss": 2.0863, "step": 107000 },
    { "epoch": 1.12, "learning_rate": 3.127673508060584e-05, "loss": 2.0719, "step": 107500 },
    { "epoch": 1.13, "learning_rate": 3.118965012749237e-05, "loss": 2.0808, "step": 108000 },
    { "epoch": 1.13, "learning_rate": 3.110256517437891e-05, "loss": 2.0602, "step": 108500 },
    { "epoch": 1.14, "learning_rate": 3.101548022126545e-05, "loss": 2.0837, "step": 109000 },
    { "epoch": 1.14, "learning_rate": 3.092839526815199e-05, "loss": 2.0494, "step": 109500 },
    { "epoch": 1.15, "learning_rate": 3.084131031503853e-05, "loss": 2.0456, "step": 110000 },
    { "epoch": 1.15, "learning_rate": 3.075422536192507e-05, "loss": 2.0486, "step": 110500 },
    { "epoch": 1.16, "learning_rate": 3.066714040881161e-05, "loss": 2.0681, "step": 111000 },
    { "epoch": 1.17, "learning_rate": 3.058005545569814e-05, "loss": 2.071, "step": 111500 },
    { "epoch": 1.17, "learning_rate": 3.0492970502584684e-05, "loss": 2.0743, "step": 112000 },
    { "epoch": 1.18, "learning_rate": 3.040588554947122e-05, "loss": 2.0461, "step": 112500 },
    { "epoch": 1.18, "learning_rate": 3.031880059635776e-05, "loss": 2.0534, "step": 113000 },
    { "epoch": 1.19, "learning_rate": 3.0231715643244303e-05, "loss": 2.0593, "step": 113500 },
    { "epoch": 1.19, "learning_rate": 3.014463069013084e-05, "loss": 2.0678, "step": 114000 },
    { "epoch": 1.2, "learning_rate": 3.005754573701738e-05, "loss": 2.0111, "step": 114500 },
    { "epoch": 1.2, "learning_rate": 2.9970460783903915e-05, "loss": 2.0568, "step": 115000 },
    { "epoch": 1.21, "learning_rate": 2.9883375830790454e-05, "loss": 2.0486, "step": 115500 },
    { "epoch": 1.21, "learning_rate": 2.979629087767699e-05, "loss": 2.07, "step": 116000 },
    { "epoch": 1.22, "learning_rate": 2.9709205924563534e-05, "loss": 2.0438, "step": 116500 },
    { "epoch": 1.22, "learning_rate": 2.9622120971450066e-05, "loss": 2.063, "step": 117000 },
    { "epoch": 1.23, "learning_rate": 2.953503601833661e-05, "loss": 2.0837, "step": 117500 },
    { "epoch": 1.23, "learning_rate": 2.944795106522315e-05, "loss": 2.0608, "step": 118000 },
    { "epoch": 1.24, "learning_rate": 2.9360866112109685e-05, "loss": 2.0656, "step": 118500 },
    { "epoch": 1.24, "learning_rate": 2.9273781158996228e-05, "loss": 2.0592, "step": 119000 },
    { "epoch": 1.25, "learning_rate": 2.9186696205882764e-05, "loss": 2.0735, "step": 119500 },
    { "epoch": 1.25, "learning_rate": 2.9099611252769304e-05, "loss": 2.0314, "step": 120000 },
    { "epoch": 1.26, "learning_rate": 2.901252629965584e-05, "loss": 2.0442, "step": 120500 },
    { "epoch": 1.26, "learning_rate": 2.892544134654238e-05, "loss": 2.0758, "step": 121000 },
    { "epoch": 1.27, "learning_rate": 2.8838356393428916e-05, "loss": 2.0281, "step": 121500 },
    { "epoch": 1.27, "learning_rate": 2.875127144031546e-05, "loss": 2.0676, "step": 122000 },
    { "epoch": 1.28, "learning_rate": 2.8664186487202e-05, "loss": 2.0562, "step": 122500 },
    { "epoch": 1.29, "learning_rate": 2.8577101534088535e-05, "loss": 2.0334, "step": 123000 },
    { "epoch": 1.29, "learning_rate": 2.8490016580975078e-05, "loss": 2.04, "step": 123500 },
    { "epoch": 1.3, "learning_rate": 2.840293162786161e-05, "loss": 2.0521, "step": 124000 },
    { "epoch": 1.3, "learning_rate": 2.8315846674748153e-05, "loss": 2.0218, "step": 124500 },
    { "epoch": 1.31, "learning_rate": 2.822876172163469e-05, "loss": 2.0267, "step": 125000 },
    { "epoch": 1.31, "learning_rate": 2.814167676852123e-05, "loss": 2.0198, "step": 125500 },
    { "epoch": 1.32, "learning_rate": 2.8054591815407765e-05, "loss": 2.0346, "step": 126000 },
    { "epoch": 1.32, "learning_rate": 2.796750686229431e-05, "loss": 2.0275, "step": 126500 },
    { "epoch": 1.33, "learning_rate": 2.788042190918084e-05, "loss": 2.0263, "step": 127000 },
    { "epoch": 1.33, "learning_rate": 2.7793336956067384e-05, "loss": 2.0065, "step": 127500 },
    { "epoch": 1.34, "learning_rate": 2.7706252002953924e-05, "loss": 2.0181, "step": 128000 },
    { "epoch": 1.34, "learning_rate": 2.761916704984046e-05, "loss": 2.0094, "step": 128500 },
    { "epoch": 1.35, "learning_rate": 2.7532082096727003e-05, "loss": 2.0347, "step": 129000 },
    { "epoch": 1.35, "learning_rate": 2.744499714361354e-05, "loss": 2.0015, "step": 129500 },
    { "epoch": 1.36, "learning_rate": 2.735791219050008e-05, "loss": 1.9911, "step": 130000 },
    { "epoch": 1.36, "learning_rate": 2.7270827237386615e-05, "loss": 2.0332, "step": 130500 },
    { "epoch": 1.37, "learning_rate": 2.7183742284273155e-05, "loss": 2.0351, "step": 131000 },
    { "epoch": 1.37, "learning_rate": 2.709665733115969e-05, "loss": 2.0185, "step": 131500 },
    { "epoch": 1.38, "learning_rate": 2.7009572378046234e-05, "loss": 2.0057, "step": 132000 },
    { "epoch": 1.38, "learning_rate": 2.6922487424932773e-05, "loss": 2.0095, "step": 132500 },
    { "epoch": 1.39, "learning_rate": 2.683540247181931e-05, "loss": 2.0363, "step": 133000 },
    { "epoch": 1.4, "learning_rate": 2.6748317518705852e-05, "loss": 2.0017, "step": 133500 },
    { "epoch": 1.4, "learning_rate": 2.6661232565592385e-05, "loss": 2.0176, "step": 134000 },
    { "epoch": 1.41, "learning_rate": 2.6574147612478928e-05, "loss": 1.9988, "step": 134500 },
    { "epoch": 1.41, "learning_rate": 2.6487062659365464e-05, "loss": 2.0006, "step": 135000 },
    { "epoch": 1.42, "learning_rate": 2.6399977706252004e-05, "loss": 1.9966, "step": 135500 },
    { "epoch": 1.42, "learning_rate": 2.631289275313854e-05, "loss": 1.9746, "step": 136000 },
    { "epoch": 1.43, "learning_rate": 2.6225807800025083e-05, "loss": 2.0186, "step": 136500 },
    { "epoch": 1.43, "learning_rate": 2.6138722846911623e-05, "loss": 1.9999, "step": 137000 },
    { "epoch": 1.44, "learning_rate": 2.605163789379816e-05, "loss": 2.0056, "step": 137500 },
    { "epoch": 1.44, "learning_rate": 2.59645529406847e-05, "loss": 1.9837, "step": 138000 },
    { "epoch": 1.45, "learning_rate": 2.5877467987571235e-05, "loss": 1.9683, "step": 138500 },
    { "epoch": 1.45, "learning_rate": 2.5790383034457778e-05, "loss": 2.0063, "step": 139000 },
    { "epoch": 1.46, "learning_rate": 2.5703298081344314e-05, "loss": 1.9752, "step": 139500 },
    { "epoch": 1.46, "learning_rate": 2.5616213128230854e-05, "loss": 1.977, "step": 140000 },
    { "epoch": 1.47, "learning_rate": 2.552912817511739e-05, "loss": 1.9953, "step": 140500 },
    { "epoch": 1.47, "learning_rate": 2.544204322200393e-05, "loss": 1.9789, "step": 141000 },
    { "epoch": 1.48, "learning_rate": 2.5354958268890472e-05, "loss": 2.0108, "step": 141500 },
    { "epoch": 1.48, "learning_rate": 2.526787331577701e-05, "loss": 1.9867, "step": 142000 },
    { "epoch": 1.49, "learning_rate": 2.5180788362663548e-05, "loss": 1.993, "step": 142500 },
    { "epoch": 1.49, "learning_rate": 2.5093703409550084e-05, "loss": 1.9721, "step": 143000 },
    { "epoch": 1.5, "learning_rate": 2.5006618456436627e-05, "loss": 1.9846, "step": 143500 },
    { "epoch": 1.5, "learning_rate": 2.4919533503323164e-05, "loss": 1.9779, "step": 144000 },
    { "epoch": 1.51, "learning_rate": 2.4832448550209703e-05, "loss": 1.9963, "step": 144500 },
    { "epoch": 1.52, "learning_rate": 2.4745363597096243e-05, "loss": 1.9521, "step": 145000 },
    { "epoch": 1.52, "learning_rate": 2.465827864398278e-05, "loss": 1.9644, "step": 145500 },
    { "epoch": 1.53, "learning_rate": 2.457119369086932e-05, "loss": 1.97, "step": 146000 },
    { "epoch": 1.53, "learning_rate": 2.4484108737755858e-05, "loss": 1.9816, "step": 146500 },
    { "epoch": 1.54, "learning_rate": 2.4397023784642394e-05, "loss": 1.9872, "step": 147000 },
    { "epoch": 1.54, "learning_rate": 2.4309938831528934e-05, "loss": 1.9639, "step": 147500 },
    { "epoch": 1.55, "learning_rate": 2.4222853878415473e-05, "loss": 1.9679, "step": 148000 },
    { "epoch": 1.55, "learning_rate": 2.4135768925302013e-05, "loss": 1.966, "step": 148500 },
    { "epoch": 1.56, "learning_rate": 2.4048683972188553e-05, "loss": 1.9513, "step": 149000 },
    { "epoch": 1.56, "learning_rate": 2.396159901907509e-05, "loss": 1.9739, "step": 149500 },
    { "epoch": 1.57, "learning_rate": 2.387451406596163e-05, "loss": 1.9763, "step": 150000 },
    { "epoch": 1.57, "learning_rate": 2.3787429112848168e-05, "loss": 1.943, "step": 150500 },
    { "epoch": 1.58, "learning_rate": 2.3700344159734704e-05, "loss": 1.9663, "step": 151000 },
    { "epoch": 1.58, "learning_rate": 2.3613259206621244e-05, "loss": 1.9904, "step": 151500 },
    { "epoch": 1.59, "learning_rate": 2.3526174253507783e-05, "loss": 1.9495, "step": 152000 },
    { "epoch": 1.59, "learning_rate": 2.343908930039432e-05, "loss": 1.9218, "step": 152500 },
    { "epoch": 1.6, "learning_rate": 2.335200434728086e-05, "loss": 1.9479, "step": 153000 },
    { "epoch": 1.6, "learning_rate": 2.32649193941674e-05, "loss": 1.9625, "step": 153500 },
    { "epoch": 1.61, "learning_rate": 2.317783444105394e-05, "loss": 1.9426, "step": 154000 },
    { "epoch": 1.61, "learning_rate": 2.3090749487940478e-05, "loss": 1.94, "step": 154500 },
    { "epoch": 1.62, "learning_rate": 2.3003664534827014e-05, "loss": 1.9457, "step": 155000 },
    { "epoch": 1.63, "learning_rate": 2.2916579581713554e-05, "loss": 1.9589, "step": 155500 },
    { "epoch": 1.63, "learning_rate": 2.2829494628600093e-05, "loss": 1.9571, "step": 156000 },
    { "epoch": 1.64, "learning_rate": 2.274240967548663e-05, "loss": 1.9398, "step": 156500 },
    { "epoch": 1.64, "learning_rate": 2.265532472237317e-05, "loss": 1.9584, "step": 157000 },
    { "epoch": 1.65, "learning_rate": 2.256823976925971e-05, "loss": 1.9351, "step": 157500 },
    { "epoch": 1.65, "learning_rate": 2.2481154816146248e-05, "loss": 1.9612, "step": 158000 },
    { "epoch": 1.66, "learning_rate": 2.2394069863032788e-05, "loss": 1.9398, "step": 158500 },
    { "epoch": 1.66, "learning_rate": 2.2306984909919327e-05, "loss": 1.9425, "step": 159000 },
    { "epoch": 1.67, "learning_rate": 2.2219899956805864e-05, "loss": 1.9639, "step": 159500 },
    { "epoch": 1.67, "learning_rate": 2.2132815003692403e-05, "loss": 1.9281, "step": 160000 },
    { "epoch": 1.68, "learning_rate": 2.2045730050578943e-05, "loss": 1.9447, "step": 160500 },
    { "epoch": 1.68, "learning_rate": 2.195864509746548e-05, "loss": 1.9255, "step": 161000 },
    { "epoch": 1.69, "learning_rate": 2.187156014435202e-05, "loss": 1.9457, "step": 161500 },
    { "epoch": 1.69, "learning_rate": 2.1784475191238558e-05, "loss": 1.945, "step": 162000 },
    { "epoch": 1.7, "learning_rate": 2.1697390238125098e-05, "loss": 1.9559, "step": 162500 },
    { "epoch": 1.7, "learning_rate": 2.1610305285011637e-05, "loss": 1.9303, "step": 163000 },
    { "epoch": 1.71, "learning_rate": 2.1523220331898174e-05, "loss": 1.9174, "step": 163500 },
    { "epoch": 1.71, "learning_rate": 2.1436135378784713e-05, "loss": 1.9374, "step": 164000 },
    { "epoch": 1.72, "learning_rate": 2.1349050425671253e-05, "loss": 1.941, "step": 164500 },
    { "epoch": 1.72, "learning_rate": 2.126196547255779e-05, "loss": 1.9235, "step": 165000 },
    { "epoch": 1.73, "learning_rate": 2.117488051944433e-05, "loss": 1.9325, "step": 165500 },
    { "epoch": 1.73, "learning_rate": 2.1087795566330868e-05, "loss": 1.9497, "step": 166000 },
    { "epoch": 1.74, "learning_rate": 2.1000710613217404e-05, "loss": 1.9277, "step": 166500 },
    { "epoch": 1.75, "learning_rate": 2.0913625660103944e-05, "loss": 1.9326, "step": 167000 },
    { "epoch": 1.75, "learning_rate": 2.0826540706990487e-05, "loss": 1.9026, "step": 167500 },
    { "epoch": 1.76, "learning_rate": 2.0739455753877023e-05, "loss": 1.9175, "step": 168000 },
    { "epoch": 1.76, "learning_rate": 2.0652370800763563e-05, "loss": 1.9002, "step": 168500 },
    { "epoch": 1.77, "learning_rate": 2.0565285847650102e-05, "loss": 1.9534, "step": 169000 },
    { "epoch": 1.77, "learning_rate": 2.047820089453664e-05, "loss": 1.9303, "step": 169500 },
    { "epoch": 1.78, "learning_rate": 2.0391115941423178e-05, "loss": 1.9324, "step": 170000 },
    { "epoch": 1.78, "learning_rate": 2.0304030988309718e-05, "loss": 1.9322, "step": 170500 },
    { "epoch": 1.79, "learning_rate": 2.0216946035196254e-05, "loss": 1.9185, "step": 171000 },
    { "epoch": 1.79, "learning_rate": 2.0129861082082793e-05, "loss": 1.9078, "step": 171500 },
    { "epoch": 1.8, "learning_rate": 2.0042776128969333e-05, "loss": 1.8907, "step": 172000 },
    { "epoch": 1.8, "learning_rate": 1.9955691175855873e-05, "loss": 1.8814, "step": 172500 },
    { "epoch": 1.81, "learning_rate": 1.9868606222742412e-05, "loss": 1.9388, "step": 173000 },
    { "epoch": 1.81, "learning_rate": 1.978152126962895e-05, "loss": 1.898, "step": 173500 },
    { "epoch": 1.82, "learning_rate": 1.9694436316515488e-05, "loss": 1.9195, "step": 174000 },
    { "epoch": 1.82, "learning_rate": 1.9607351363402028e-05, "loss": 1.911, "step": 174500 },
    { "epoch": 1.83, "learning_rate": 1.9520266410288564e-05, "loss": 1.8908, "step": 175000 },
    { "epoch": 1.83, "learning_rate": 1.9433181457175103e-05, "loss": 1.8934, "step": 175500 },
    { "epoch": 1.84, "learning_rate": 1.9346096504061643e-05, "loss": 1.8823, "step": 176000 },
    { "epoch": 1.84, "learning_rate": 1.925901155094818e-05, "loss": 1.8952, "step": 176500 },
    { "epoch": 1.85, "learning_rate": 1.9171926597834722e-05, "loss": 1.917, "step": 177000 },
    { "epoch": 1.85, "learning_rate": 1.9084841644721262e-05, "loss": 1.9191, "step": 177500 },
    { "epoch": 1.86, "learning_rate": 1.8997756691607798e-05, "loss": 1.8784, "step": 178000 },
    { "epoch": 1.87, "learning_rate": 1.8910671738494338e-05, "loss": 1.8598, "step": 178500 },
    { "epoch": 1.87, "learning_rate": 1.8823586785380877e-05, "loss": 1.9008, "step": 179000 },
    { "epoch": 1.88, "learning_rate": 1.8736501832267413e-05, "loss": 1.9135, "step": 179500 },
    { "epoch": 1.88, "learning_rate": 1.8649416879153953e-05, "loss": 1.8975, "step": 180000 },
    { "epoch": 1.89, "learning_rate": 1.8562331926040493e-05, "loss": 1.8977, "step": 180500 },
    { "epoch": 1.89, "learning_rate": 1.847524697292703e-05, "loss": 1.9034, "step": 181000 },
    { "epoch": 1.9, "learning_rate": 1.8388162019813572e-05, "loss": 1.8742, "step": 181500 },
    { "epoch": 1.9, "learning_rate": 1.8301077066700108e-05, "loss": 1.9012, "step": 182000 },
    { "epoch": 1.91, "learning_rate": 1.8213992113586647e-05, "loss": 1.8909, "step": 182500 },
    { "epoch": 1.91, "learning_rate": 1.8126907160473187e-05, "loss": 1.8979, "step": 183000 },
    { "epoch": 1.92, "learning_rate": 1.8039822207359723e-05, "loss": 1.8946, "step": 183500 },
    { "epoch": 1.92, "learning_rate": 1.7952737254246263e-05, "loss": 1.9101, "step": 184000 },
    { "epoch": 1.93, "learning_rate": 1.7865652301132802e-05, "loss": 1.8848, "step": 184500 },
    { "epoch": 1.93, "learning_rate": 1.777856734801934e-05, "loss": 1.8875, "step": 185000 },
    { "epoch": 1.94, "learning_rate": 1.7691482394905878e-05, "loss": 1.8915, "step": 185500 },
    { "epoch": 1.94, "learning_rate": 1.760439744179242e-05, "loss": 1.9019, "step": 186000 },
    { "epoch": 1.95, "learning_rate": 1.7517312488678957e-05, "loss": 1.8698, "step": 186500 },
    { "epoch": 1.95, "learning_rate": 1.7430227535565497e-05, "loss": 1.887, "step": 187000 },
    { "epoch": 1.96, "learning_rate": 1.7343142582452037e-05, "loss": 1.8691, "step": 187500 },
    { "epoch": 1.96, "learning_rate": 1.7256057629338573e-05, "loss": 1.8669, "step": 188000 },
    { "epoch": 1.97, "learning_rate": 1.7168972676225112e-05, "loss": 1.9039, "step": 188500 },
    { "epoch": 1.98, "learning_rate": 1.7081887723111652e-05, "loss": 1.893, "step": 189000 },
    { "epoch": 1.98, "learning_rate": 1.6994802769998188e-05, "loss": 1.8628, "step": 189500 },
    { "epoch": 1.99, "learning_rate": 1.6907717816884728e-05, "loss": 1.8672, "step": 190000 },
    { "epoch": 1.99, "learning_rate": 1.6820632863771267e-05, "loss": 1.8714, "step": 190500 },
    { "epoch": 2.0, "learning_rate": 1.6733547910657807e-05, "loss": 1.8499, "step": 191000 },
    { "epoch": 2.0, "learning_rate": 1.6646462957544347e-05, "loss": 1.8405, "step": 191500 },
    { "epoch": 2.01, "learning_rate": 1.6559378004430883e-05, "loss": 1.8392, "step": 192000 },
    { "epoch": 2.01, "learning_rate": 1.6472293051317422e-05, "loss": 1.8528, "step": 192500 },
    { "epoch": 2.02, "learning_rate": 1.6385208098203962e-05, "loss": 1.8385, "step": 193000 },
    { "epoch": 2.02, "learning_rate": 1.6298123145090498e-05, "loss": 1.8326, "step": 193500 },
    { "epoch": 2.03, "learning_rate": 1.6211038191977038e-05, "loss": 1.8424, "step": 194000 },
    { "epoch": 2.03, "learning_rate": 1.6123953238863577e-05, "loss": 1.8147, "step": 194500 },
    { "epoch": 2.04, "learning_rate": 1.6036868285750114e-05, "loss": 1.8671, "step": 195000 },
    { "epoch": 2.04, "learning_rate": 1.5949783332636656e-05, "loss": 1.823, "step": 195500 },
    { "epoch": 2.05, "learning_rate": 1.5862698379523193e-05, "loss": 1.8271, "step": 196000 },
    { "epoch": 2.05, "learning_rate": 1.5775613426409732e-05, "loss": 1.8385, "step": 196500 },
    { "epoch": 2.06, "learning_rate": 1.5688528473296272e-05, "loss": 1.8401, "step": 197000 },
    { "epoch": 2.06, "learning_rate": 1.5601443520182808e-05, "loss": 1.8166, "step": 197500 },
    { "epoch": 2.07, "learning_rate": 1.5514358567069348e-05, "loss": 1.8358, "step": 198000 },
    { "epoch": 2.07, "learning_rate": 1.5427273613955887e-05, "loss": 1.8174, "step": 198500 },
    { "epoch": 2.08, "learning_rate": 1.5340188660842423e-05, "loss": 1.8661, "step": 199000 },
    { "epoch": 2.08, "learning_rate": 1.5253103707728963e-05, "loss": 1.8418, "step": 199500 },
    { "epoch": 2.09, "learning_rate": 1.5166018754615503e-05, "loss": 1.8458, "step": 200000 },
    { "epoch": 2.1, "learning_rate": 1.5078933801502044e-05, "loss": 1.8425, "step": 200500 },
    { "epoch": 2.1, "learning_rate": 1.4991848848388582e-05, "loss": 1.8042, "step": 201000 },
    { "epoch": 2.11, "learning_rate": 1.490476389527512e-05, "loss": 1.811, "step": 201500 },
    { "epoch": 2.11, "learning_rate": 1.481767894216166e-05, "loss": 1.827, "step": 202000 },
    { "epoch": 2.12, "learning_rate": 1.4730593989048197e-05, "loss": 1.8273, "step": 202500 },
    { "epoch": 2.12, "learning_rate": 1.4643509035934735e-05, "loss": 1.8124, "step": 203000 },
    { "epoch": 2.13, "learning_rate": 1.4556424082821275e-05, "loss": 1.8035, "step": 203500 },
    { "epoch": 2.13, "learning_rate": 1.4469339129707813e-05, "loss": 1.8408, "step": 204000 },
    { "epoch": 2.14, "learning_rate": 1.438225417659435e-05, "loss": 1.7842, "step": 204500 },
    { "epoch": 2.14, "learning_rate": 1.4295169223480892e-05, "loss": 1.8058, "step": 205000 },
    { "epoch": 2.15, "learning_rate": 1.4208084270367431e-05, "loss": 1.8098, "step": 205500 },
    { "epoch": 2.15, "learning_rate": 1.412099931725397e-05, "loss": 1.8061, "step": 206000 },
    { "epoch": 2.16, "learning_rate": 1.4033914364140507e-05, "loss": 1.8077, "step": 206500 },
    { "epoch": 2.16, "learning_rate": 1.3946829411027047e-05, "loss": 1.8174, "step": 207000 },
    { "epoch": 2.17, "learning_rate": 1.3859744457913585e-05, "loss": 1.7933, "step": 207500 },
    { "epoch": 2.17, "learning_rate": 1.3772659504800122e-05, "loss": 1.7864, "step": 208000 },
    { "epoch": 2.18, "learning_rate": 1.3685574551686662e-05, "loss": 1.8072, "step": 208500 },
    { "epoch": 2.18, "learning_rate": 1.35984895985732e-05, "loss": 1.8226, "step": 209000 },
    { "epoch": 2.19, "learning_rate": 1.3511404645459738e-05, "loss": 1.8134, "step": 209500 },
    { "epoch": 2.19, "learning_rate": 1.342431969234628e-05, "loss": 1.8193, "step": 210000 },
    { "epoch": 2.2, "learning_rate": 1.3337234739232817e-05, "loss": 1.8274, "step": 210500 },
    { "epoch": 2.2, "learning_rate": 1.3250149786119357e-05, "loss": 1.8083, "step": 211000 },
    { "epoch": 2.21, "learning_rate": 1.3163064833005895e-05, "loss": 1.8299, "step": 211500 },
    { "epoch": 2.22, "learning_rate": 1.3075979879892432e-05, "loss": 1.7945, "step": 212000 },
    { "epoch": 2.22, "learning_rate": 1.2988894926778972e-05, "loss": 1.8197, "step": 212500 },
    { "epoch": 2.23, "learning_rate": 1.290180997366551e-05, "loss": 1.801, "step": 213000 },
    { "epoch": 2.23, "learning_rate": 1.2814725020552048e-05, "loss": 1.7869, "step": 213500 },
    { "epoch": 2.24, "learning_rate": 1.2727640067438587e-05, "loss": 1.787, "step": 214000 },
    { "epoch": 2.24, "learning_rate": 1.2640555114325129e-05, "loss": 1.795, "step": 214500 },
    { "epoch": 2.25, "learning_rate": 1.2553470161211667e-05, "loss": 1.8039, "step": 215000 },
    { "epoch": 2.25, "learning_rate": 1.2466385208098204e-05, "loss": 1.7862, "step": 215500 },
    { "epoch": 2.26, "learning_rate": 1.2379300254984744e-05, "loss": 1.7948, "step": 216000 },
    { "epoch": 2.26, "learning_rate": 1.2292215301871282e-05, "loss": 1.7791, "step": 216500 },
    { "epoch": 2.27, "learning_rate": 1.220513034875782e-05, "loss": 1.7978, "step": 217000 },
    { "epoch": 2.27, "learning_rate": 1.211804539564436e-05, "loss": 1.8121, "step": 217500 },
    { "epoch": 2.28, "learning_rate": 1.2030960442530899e-05, "loss": 1.8106, "step": 218000 },
    { "epoch": 2.28, "learning_rate": 1.1943875489417437e-05, "loss": 1.812, "step": 218500 },
    { "epoch": 2.29, "learning_rate": 1.1856790536303977e-05, "loss": 1.7548, "step": 219000 },
    { "epoch": 2.29, "learning_rate": 1.1769705583190514e-05, "loss": 1.7806, "step": 219500 },
    { "epoch": 2.3, "learning_rate": 1.1682620630077052e-05, "loss": 1.7942, "step": 220000 },
    { "epoch": 2.3, "learning_rate": 1.1595535676963592e-05, "loss": 1.8271, "step": 220500 },
    { "epoch": 2.31, "learning_rate": 1.1508450723850131e-05, "loss": 1.765, "step": 221000 },
    { "epoch": 2.31, "learning_rate": 1.142136577073667e-05, "loss": 1.8056, "step": 221500 },
    { "epoch": 2.32, "learning_rate": 1.1334280817623207e-05, "loss": 1.7853, "step": 222000 },
    { "epoch": 2.33, "learning_rate": 1.1247195864509749e-05, "loss": 1.7754, "step": 222500 },
    { "epoch": 2.33, "learning_rate": 1.1160110911396286e-05, "loss": 1.7931, "step": 223000 },
    { "epoch": 2.34, "learning_rate": 1.1073025958282824e-05, "loss": 1.7868, "step": 223500 },
    { "epoch": 2.34, "learning_rate": 1.0985941005169364e-05, "loss": 1.7744, "step": 224000 },
    { "epoch": 2.35, "learning_rate": 1.0898856052055902e-05, "loss": 1.7919, "step": 224500 },
    { "epoch": 2.35, "learning_rate": 1.0811771098942441e-05, "loss": 1.7822, "step": 225000 },
    { "epoch": 2.36, "learning_rate": 1.072468614582898e-05, "loss": 1.7738, "step": 225500 },
    { "epoch": 2.36, "learning_rate": 1.0637601192715519e-05, "loss": 1.779, "step": 226000 },
    { "epoch": 2.37, "learning_rate": 1.0550516239602057e-05, "loss": 1.7856, "step": 226500 },
    { "epoch": 2.37, "learning_rate": 1.0463431286488595e-05, "loss": 1.8012, "step": 227000 },
    { "epoch": 2.38, "learning_rate": 1.0376346333375134e-05, "loss": 1.7557, "step": 227500 },
    { "epoch": 2.38, "learning_rate": 1.0289261380261674e-05, "loss": 1.7674, "step": 228000 },
    { "epoch": 2.39, "learning_rate": 1.0202176427148212e-05, "loss": 1.7739, "step": 228500 },
    { "epoch": 2.39, "learning_rate": 1.011509147403475e-05, "loss": 1.7393, "step": 229000 },
    { "epoch": 2.4, "learning_rate": 1.002800652092129e-05, "loss": 1.7869, "step": 229500 },
    { "epoch": 2.4, "learning_rate": 9.940921567807829e-06, "loss": 1.7764, "step": 230000 },
    { "epoch": 2.41, "learning_rate": 9.853836614694367e-06, "loss": 1.7298, "step": 230500 },
    { "epoch": 2.41, "learning_rate": 9.766751661580906e-06, "loss": 1.7476, "step": 231000 },
    { "epoch": 2.42, "learning_rate": 9.679666708467444e-06, "loss": 1.7576, "step": 231500 },
    { "epoch": 2.42, "learning_rate": 9.592581755353984e-06, "loss": 1.7482, "step": 232000 },
    { "epoch": 2.43, "learning_rate": 9.505496802240522e-06, "loss": 1.7533, "step": 232500 },
    { "epoch": 2.43, "learning_rate": 9.418411849127061e-06, "loss": 1.7642, "step": 233000 },
    { "epoch": 2.44, "learning_rate": 9.3313268960136e-06, "loss": 1.7546, "step": 233500 },
    { "epoch": 2.45, "learning_rate": 9.244241942900137e-06, "loss": 1.7569, "step": 234000 },
    { "epoch": 2.45, "learning_rate": 9.157156989786678e-06, "loss": 1.7453, "step": 234500 },
    { "epoch": 2.46, "learning_rate": 9.070072036673216e-06, "loss": 1.7449, "step": 235000 },
    { "epoch": 2.46, "learning_rate": 8.982987083559754e-06, "loss": 1.7682, "step": 235500 },
    { "epoch": 2.47, "learning_rate": 8.895902130446294e-06, "loss": 1.7584, "step": 236000 },
    { "epoch": 2.47, "learning_rate": 8.808817177332832e-06, "loss": 1.7557, "step": 236500 },
    { "epoch": 2.48, "learning_rate": 8.721732224219371e-06, "loss": 1.773, "step": 237000 },
    { "epoch": 2.48, "learning_rate": 8.634647271105909e-06, "loss": 1.773, "step": 237500 },
| { | |
| "epoch": 2.49, | |
| "learning_rate": 8.547562317992449e-06, | |
| "loss": 1.744, | |
| "step": 238000 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 8.460477364878987e-06, | |
| "loss": 1.7379, | |
| "step": 238500 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 8.373392411765526e-06, | |
| "loss": 1.7635, | |
| "step": 239000 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 8.286307458652066e-06, | |
| "loss": 1.7585, | |
| "step": 239500 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 8.199222505538604e-06, | |
| "loss": 1.7488, | |
| "step": 240000 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 8.112137552425142e-06, | |
| "loss": 1.7532, | |
| "step": 240500 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "learning_rate": 8.025052599311681e-06, | |
| "loss": 1.758, | |
| "step": 241000 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "learning_rate": 7.93796764619822e-06, | |
| "loss": 1.7485, | |
| "step": 241500 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "learning_rate": 7.850882693084759e-06, | |
| "loss": 1.7409, | |
| "step": 242000 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "learning_rate": 7.763797739971297e-06, | |
| "loss": 1.7494, | |
| "step": 242500 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "learning_rate": 7.676712786857836e-06, | |
| "loss": 1.7264, | |
| "step": 243000 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "learning_rate": 7.589627833744374e-06, | |
| "loss": 1.7465, | |
| "step": 243500 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "learning_rate": 7.502542880630914e-06, | |
| "loss": 1.7654, | |
| "step": 244000 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "learning_rate": 7.415457927517452e-06, | |
| "loss": 1.7422, | |
| "step": 244500 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "learning_rate": 7.328372974403991e-06, | |
| "loss": 1.7418, | |
| "step": 245000 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "learning_rate": 7.241288021290529e-06, | |
| "loss": 1.7664, | |
| "step": 245500 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "learning_rate": 7.154203068177068e-06, | |
| "loss": 1.7324, | |
| "step": 246000 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 7.067118115063607e-06, | |
| "loss": 1.7143, | |
| "step": 246500 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 6.980033161950146e-06, | |
| "loss": 1.6917, | |
| "step": 247000 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "learning_rate": 6.892948208836685e-06, | |
| "loss": 1.7498, | |
| "step": 247500 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "learning_rate": 6.805863255723223e-06, | |
| "loss": 1.7161, | |
| "step": 248000 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 6.718778302609763e-06, | |
| "loss": 1.7315, | |
| "step": 248500 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 6.631693349496301e-06, | |
| "loss": 1.7591, | |
| "step": 249000 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 6.54460839638284e-06, | |
| "loss": 1.7388, | |
| "step": 249500 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 6.4575234432693785e-06, | |
| "loss": 1.7236, | |
| "step": 250000 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 6.370438490155916e-06, | |
| "loss": 1.7396, | |
| "step": 250500 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 6.283353537042457e-06, | |
| "loss": 1.7287, | |
| "step": 251000 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "learning_rate": 6.196268583928995e-06, | |
| "loss": 1.7231, | |
| "step": 251500 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "learning_rate": 6.1091836308155335e-06, | |
| "loss": 1.723, | |
| "step": 252000 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 6.022098677702072e-06, | |
| "loss": 1.7419, | |
| "step": 252500 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 5.935013724588611e-06, | |
| "loss": 1.7434, | |
| "step": 253000 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 5.84792877147515e-06, | |
| "loss": 1.7221, | |
| "step": 253500 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 5.7608438183616885e-06, | |
| "loss": 1.7131, | |
| "step": 254000 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 5.673758865248227e-06, | |
| "loss": 1.7402, | |
| "step": 254500 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 5.586673912134766e-06, | |
| "loss": 1.7157, | |
| "step": 255000 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 5.499588959021305e-06, | |
| "loss": 1.7488, | |
| "step": 255500 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 5.4125040059078434e-06, | |
| "loss": 1.7135, | |
| "step": 256000 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 5.325419052794382e-06, | |
| "loss": 1.7125, | |
| "step": 256500 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "learning_rate": 5.238334099680921e-06, | |
| "loss": 1.713, | |
| "step": 257000 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "learning_rate": 5.15124914656746e-06, | |
| "loss": 1.7231, | |
| "step": 257500 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 5.064164193453998e-06, | |
| "loss": 1.724, | |
| "step": 258000 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 4.977079240340537e-06, | |
| "loss": 1.7196, | |
| "step": 258500 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 4.889994287227076e-06, | |
| "loss": 1.7282, | |
| "step": 259000 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 4.802909334113615e-06, | |
| "loss": 1.7245, | |
| "step": 259500 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 4.715824381000153e-06, | |
| "loss": 1.7129, | |
| "step": 260000 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 4.628739427886692e-06, | |
| "loss": 1.7114, | |
| "step": 260500 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 4.541654474773231e-06, | |
| "loss": 1.72, | |
| "step": 261000 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 4.45456952165977e-06, | |
| "loss": 1.7321, | |
| "step": 261500 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 4.367484568546309e-06, | |
| "loss": 1.7439, | |
| "step": 262000 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 4.280399615432847e-06, | |
| "loss": 1.6934, | |
| "step": 262500 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 4.193314662319386e-06, | |
| "loss": 1.7236, | |
| "step": 263000 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 4.1062297092059246e-06, | |
| "loss": 1.7333, | |
| "step": 263500 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 4.019144756092463e-06, | |
| "loss": 1.7389, | |
| "step": 264000 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 3.932059802979003e-06, | |
| "loss": 1.6828, | |
| "step": 264500 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 3.844974849865541e-06, | |
| "loss": 1.7122, | |
| "step": 265000 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 3.75788989675208e-06, | |
| "loss": 1.7356, | |
| "step": 265500 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 3.6708049436386187e-06, | |
| "loss": 1.7251, | |
| "step": 266000 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 3.583719990525157e-06, | |
| "loss": 1.7069, | |
| "step": 266500 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 3.496635037411696e-06, | |
| "loss": 1.6999, | |
| "step": 267000 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 3.4095500842982345e-06, | |
| "loss": 1.6997, | |
| "step": 267500 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 3.3224651311847737e-06, | |
| "loss": 1.6997, | |
| "step": 268000 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 3.2353801780713124e-06, | |
| "loss": 1.7099, | |
| "step": 268500 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 3.1482952249578516e-06, | |
| "loss": 1.7147, | |
| "step": 269000 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 3.06121027184439e-06, | |
| "loss": 1.7139, | |
| "step": 269500 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 2.9741253187309286e-06, | |
| "loss": 1.7218, | |
| "step": 270000 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 2.8870403656174674e-06, | |
| "loss": 1.7158, | |
| "step": 270500 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 2.7999554125040057e-06, | |
| "loss": 1.6819, | |
| "step": 271000 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 2.712870459390545e-06, | |
| "loss": 1.7058, | |
| "step": 271500 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 2.6257855062770836e-06, | |
| "loss": 1.708, | |
| "step": 272000 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 2.5387005531636223e-06, | |
| "loss": 1.7167, | |
| "step": 272500 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 2.451615600050161e-06, | |
| "loss": 1.7075, | |
| "step": 273000 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 2.3645306469367e-06, | |
| "loss": 1.7125, | |
| "step": 273500 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 2.2774456938232386e-06, | |
| "loss": 1.7116, | |
| "step": 274000 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 2.1903607407097773e-06, | |
| "loss": 1.6871, | |
| "step": 274500 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 2.103275787596316e-06, | |
| "loss": 1.6659, | |
| "step": 275000 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 2.016190834482855e-06, | |
| "loss": 1.6994, | |
| "step": 275500 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 1.9291058813693935e-06, | |
| "loss": 1.6782, | |
| "step": 276000 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 1.8420209282559325e-06, | |
| "loss": 1.6992, | |
| "step": 276500 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 1.7549359751424712e-06, | |
| "loss": 1.6839, | |
| "step": 277000 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 1.6678510220290096e-06, | |
| "loss": 1.7042, | |
| "step": 277500 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 1.5807660689155485e-06, | |
| "loss": 1.6993, | |
| "step": 278000 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 1.4936811158020872e-06, | |
| "loss": 1.7004, | |
| "step": 278500 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 1.406596162688626e-06, | |
| "loss": 1.6947, | |
| "step": 279000 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 1.319511209575165e-06, | |
| "loss": 1.7011, | |
| "step": 279500 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 1.2324262564617035e-06, | |
| "loss": 1.6973, | |
| "step": 280000 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 1.1453413033482422e-06, | |
| "loss": 1.6965, | |
| "step": 280500 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 1.0582563502347812e-06, | |
| "loss": 1.687, | |
| "step": 281000 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 9.7117139712132e-07, | |
| "loss": 1.6907, | |
| "step": 281500 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 8.840864440078585e-07, | |
| "loss": 1.6931, | |
| "step": 282000 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 7.970014908943974e-07, | |
| "loss": 1.7063, | |
| "step": 282500 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 7.09916537780936e-07, | |
| "loss": 1.7135, | |
| "step": 283000 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 6.228315846674749e-07, | |
| "loss": 1.689, | |
| "step": 283500 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 5.357466315540136e-07, | |
| "loss": 1.6676, | |
| "step": 284000 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 4.486616784405523e-07, | |
| "loss": 1.7108, | |
| "step": 284500 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 3.615767253270911e-07, | |
| "loss": 1.6767, | |
| "step": 285000 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 2.7449177221362985e-07, | |
| "loss": 1.6986, | |
| "step": 285500 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 1.8740681910016862e-07, | |
| "loss": 1.6615, | |
| "step": 286000 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 1.0032186598670735e-07, | |
| "loss": 1.6876, | |
| "step": 286500 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "learning_rate": 1.323691287324611e-08, | |
| "loss": 1.7034, | |
| "step": 287000 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "step": 287076, | |
| "total_flos": 4.9083067456945856e+17, | |
| "train_loss": 2.060935147813305, | |
| "train_runtime": 51665.6711, | |
| "train_samples_per_second": 44.451, | |
| "train_steps_per_second": 5.556 | |
| } | |
| ], | |
| "logging_steps": 500, | |
| "max_steps": 287076, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "total_flos": 4.9083067456945856e+17, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
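
The log above is the shape emitted by the Hugging Face `Trainer` as `trainer_state.json`: a `log_history` list of periodic entries (here every 500 steps, per `logging_steps`), closed by a single run-summary entry that carries `train_loss`, `train_runtime`, and throughput instead of a per-step `loss`. The `learning_rate` values are consistent with a linear decay from 5e-05 to 0 over the 287,076 `max_steps`. Below is a minimal sketch of how one might load and plot such a file; the path `trainer_state.json`, the output filename, and the use of matplotlib are illustrative assumptions, not anything specified by the log itself.

```python
import json

import matplotlib.pyplot as plt

# Hypothetical path: adjust to wherever the JSON shown above is saved.
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the periodic logging entries; the final item in log_history
# is a run summary (train_loss, train_runtime, ...) with no "loss" key.
logs = [entry for entry in state["log_history"] if "loss" in entry]

steps = [entry["step"] for entry in logs]
losses = [entry["loss"] for entry in logs]
lrs = [entry["learning_rate"] for entry in logs]

fig, ax = plt.subplots()
ax.plot(steps, losses, color="tab:blue")
ax.set_xlabel("step")
ax.set_ylabel("training loss", color="tab:blue")

# Second y-axis for the linearly decaying learning rate.
ax2 = ax.twinx()
ax2.plot(steps, lrs, color="tab:orange")
ax2.set_ylabel("learning rate", color="tab:orange")

fig.tight_layout()
fig.savefig("training_curves.png")
```

Filtering on the presence of the `"loss"` key is the simplest way to separate the periodic entries from the summary entry, since both live in the same `log_history` array.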