{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 593715,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "learning_rate": 6.994199237007655e-05, "loss": 6.859, "step": 500 },
    { "epoch": 0.01, "learning_rate": 6.988304152665841e-05, "loss": 5.2814, "step": 1000 },
    { "epoch": 0.01, "learning_rate": 6.982409068324027e-05, "loss": 4.4487, "step": 1500 },
    { "epoch": 0.01, "learning_rate": 6.976513983982213e-05, "loss": 4.2191, "step": 2000 },
    { "epoch": 0.01, "learning_rate": 6.970618899640399e-05, "loss": 3.8599, "step": 2500 },
    { "epoch": 0.02, "learning_rate": 6.964723815298586e-05, "loss": 3.6392, "step": 3000 },
    { "epoch": 0.02, "learning_rate": 6.958828730956772e-05, "loss": 3.4509, "step": 3500 },
    { "epoch": 0.02, "learning_rate": 6.952933646614957e-05, "loss": 3.3074, "step": 4000 },
    { "epoch": 0.02, "learning_rate": 6.947050352441827e-05, "loss": 3.3332, "step": 4500 },
    { "epoch": 0.03, "learning_rate": 6.941167058268697e-05, "loss": 3.2866, "step": 5000 },
    { "epoch": 0.03, "learning_rate": 6.935271973926883e-05, "loss": 3.0393, "step": 5500 },
    { "epoch": 0.03, "learning_rate": 6.92937688958507e-05, "loss": 3.0049, "step": 6000 },
    { "epoch": 0.03, "learning_rate": 6.923481805243257e-05, "loss": 2.9204, "step": 6500 },
    { "epoch": 0.04, "learning_rate": 6.917586720901443e-05, "loss": 2.8666, "step": 7000 },
    { "epoch": 0.04, "learning_rate": 6.911691636559629e-05, "loss": 2.7032, "step": 7500 },
    { "epoch": 0.04, "learning_rate": 6.905796552217815e-05, "loss": 2.7154, "step": 8000 },
    { "epoch": 0.04, "learning_rate": 6.899913258044685e-05, "loss": 2.7145, "step": 8500 },
    { "epoch": 0.05, "learning_rate": 6.89401817370287e-05, "loss": 2.7003, "step": 9000 },
    { "epoch": 0.05, "learning_rate": 6.888123089361057e-05, "loss": 2.699, "step": 9500 },
    { "epoch": 0.05, "learning_rate": 6.882228005019243e-05, "loss": 2.6719, "step": 10000 },
    { "epoch": 0.05, "learning_rate": 6.876332920677429e-05, "loss": 2.6672, "step": 10500 },
    { "epoch": 0.06, "learning_rate": 6.870437836335615e-05, "loss": 2.616, "step": 11000 },
    { "epoch": 0.06, "learning_rate": 6.864542751993801e-05, "loss": 2.6025, "step": 11500 },
    { "epoch": 0.06, "learning_rate": 6.858647667651987e-05, "loss": 2.4387, "step": 12000 },
    { "epoch": 0.06, "learning_rate": 6.852752583310173e-05, "loss": 2.3816, "step": 12500 },
    { "epoch": 0.07, "learning_rate": 6.84685749896836e-05, "loss": 2.4366, "step": 13000 },
    { "epoch": 0.07, "learning_rate": 6.840962414626546e-05, "loss": 2.4259, "step": 13500 },
    { "epoch": 0.07, "learning_rate": 6.835067330284732e-05, "loss": 2.4172, "step": 14000 },
    { "epoch": 0.07, "learning_rate": 6.829172245942918e-05, "loss": 2.3022, "step": 14500 },
    { "epoch": 0.08, "learning_rate": 6.823277161601104e-05, "loss": 2.2821, "step": 15000 },
    { "epoch": 0.08, "learning_rate": 6.817393867427974e-05, "loss": 2.2826, "step": 15500 },
    { "epoch": 0.08, "learning_rate": 6.81149878308616e-05, "loss": 2.2232, "step": 16000 },
    { "epoch": 0.08, "learning_rate": 6.805603698744346e-05, "loss": 2.192, "step": 16500 },
    { "epoch": 0.09, "learning_rate": 6.799708614402532e-05, "loss": 2.2405, "step": 17000 },
    { "epoch": 0.09, "learning_rate": 6.793813530060718e-05, "loss": 2.1968, "step": 17500 },
    { "epoch": 0.09, "learning_rate": 6.787918445718906e-05, "loss": 2.2894, "step": 18000 },
    { "epoch": 0.09, "learning_rate": 6.782023361377092e-05, "loss": 2.2866, "step": 18500 },
    { "epoch": 0.1, "learning_rate": 6.776128277035276e-05, "loss": 2.1333, "step": 19000 },
    { "epoch": 0.1, "learning_rate": 6.770233192693464e-05, "loss": 2.0818, "step": 19500 },
    { "epoch": 0.1, "learning_rate": 6.76433810835165e-05, "loss": 2.0744, "step": 20000 },
    { "epoch": 0.1, "learning_rate": 6.758443024009836e-05, "loss": 2.2706, "step": 20500 },
    { "epoch": 0.11, "learning_rate": 6.752547939668022e-05, "loss": 2.1421, "step": 21000 },
    { "epoch": 0.11, "learning_rate": 6.746652855326208e-05, "loss": 2.1903, "step": 21500 },
    { "epoch": 0.11, "learning_rate": 6.740769561153078e-05, "loss": 2.0703, "step": 22000 },
    { "epoch": 0.11, "learning_rate": 6.734874476811264e-05, "loss": 2.0504, "step": 22500 },
    { "epoch": 0.12, "learning_rate": 6.728991182638134e-05, "loss": 2.144, "step": 23000 },
    { "epoch": 0.12, "learning_rate": 6.72309609829632e-05, "loss": 2.0276, "step": 23500 },
    { "epoch": 0.12, "learning_rate": 6.717201013954506e-05, "loss": 1.9879, "step": 24000 },
    { "epoch": 0.12, "learning_rate": 6.711305929612692e-05, "loss": 2.0549, "step": 24500 },
    { "epoch": 0.13, "learning_rate": 6.705410845270878e-05, "loss": 2.0771, "step": 25000 },
    { "epoch": 0.13, "learning_rate": 6.699515760929065e-05, "loss": 1.9698, "step": 25500 },
    { "epoch": 0.13, "learning_rate": 6.693644256924618e-05, "loss": 1.9929, "step": 26000 },
    { "epoch": 0.13, "learning_rate": 6.687749172582804e-05, "loss": 2.0061, "step": 26500 },
    { "epoch": 0.14, "learning_rate": 6.68185408824099e-05, "loss": 1.955, "step": 27000 },
    { "epoch": 0.14, "learning_rate": 6.675959003899176e-05, "loss": 2.0245, "step": 27500 },
    { "epoch": 0.14, "learning_rate": 6.670063919557362e-05, "loss": 1.9558, "step": 28000 },
    { "epoch": 0.14, "learning_rate": 6.66416883521555e-05, "loss": 1.996, "step": 28500 },
    { "epoch": 0.15, "learning_rate": 6.65828554104242e-05, "loss": 1.9741, "step": 29000 },
    { "epoch": 0.15, "learning_rate": 6.652390456700604e-05, "loss": 1.9904, "step": 29500 },
    { "epoch": 0.15, "learning_rate": 6.646507162527474e-05, "loss": 1.9764, "step": 30000 },
    { "epoch": 0.15, "learning_rate": 6.64061207818566e-05, "loss": 1.9839, "step": 30500 },
    { "epoch": 0.16, "learning_rate": 6.634716993843846e-05, "loss": 1.8744, "step": 31000 },
    { "epoch": 0.16, "learning_rate": 6.628821909502034e-05, "loss": 1.9974, "step": 31500 },
    { "epoch": 0.16, "learning_rate": 6.62292682516022e-05, "loss": 1.9997, "step": 32000 },
    { "epoch": 0.16, "learning_rate": 6.617031740818406e-05, "loss": 1.9917, "step": 32500 },
    { "epoch": 0.17, "learning_rate": 6.611148446645274e-05, "loss": 1.9973, "step": 33000 },
    { "epoch": 0.17, "learning_rate": 6.605253362303462e-05, "loss": 1.8879, "step": 33500 },
    { "epoch": 0.17, "learning_rate": 6.599358277961648e-05, "loss": 1.9089, "step": 34000 },
    { "epoch": 0.17, "learning_rate": 6.593463193619834e-05, "loss": 2.0488, "step": 34500 },
    { "epoch": 0.18, "learning_rate": 6.58756810927802e-05, "loss": 1.7955, "step": 35000 },
    { "epoch": 0.18, "learning_rate": 6.581673024936206e-05, "loss": 1.8981, "step": 35500 },
    { "epoch": 0.18, "learning_rate": 6.575777940594392e-05, "loss": 1.9835, "step": 36000 },
    { "epoch": 0.18, "learning_rate": 6.56988285625258e-05, "loss": 1.8466, "step": 36500 },
    { "epoch": 0.19, "learning_rate": 6.563987771910764e-05, "loss": 1.8713, "step": 37000 },
    { "epoch": 0.19, "learning_rate": 6.55809268756895e-05, "loss": 1.8304, "step": 37500 },
    { "epoch": 0.19, "learning_rate": 6.552197603227137e-05, "loss": 1.8735, "step": 38000 },
    { "epoch": 0.19, "learning_rate": 6.546302518885323e-05, "loss": 1.8013, "step": 38500 },
    { "epoch": 0.2, "learning_rate": 6.54040743454351e-05, "loss": 1.7514, "step": 39000 },
    { "epoch": 0.2, "learning_rate": 6.53452414037038e-05, "loss": 1.8538, "step": 39500 },
    { "epoch": 0.2, "learning_rate": 6.528629056028566e-05, "loss": 1.8968, "step": 40000 },
    { "epoch": 0.2, "learning_rate": 6.522745761855434e-05, "loss": 1.8126, "step": 40500 },
    { "epoch": 0.21, "learning_rate": 6.516850677513622e-05, "loss": 1.7436, "step": 41000 },
    { "epoch": 0.21, "learning_rate": 6.510955593171808e-05, "loss": 1.8164, "step": 41500 },
    { "epoch": 0.21, "learning_rate": 6.505060508829994e-05, "loss": 1.8852, "step": 42000 },
    { "epoch": 0.21, "learning_rate": 6.49916542448818e-05, "loss": 1.7415, "step": 42500 },
    { "epoch": 0.22, "learning_rate": 6.493270340146366e-05, "loss": 1.7809, "step": 43000 },
    { "epoch": 0.22, "learning_rate": 6.487375255804552e-05, "loss": 1.8898, "step": 43500 },
    { "epoch": 0.22, "learning_rate": 6.481480171462739e-05, "loss": 1.7151, "step": 44000 },
    { "epoch": 0.22, "learning_rate": 6.475585087120925e-05, "loss": 1.821, "step": 44500 },
    { "epoch": 0.23, "learning_rate": 6.469713583116478e-05, "loss": 1.872, "step": 45000 },
    { "epoch": 0.23, "learning_rate": 6.463818498774664e-05, "loss": 1.8684, "step": 45500 },
    { "epoch": 0.23, "learning_rate": 6.45792341443285e-05, "loss": 1.8248, "step": 46000 },
    { "epoch": 0.23, "learning_rate": 6.452028330091036e-05, "loss": 1.7672, "step": 46500 },
    { "epoch": 0.24, "learning_rate": 6.446133245749223e-05, "loss": 1.9444, "step": 47000 },
    { "epoch": 0.24, "learning_rate": 6.440238161407409e-05, "loss": 1.8666, "step": 47500 },
    { "epoch": 0.24, "learning_rate": 6.434343077065595e-05, "loss": 1.7939, "step": 48000 },
    { "epoch": 0.25, "learning_rate": 6.428459782892464e-05, "loss": 1.7606, "step": 48500 },
    { "epoch": 0.25, "learning_rate": 6.422564698550651e-05, "loss": 1.7548, "step": 49000 },
    { "epoch": 0.25, "learning_rate": 6.416669614208837e-05, "loss": 1.8124, "step": 49500 },
    { "epoch": 0.25, "learning_rate": 6.410774529867023e-05, "loss": 1.7206, "step": 50000 },
    { "epoch": 0.26, "learning_rate": 6.40487944552521e-05, "loss": 1.7948, "step": 50500 },
    { "epoch": 0.26, "learning_rate": 6.398984361183395e-05, "loss": 1.6771, "step": 51000 },
    { "epoch": 0.26, "learning_rate": 6.393089276841581e-05, "loss": 1.6329, "step": 51500 },
    { "epoch": 0.26, "learning_rate": 6.387194192499767e-05, "loss": 1.7638, "step": 52000 },
    { "epoch": 0.27, "learning_rate": 6.381299108157953e-05, "loss": 1.7988, "step": 52500 },
    { "epoch": 0.27, "learning_rate": 6.37540402381614e-05, "loss": 1.7728, "step": 53000 },
    { "epoch": 0.27, "learning_rate": 6.369508939474327e-05, "loss": 1.7844, "step": 53500 },
    { "epoch": 0.27, "learning_rate": 6.363613855132513e-05, "loss": 1.8386, "step": 54000 },
    { "epoch": 0.28, "learning_rate": 6.357718770790699e-05, "loss": 1.8122, "step": 54500 },
    { "epoch": 0.28, "learning_rate": 6.351823686448885e-05, "loss": 1.741, "step": 55000 },
    { "epoch": 0.28, "learning_rate": 6.345928602107071e-05, "loss": 1.6201, "step": 55500 },
    { "epoch": 0.28, "learning_rate": 6.340033517765257e-05, "loss": 1.6551, "step": 56000 },
    { "epoch": 0.29, "learning_rate": 6.334138433423443e-05, "loss": 1.6896, "step": 56500 },
    { "epoch": 0.29, "learning_rate": 6.328243349081629e-05, "loss": 1.6738, "step": 57000 },
    { "epoch": 0.29, "learning_rate": 6.322348264739815e-05, "loss": 1.7253, "step": 57500 },
    { "epoch": 0.29, "learning_rate": 6.316453180398002e-05, "loss": 1.6887, "step": 58000 },
    { "epoch": 0.3, "learning_rate": 6.310581676393555e-05, "loss": 1.7441, "step": 58500 },
    { "epoch": 0.3, "learning_rate": 6.304698382220425e-05, "loss": 1.6825, "step": 59000 },
    { "epoch": 0.3, "learning_rate": 6.298803297878611e-05, "loss": 1.7008, "step": 59500 },
    { "epoch": 0.3, "learning_rate": 6.292908213536797e-05, "loss": 1.7882, "step": 60000 },
    { "epoch": 0.31, "learning_rate": 6.287013129194983e-05, "loss": 1.7428, "step": 60500 },
    { "epoch": 0.31, "learning_rate": 6.281118044853169e-05, "loss": 1.6972, "step": 61000 },
    { "epoch": 0.31, "learning_rate": 6.275234750680039e-05, "loss": 1.7379, "step": 61500 },
    { "epoch": 0.31, "learning_rate": 6.269339666338225e-05, "loss": 1.5953, "step": 62000 },
    { "epoch": 0.32, "learning_rate": 6.263444581996413e-05, "loss": 1.6816, "step": 62500 },
    { "epoch": 0.32, "learning_rate": 6.257549497654597e-05, "loss": 1.7044, "step": 63000 },
    { "epoch": 0.32, "learning_rate": 6.251666203481467e-05, "loss": 1.6617, "step": 63500 },
    { "epoch": 0.32, "learning_rate": 6.245771119139653e-05, "loss": 1.7058, "step": 64000 },
    { "epoch": 0.33, "learning_rate": 6.239876034797841e-05, "loss": 1.6949, "step": 64500 },
    { "epoch": 0.33, "learning_rate": 6.233980950456027e-05, "loss": 1.6324, "step": 65000 },
    { "epoch": 0.33, "learning_rate": 6.228085866114213e-05, "loss": 1.6314, "step": 65500 },
    { "epoch": 0.33, "learning_rate": 6.222190781772399e-05, "loss": 1.7001, "step": 66000 },
    { "epoch": 0.34, "learning_rate": 6.216295697430585e-05, "loss": 1.6162, "step": 66500 },
    { "epoch": 0.34, "learning_rate": 6.210400613088771e-05, "loss": 1.6579, "step": 67000 },
    { "epoch": 0.34, "learning_rate": 6.204505528746957e-05, "loss": 1.6837, "step": 67500 },
    { "epoch": 0.34, "learning_rate": 6.198622234573827e-05, "loss": 1.6529, "step": 68000 },
    { "epoch": 0.35, "learning_rate": 6.192727150232013e-05, "loss": 1.6706, "step": 68500 },
    { "epoch": 0.35, "learning_rate": 6.186832065890199e-05, "loss": 1.6849, "step": 69000 },
    { "epoch": 0.35, "learning_rate": 6.180936981548386e-05, "loss": 1.5789, "step": 69500 },
    { "epoch": 0.35, "learning_rate": 6.175053687375255e-05, "loss": 1.6463, "step": 70000 },
    { "epoch": 0.36, "learning_rate": 6.169158603033441e-05, "loss": 1.6165, "step": 70500 },
    { "epoch": 0.36, "learning_rate": 6.163263518691627e-05, "loss": 1.6316, "step": 71000 },
    { "epoch": 0.36, "learning_rate": 6.157368434349813e-05, "loss": 1.6751, "step": 71500 },
    { "epoch": 0.36, "learning_rate": 6.151473350008e-05, "loss": 1.6096, "step": 72000 },
    { "epoch": 0.37, "learning_rate": 6.145578265666186e-05, "loss": 1.6837, "step": 72500 },
    { "epoch": 0.37, "learning_rate": 6.139683181324373e-05, "loss": 1.5968, "step": 73000 },
    { "epoch": 0.37, "learning_rate": 6.133799887151243e-05, "loss": 1.7028, "step": 73500 },
    { "epoch": 0.37, "learning_rate": 6.127904802809427e-05, "loss": 1.6685, "step": 74000 },
    { "epoch": 0.38, "learning_rate": 6.122009718467615e-05, "loss": 1.669, "step": 74500 },
    { "epoch": 0.38, "learning_rate": 6.1161146341258e-05, "loss": 1.557, "step": 75000 },
    { "epoch": 0.38, "learning_rate": 6.11023133995267e-05, "loss": 1.592, "step": 75500 },
    { "epoch": 0.38, "learning_rate": 6.104336255610857e-05, "loss": 1.6474, "step": 76000 },
    { "epoch": 0.39, "learning_rate": 6.098441171269043e-05, "loss": 1.6673, "step": 76500 },
    { "epoch": 0.39, "learning_rate": 6.0925460869272294e-05, "loss": 1.5011, "step": 77000 },
    { "epoch": 0.39, "learning_rate": 6.0866510025854154e-05, "loss": 1.6295, "step": 77500 },
    { "epoch": 0.39, "learning_rate": 6.080755918243601e-05, "loss": 1.5999, "step": 78000 },
    { "epoch": 0.4, "learning_rate": 6.0748608339017875e-05, "loss": 1.607, "step": 78500 },
    { "epoch": 0.4, "learning_rate": 6.0689657495599735e-05, "loss": 1.5911, "step": 79000 },
    { "epoch": 0.4, "learning_rate": 6.0630824553868435e-05, "loss": 1.6111, "step": 79500 },
    { "epoch": 0.4, "learning_rate": 6.0571873710450295e-05, "loss": 1.6162, "step": 80000 },
    { "epoch": 0.41, "learning_rate": 6.0512922867032156e-05, "loss": 1.6114, "step": 80500 },
    { "epoch": 0.41, "learning_rate": 6.0454089925300856e-05, "loss": 1.5766, "step": 81000 },
    { "epoch": 0.41, "learning_rate": 6.0395139081882716e-05, "loss": 1.656, "step": 81500 },
    { "epoch": 0.41, "learning_rate": 6.0336188238464576e-05, "loss": 1.544, "step": 82000 },
    { "epoch": 0.42, "learning_rate": 6.0277237395046437e-05, "loss": 1.6107, "step": 82500 },
    { "epoch": 0.42, "learning_rate": 6.021840445331514e-05, "loss": 1.6119, "step": 83000 },
    { "epoch": 0.42, "learning_rate": 6.0159453609897e-05, "loss": 1.6393, "step": 83500 },
    { "epoch": 0.42, "learning_rate": 6.01006206681657e-05, "loss": 1.6269, "step": 84000 },
    { "epoch": 0.43, "learning_rate": 6.0041669824747565e-05, "loss": 1.594, "step": 84500 },
    { "epoch": 0.43, "learning_rate": 5.998271898132942e-05, "loss": 1.5672, "step": 85000 },
    { "epoch": 0.43, "learning_rate": 5.992376813791128e-05, "loss": 1.595, "step": 85500 },
    { "epoch": 0.43, "learning_rate": 5.9864817294493145e-05, "loss": 1.4975, "step": 86000 },
    { "epoch": 0.44, "learning_rate": 5.980598435276184e-05, "loss": 1.6532, "step": 86500 },
    { "epoch": 0.44, "learning_rate": 5.9747033509343706e-05, "loss": 1.5118, "step": 87000 },
    { "epoch": 0.44, "learning_rate": 5.9688082665925566e-05, "loss": 1.504, "step": 87500 },
    { "epoch": 0.44, "learning_rate": 5.962913182250743e-05, "loss": 1.5813, "step": 88000 },
    { "epoch": 0.45, "learning_rate": 5.9570180979089286e-05, "loss": 1.5667, "step": 88500 },
    { "epoch": 0.45, "learning_rate": 5.951123013567115e-05, "loss": 1.5856, "step": 89000 },
    { "epoch": 0.45, "learning_rate": 5.945227929225301e-05, "loss": 1.5393, "step": 89500 },
    { "epoch": 0.45, "learning_rate": 5.9393328448834874e-05, "loss": 1.5786, "step": 90000 },
    { "epoch": 0.46, "learning_rate": 5.9334495507103574e-05, "loss": 1.6104, "step": 90500 },
    { "epoch": 0.46, "learning_rate": 5.9275544663685434e-05, "loss": 1.5735, "step": 91000 },
    { "epoch": 0.46, "learning_rate": 5.9216711721954135e-05, "loss": 1.6001, "step": 91500 },
    { "epoch": 0.46, "learning_rate": 5.915776087853599e-05, "loss": 1.6341, "step": 92000 },
    { "epoch": 0.47, "learning_rate": 5.909881003511785e-05, "loss": 1.551, "step": 92500 },
    { "epoch": 0.47, "learning_rate": 5.9039859191699715e-05, "loss": 1.6586, "step": 93000 },
    { "epoch": 0.47, "learning_rate": 5.8980908348281576e-05, "loss": 1.594, "step": 93500 },
    { "epoch": 0.47, "learning_rate": 5.892195750486344e-05, "loss": 1.5156, "step": 94000 },
    { "epoch": 0.48, "learning_rate": 5.88630066614453e-05, "loss": 1.5512, "step": 94500 },
    { "epoch": 0.48, "learning_rate": 5.880405581802716e-05, "loss": 1.5811, "step": 95000 },
    { "epoch": 0.48, "learning_rate": 5.874510497460903e-05, "loss": 1.6201, "step": 95500 },
    { "epoch": 0.49, "learning_rate": 5.868627203287772e-05, "loss": 1.5383, "step": 96000 },
    { "epoch": 0.49, "learning_rate": 5.862743909114642e-05, "loss": 1.5245, "step": 96500 },
    { "epoch": 0.49, "learning_rate": 5.8568488247728284e-05, "loss": 1.5338, "step": 97000 },
    { "epoch": 0.49, "learning_rate": 5.850965530599698e-05, "loss": 1.5225, "step": 97500 },
    { "epoch": 0.5, "learning_rate": 5.8450704462578845e-05, "loss": 1.5189, "step": 98000 },
    { "epoch": 0.5, "learning_rate": 5.8391871520847545e-05, "loss": 1.5908, "step": 98500 },
    { "epoch": 0.5, "learning_rate": 5.83329206774294e-05, "loss": 1.6402, "step": 99000 },
    { "epoch": 0.5, "learning_rate": 5.827396983401126e-05, "loss": 1.5652, "step": 99500 },
    { "epoch": 0.51, "learning_rate": 5.8215018990593126e-05, "loss": 1.517, "step": 100000 },
    { "epoch": 0.51, "learning_rate": 5.8156068147174986e-05, "loss": 1.5143, "step": 100500 },
    { "epoch": 0.51, "learning_rate": 5.8097117303756846e-05, "loss": 1.5966, "step": 101000 },
    { "epoch": 0.51, "learning_rate": 5.803816646033871e-05, "loss": 1.5101, "step": 101500 },
    { "epoch": 0.52, "learning_rate": 5.7979215616920574e-05, "loss": 1.5415, "step": 102000 },
    { "epoch": 0.52, "learning_rate": 5.7920264773502434e-05, "loss": 1.5355, "step": 102500 },
    { "epoch": 0.52, "learning_rate": 5.7861313930084294e-05, "loss": 1.5454, "step": 103000 },
    { "epoch": 0.52, "learning_rate": 5.7802363086666154e-05, "loss": 1.5042, "step": 103500 },
    { "epoch": 0.53, "learning_rate": 5.7743412243248014e-05, "loss": 1.562, "step": 104000 },
    { "epoch": 0.53, "learning_rate": 5.768446139982988e-05, "loss": 1.5272, "step": 104500 },
    { "epoch": 0.53, "learning_rate": 5.7625628458098575e-05, "loss": 1.5825, "step": 105000 },
    { "epoch": 0.53, "learning_rate": 5.756667761468044e-05, "loss": 1.4985, "step": 105500 },
    { "epoch": 0.54, "learning_rate": 5.75077267712623e-05, "loss": 1.4954, "step": 106000 },
    { "epoch": 0.54, "learning_rate": 5.744877592784417e-05, "loss": 1.5467, "step": 106500 },
    { "epoch": 0.54, "learning_rate": 5.738982508442602e-05, "loss": 1.5303, "step": 107000 },
    { "epoch": 0.54, "learning_rate": 5.733087424100788e-05, "loss": 1.4862, "step": 107500 },
    { "epoch": 0.55, "learning_rate": 5.727192339758974e-05, "loss": 1.5106, "step": 108000 },
    { "epoch": 0.55, "learning_rate": 5.721297255417161e-05, "loss": 1.5593, "step": 108500 },
    { "epoch": 0.55, "learning_rate": 5.715402171075347e-05, "loss": 1.5959, "step": 109000 },
    { "epoch": 0.55, "learning_rate": 5.709507086733534e-05, "loss": 1.5636, "step": 109500 },
    { "epoch": 0.56, "learning_rate": 5.703623792560403e-05, "loss": 1.5684, "step": 110000 },
    { "epoch": 0.56, "learning_rate": 5.697728708218589e-05, "loss": 1.4454, "step": 110500 },
    { "epoch": 0.56, "learning_rate": 5.691833623876775e-05, "loss": 1.5496, "step": 111000 },
    { "epoch": 0.56, "learning_rate": 5.685938539534961e-05, "loss": 1.4763, "step": 111500 },
    { "epoch": 0.57, "learning_rate": 5.680055245361831e-05, "loss": 1.5981, "step": 112000 },
    { "epoch": 0.57, "learning_rate": 5.674160161020018e-05, "loss": 1.4676, "step": 112500 },
    { "epoch": 0.57, "learning_rate": 5.668288657015571e-05, "loss": 1.6023, "step": 113000 },
    { "epoch": 0.57, "learning_rate": 5.662393572673757e-05, "loss": 1.5173, "step": 113500 },
    { "epoch": 0.58, "learning_rate": 5.656498488331943e-05, "loss": 1.5024, "step": 114000 },
    { "epoch": 0.58, "learning_rate": 5.650615194158813e-05, "loss": 1.4905, "step": 114500 },
    { "epoch": 0.58, "learning_rate": 5.6447201098169994e-05, "loss": 1.5263, "step": 115000 },
    { "epoch": 0.58, "learning_rate": 5.6388250254751854e-05, "loss": 1.4874, "step": 115500 },
    { "epoch": 0.59, "learning_rate": 5.6329299411333714e-05, "loss": 1.4981, "step": 116000 },
    { "epoch": 0.59, "learning_rate": 5.627034856791558e-05, "loss": 1.524, "step": 116500 },
    { "epoch": 0.59, "learning_rate": 5.621139772449744e-05, "loss": 1.6569, "step": 117000 },
    { "epoch": 0.59, "learning_rate": 5.6152446881079295e-05, "loss": 1.4486, "step": 117500 },
    { "epoch": 0.6, "learning_rate": 5.609349603766116e-05, "loss": 1.4926, "step": 118000 },
    { "epoch": 0.6, "learning_rate": 5.603454519424302e-05, "loss": 1.5704, "step": 118500 },
    { "epoch": 0.6, "learning_rate": 5.597559435082488e-05, "loss": 1.565, "step": 119000 },
    { "epoch": 0.6, "learning_rate": 5.591664350740675e-05, "loss": 1.5233, "step": 119500 },
    { "epoch": 0.61, "learning_rate": 5.585769266398861e-05, "loss": 1.4826, "step": 120000 },
    { "epoch": 0.61, "learning_rate": 5.579874182057047e-05, "loss": 1.5609, "step": 120500 },
    { "epoch": 0.61, "learning_rate": 5.5739790977152336e-05, "loss": 1.5396, "step": 121000 },
    { "epoch": 0.61, "learning_rate": 5.568095803542103e-05, "loss": 1.4773, "step": 121500 },
    { "epoch": 0.62, "learning_rate": 5.562200719200289e-05, "loss": 1.5906, "step": 122000 },
    { "epoch": 0.62, "learning_rate": 5.556305634858475e-05, "loss": 1.619, "step": 122500 },
    { "epoch": 0.62, "learning_rate": 5.550410550516662e-05, "loss": 1.4244, "step": 123000 },
    { "epoch": 0.62, "learning_rate": 5.544515466174848e-05, "loss": 1.5547, "step": 123500 },
    { "epoch": 0.63, "learning_rate": 5.538620381833034e-05, "loss": 1.3988, "step": 124000 },
    { "epoch": 0.63, "learning_rate": 5.5327252974912205e-05, "loss": 1.4837, "step": 124500 },
    { "epoch": 0.63, "learning_rate": 5.5268302131494065e-05, "loss": 1.5698, "step": 125000 },
    { "epoch": 0.63, "learning_rate": 5.520935128807592e-05, "loss": 1.4586, "step": 125500 },
    { "epoch": 0.64, "learning_rate": 5.5150400444657786e-05, "loss": 1.4794, "step": 126000 },
    { "epoch": 0.64, "learning_rate": 5.5091449601239646e-05, "loss": 1.5044, "step": 126500 },
    { "epoch": 0.64, "learning_rate": 5.5032498757821506e-05, "loss": 1.4507, "step": 127000 },
    { "epoch": 0.64, "learning_rate": 5.497354791440337e-05, "loss": 1.348, "step": 127500 },
    { "epoch": 0.65, "learning_rate": 5.491459707098523e-05, "loss": 1.5163, "step": 128000 },
    { "epoch": 0.65, "learning_rate": 5.485564622756709e-05, "loss": 1.53, "step": 128500 },
    { "epoch": 0.65, "learning_rate": 5.479669538414896e-05, "loss": 1.515, "step": 129000 },
    { "epoch": 0.65, "learning_rate": 5.473786244241765e-05, "loss": 1.459, "step": 129500 },
    { "epoch": 0.66, "learning_rate": 5.4678911598999514e-05, "loss": 1.4868, "step": 130000 },
    { "epoch": 0.66, "learning_rate": 5.4620078657268215e-05, "loss": 1.4395, "step": 130500 },
    { "epoch": 0.66, "learning_rate": 5.4561127813850075e-05, "loss": 1.4272, "step": 131000 },
    { "epoch": 0.66, "learning_rate": 5.4502176970431935e-05, "loss": 1.4605, "step": 131500 },
    { "epoch": 0.67, "learning_rate": 5.44432261270138e-05, "loss": 1.607, "step": 132000 },
    { "epoch": 0.67, "learning_rate": 5.438427528359566e-05, "loss": 1.4993, "step": 132500 },
    { "epoch": 0.67, "learning_rate": 5.4325324440177516e-05, "loss": 1.461, "step": 133000 },
    { "epoch": 0.67, "learning_rate": 5.426637359675938e-05, "loss": 1.5128, "step": 133500 },
    { "epoch": 0.68, "learning_rate": 5.420742275334124e-05, "loss": 1.3986, "step": 134000 },
    { "epoch": 0.68, "learning_rate": 5.414858981160994e-05, "loss": 1.5693, "step": 134500 },
    { "epoch": 0.68, "learning_rate": 5.40896389681918e-05, "loss": 1.4682, "step": 135000 },
    { "epoch": 0.68, "learning_rate": 5.403068812477367e-05, "loss": 1.5161, "step": 135500 },
    { "epoch": 0.69, "learning_rate": 5.397173728135553e-05, "loss": 1.4837, "step": 136000 },
    { "epoch": 0.69, "learning_rate": 5.3912904339624224e-05, "loss": 1.4552, "step": 136500 },
    { "epoch": 0.69, "learning_rate": 5.3853953496206084e-05, "loss": 1.4588, "step": 137000 },
    { "epoch": 0.69, "learning_rate": 5.3795002652787945e-05, "loss": 1.462, "step": 137500 },
    { "epoch": 0.7, "learning_rate": 5.373605180936981e-05, "loss": 1.511, "step": 138000 },
    { "epoch": 0.7, "learning_rate": 5.367710096595167e-05, "loss": 1.3909, "step": 138500 },
    { "epoch": 0.7, "learning_rate": 5.361826802422037e-05, "loss": 1.456, "step": 139000 },
    { "epoch": 0.7, "learning_rate": 5.355931718080223e-05, "loss": 1.4701, "step": 139500 },
    { "epoch": 0.71, "learning_rate": 5.35003663373841e-05, "loss": 1.427, "step": 140000 },
    { "epoch": 0.71, "learning_rate": 5.344141549396595e-05, "loss": 1.4451, "step": 140500 },
    { "epoch": 0.71, "learning_rate": 5.338246465054781e-05, "loss": 1.5295, "step": 141000 },
    { "epoch": 0.71, "learning_rate": 5.332351380712968e-05, "loss": 1.4976, "step": 141500 },
    { "epoch": 0.72, "learning_rate": 5.326456296371154e-05, "loss": 1.4277, "step": 142000 },
    { "epoch": 0.72, "learning_rate": 5.32056121202934e-05, "loss": 1.3922, "step": 142500 },
    { "epoch": 0.72, "learning_rate": 5.31467791785621e-05, "loss": 1.5119, "step": 143000 },
    { "epoch": 0.73, "learning_rate": 5.30879462368308e-05, "loss": 1.4537, "step": 143500 },
    { "epoch": 0.73, "learning_rate": 5.3028995393412655e-05, "loss": 1.4404, "step": 144000 },
    { "epoch": 0.73, "learning_rate": 5.297004454999452e-05, "loss": 1.4981, "step": 144500 },
    { "epoch": 0.73, "learning_rate": 5.291121160826322e-05, "loss": 1.5137, "step": 145000 },
    { "epoch": 0.74, "learning_rate": 5.285226076484508e-05, "loss": 1.449, "step": 145500 },
    { "epoch": 0.74, "learning_rate": 5.279330992142694e-05, "loss": 1.5425, "step": 146000 },
    { "epoch": 0.74, "learning_rate": 5.273435907800881e-05, "loss": 1.4137, "step": 146500 },
    { "epoch": 0.74, "learning_rate": 5.267540823459067e-05, "loss": 1.3714, "step": 147000 },
    { "epoch": 0.75, "learning_rate": 5.261645739117252e-05, "loss": 1.5328, "step": 147500 },
    { "epoch": 0.75, "learning_rate": 5.2557624449441223e-05, "loss": 1.4122, "step": 148000 },
    { "epoch": 0.75, "learning_rate": 5.2498673606023084e-05, "loss": 1.4551, "step": 148500 },
    { "epoch": 0.75, "learning_rate": 5.243972276260495e-05, "loss": 1.4528, "step": 149000 },
    { "epoch": 0.76, "learning_rate": 5.238077191918681e-05, "loss": 1.5808, "step": 149500 },
    { "epoch": 0.76, "learning_rate": 5.232182107576867e-05, "loss": 1.4571, "step": 150000 },
    { "epoch": 0.76, "learning_rate": 5.226287023235054e-05, "loss": 1.4758, "step": 150500 },
    { "epoch": 0.76, "learning_rate": 5.22039193889324e-05, "loss": 1.5003, "step": 151000 },
    { "epoch": 0.77, "learning_rate": 5.214496854551425e-05, "loss": 1.4685, "step": 151500 },
    { "epoch": 0.77, "learning_rate": 5.208601770209612e-05, "loss": 1.4834, "step": 152000 },
    { "epoch": 0.77, "learning_rate": 5.202706685867798e-05, "loss": 1.3873, "step": 152500 },
    { "epoch": 0.77, "learning_rate": 5.196811601525984e-05, "loss": 1.409, "step": 153000 },
    { "epoch": 0.78, "learning_rate": 5.1909165171841706e-05, "loss": 1.4671, "step": 153500 },
    { "epoch": 0.78, "learning_rate": 5.1850332230110406e-05, "loss": 1.4043, "step": 154000 },
    { "epoch": 0.78, "learning_rate": 5.179138138669227e-05, "loss": 1.4753, "step": 154500 },
    { "epoch": 0.78, "learning_rate": 5.173243054327412e-05, "loss": 1.481, "step": 155000 },
    { "epoch": 0.79, "learning_rate": 5.167347969985599e-05, "loss": 1.5065, "step": 155500 },
    { "epoch": 0.79, "learning_rate": 5.161452885643785e-05, "loss": 1.4066, "step": 156000 },
    { "epoch": 0.79, "learning_rate": 5.155557801301971e-05, "loss": 1.3987, "step": 156500 },
    { "epoch": 0.79, "learning_rate": 5.1496627169601574e-05, "loss": 1.4025, "step": 157000 },
    { "epoch": 0.8, "learning_rate": 5.143779422787027e-05, "loss": 1.4775, "step": 157500 },
    { "epoch": 0.8, "learning_rate": 5.137896128613897e-05, "loss": 1.3659, "step": 158000 },
    { "epoch": 0.8, "learning_rate": 5.132001044272083e-05, "loss": 1.3547, "step": 158500 },
    { "epoch": 0.8, "learning_rate": 5.126105959930269e-05, "loss": 1.3893, "step": 159000 },
    { "epoch": 0.81, "learning_rate": 5.120210875588455e-05, "loss": 1.4349, "step": 159500 },
    { "epoch": 0.81, "learning_rate": 5.1143157912466416e-05, "loss": 1.3757, "step": 160000 },
    { "epoch": 0.81, "learning_rate": 5.1084207069048276e-05, "loss": 1.4795, "step": 160500 },
    { "epoch": 0.81, "learning_rate": 5.1025256225630137e-05, "loss": 1.431, "step": 161000 },
    { "epoch": 0.82, "learning_rate": 5.096642328389884e-05, "loss": 1.383, "step": 161500 },
    { "epoch": 0.82, "learning_rate": 5.0907472440480704e-05, "loss": 1.4149, "step": 162000 },
    { "epoch": 0.82, "learning_rate": 5.084852159706256e-05, "loss": 1.4028, "step": 162500 },
    { "epoch": 0.82, "learning_rate": 5.078968865533126e-05, "loss": 1.397, "step": 163000 },
    { "epoch": 0.83, "learning_rate": 5.073073781191312e-05, "loss": 1.4919, "step": 163500 },
    { "epoch": 0.83, "learning_rate": 5.067178696849498e-05, "loss": 1.347, "step": 164000 },
    { "epoch": 0.83, "learning_rate": 5.0612836125076845e-05, "loss": 1.379, "step": 164500 },
    { "epoch": 0.83, "learning_rate": 5.0553885281658705e-05, "loss": 1.4026, "step": 165000 },
    { "epoch": 0.84, "learning_rate": 5.0494934438240566e-05, "loss": 1.3372, "step": 165500 },
    { "epoch": 0.84, "learning_rate": 5.0435983594822426e-05, "loss": 1.4564, "step": 166000 },
    { "epoch": 0.84, "learning_rate": 5.0377032751404286e-05, "loss": 1.3968, "step": 166500 },
    { "epoch": 0.84, "learning_rate": 5.0318081907986146e-05, "loss": 1.4019, "step": 167000 },
    { "epoch": 0.85, "learning_rate": 5.0259248966254847e-05, "loss": 1.4203, "step": 167500 },
    { "epoch": 0.85, "learning_rate": 5.0200298122836714e-05, "loss": 1.4195, "step": 168000 },
    { "epoch": 0.85, "learning_rate": 5.0141347279418574e-05, "loss": 1.4314, "step": 168500 },
    { "epoch": 0.85, "learning_rate": 5.0082396436000434e-05, "loss": 1.4599, "step": 169000 },
    { "epoch": 0.86, "learning_rate": 5.00234455925823e-05, "loss": 1.3961, "step": 169500 },
    { "epoch": 0.86, "learning_rate": 4.9964494749164154e-05, "loss": 1.49, "step": 170000 },
    { "epoch": 0.86, "learning_rate": 4.9905543905746015e-05, "loss": 1.388, "step": 170500 },
    { "epoch": 0.86, "learning_rate": 4.984659306232788e-05, "loss": 1.3399, "step": 171000 },
    { "epoch": 0.87, "learning_rate": 4.9787760120596575e-05, "loss": 1.3559, "step": 171500 },
    { "epoch": 0.87, "learning_rate": 4.972880927717844e-05, "loss": 1.3839, "step": 172000 },
    { "epoch": 0.87, "learning_rate": 4.96698584337603e-05, "loss": 1.3597, "step": 172500 },
    { "epoch": 0.87, "learning_rate": 4.961090759034216e-05, "loss": 1.4653, "step": 173000 },
    { "epoch": 0.88, "learning_rate": 4.955195674692403e-05, "loss": 1.412, "step": 173500 },
    { "epoch": 0.88, "learning_rate": 4.949300590350588e-05, "loss": 1.3362, "step": 174000 },
    { "epoch": 0.88, "learning_rate": 4.943405506008774e-05, "loss": 1.3354, "step": 174500 },
    { "epoch": 0.88, "learning_rate": 4.9375222118356444e-05, "loss": 1.3451, "step": 175000 },
    { "epoch": 0.89, "learning_rate": 4.931627127493831e-05, "loss": 1.4159, "step": 175500 },
    { "epoch": 0.89, "learning_rate": 4.925732043152017e-05, "loss": 1.442, "step": 176000 },
    { "epoch": 0.89, "learning_rate": 4.919848748978887e-05, "loss": 1.3793, "step": 176500 },
    { "epoch": 0.89, "learning_rate": 4.913953664637073e-05, "loss": 1.4447, "step": 177000 },
    { "epoch": 0.9, "learning_rate": 4.9080585802952585e-05, "loss": 1.4068, "step": 177500 },
    { "epoch": 0.9, "learning_rate": 4.902163495953445e-05, "loss": 1.3833, "step": 178000 },
    { "epoch": 0.9, "learning_rate": 4.896268411611631e-05, "loss": 1.3802, "step": 178500 },
    { "epoch": 0.9, "learning_rate": 4.890373327269817e-05, "loss": 1.3929, "step": 179000 },
    { "epoch": 0.91, "learning_rate": 4.884478242928004e-05, "loss": 1.4342, "step": 179500 },
    { "epoch": 0.91, "learning_rate": 4.87858315858619e-05, "loss": 1.3436, "step": 180000 },
    { "epoch": 0.91, "learning_rate": 4.8726880742443766e-05, "loss": 1.386, "step": 180500 },
    { "epoch": 0.91, "learning_rate": 4.866804780071245e-05, "loss": 1.5735, "step": 181000 },
    { "epoch": 0.92, "learning_rate": 4.860909695729432e-05, "loss": 1.3181, "step": 181500 },
    { "epoch": 0.92, "learning_rate": 4.855014611387618e-05, "loss": 1.4031, "step": 182000 },
    { "epoch": 0.92, "learning_rate": 4.849131317214488e-05, "loss": 1.442, "step": 182500 },
    { "epoch": 0.92, "learning_rate": 4.843236232872674e-05, "loss": 1.4167, "step": 183000 },
    { "epoch": 0.93, "learning_rate": 4.837341148530861e-05, "loss": 1.3656, "step": 183500 },
    { "epoch": 0.93, "learning_rate": 4.831446064189047e-05, "loss": 1.3526, "step": 184000 },
    { "epoch": 0.93, "learning_rate": 4.825550979847233e-05, "loss": 1.3862, "step": 184500 },
    { "epoch": 0.93, "learning_rate": 4.819655895505418e-05, "loss": 1.3868, "step": 185000 },
    { "epoch": 0.94, "learning_rate": 4.813772601332288e-05, "loss": 1.4435, "step": 185500 },
    { "epoch": 0.94, "learning_rate": 4.807877516990475e-05, "loss": 1.4068, "step": 186000 },
    { "epoch": 0.94, "learning_rate": 4.801982432648661e-05, "loss": 1.3566, "step": 186500 },
    { "epoch": 0.94, "learning_rate": 4.796099138475531e-05, "loss": 1.3954, "step": 187000 },
    { "epoch": 0.95, "learning_rate": 4.790204054133717e-05, "loss": 1.4382, "step": 187500 },
    { "epoch": 0.95, "learning_rate": 4.784308969791904e-05, "loss": 1.4366, "step": 188000 },
    { "epoch": 0.95, "learning_rate": 4.778413885450089e-05, "loss": 1.3365, "step": 188500 },
    { "epoch": 0.96, "learning_rate": 4.772518801108275e-05, "loss": 1.323, "step": 189000 },
    { "epoch": 0.96, "learning_rate": 4.766623716766462e-05, "loss": 1.3379, "step": 189500 },
    { "epoch": 0.96, "learning_rate": 4.760728632424648e-05, "loss": 1.356, "step": 190000 },
    { "epoch": 0.96, "learning_rate": 4.754833548082834e-05, "loss": 1.4302, "step": 190500 },
    { "epoch": 0.97, "learning_rate": 4.7489384637410205e-05, "loss": 1.4302, "step": 191000 },
    { "epoch": 0.97, "learning_rate": 4.7430433793992065e-05, "loss": 1.4299, "step": 191500 },
    { "epoch": 0.97, "learning_rate": 4.7371482950573926e-05, "loss": 1.3922, "step": 192000 },
    { "epoch": 0.97, "learning_rate": 4.7312532107155786e-05, "loss": 1.3954, "step": 192500 },
    { "epoch": 0.98, "learning_rate": 4.7253581263737646e-05, "loss": 1.363, "step": 193000 },
    { "epoch": 0.98, "learning_rate": 4.719486622369318e-05, "loss": 1.4173, "step": 193500 },
    { "epoch": 0.98, "learning_rate": 4.713591538027505e-05, "loss": 1.3826, "step": 194000 },
    { "epoch": 0.98, "learning_rate": 4.707696453685691e-05, "loss": 1.4274, "step": 194500 },
    { "epoch": 0.99, "learning_rate": 4.701801369343877e-05, "loss": 1.4059, "step": 195000 },
    { "epoch": 0.99, "learning_rate": 4.6959062850020634e-05, "loss": 1.3678, "step": 195500 },
    { "epoch": 0.99, "learning_rate": 4.690011200660249e-05, "loss": 1.4029, "step": 196000 },
    { "epoch": 0.99, "learning_rate": 4.684116116318435e-05, "loss": 1.433, "step": 196500 },
    { "epoch": 1.0, "learning_rate": 4.678232822145305e-05, "loss": 1.3245, "step": 197000 },
    { "epoch": 1.0, "learning_rate": 4.672349527972175e-05, "loss": 1.3849, "step": 197500 },
    { "epoch": 1.0, "learning_rate": 4.666454443630361e-05, "loss": 1.2898, "step": 198000 },
    { "epoch": 1.0, "learning_rate": 4.6605593592885476e-05, "loss": 1.4067, "step": 198500 },
    { "epoch": 1.01, "learning_rate": 4.6546642749467336e-05, "loss": 1.3511, "step": 199000 },
    { "epoch": 1.01, "learning_rate": 4.648769190604919e-05, "loss": 1.3504, "step": 199500 },
    { "epoch": 1.01, "learning_rate": 4.642885896431789e-05, "loss": 1.3707, "step": 200000 },
    { "epoch": 1.01, "learning_rate": 4.636990812089976e-05, "loss": 1.3405, "step": 200500 },
    { "epoch": 1.02, "learning_rate": 4.631095727748162e-05, "loss": 1.4657, "step": 201000 },
    { "epoch": 1.02, "learning_rate": 4.625200643406348e-05, "loss": 1.3594, "step": 201500 },
    { "epoch": 1.02, "learning_rate": 4.6193055590645344e-05, "loss": 1.3988, "step": 202000 },
    { "epoch": 1.02, "learning_rate": 4.6134104747227204e-05, "loss": 1.4282, "step": 202500 },
    { "epoch": 1.03, "learning_rate": 4.607515390380906e-05, "loss": 1.3753, "step": 203000 },
    { "epoch": 1.03, "learning_rate": 4.6016203060390925e-05, "loss": 1.3933, "step": 203500 },
    { "epoch": 1.03, "learning_rate": 4.5957252216972785e-05, "loss": 1.3739, "step": 204000 },
    { "epoch": 1.03, "learning_rate": 4.5898301373554645e-05, "loss": 1.4223, "step": 204500 },
    { "epoch": 1.04, "learning_rate": 4.583935053013651e-05, "loss": 1.3495, "step": 205000 },
    { "epoch": 1.04, "learning_rate": 4.5780517588405206e-05, "loss": 1.2631, "step": 205500 },
    { "epoch": 1.04, "learning_rate": 4.572156674498707e-05, "loss": 1.3649, "step": 206000 },
    { "epoch": 1.04, "learning_rate": 4.566261590156893e-05, "loss": 1.4423, "step": 206500 },
    { "epoch": 1.05, "learning_rate": 4.5603665058150786e-05, "loss": 1.3753, "step": 207000 },
    { "epoch": 1.05, "learning_rate": 4.5544714214732653e-05, "loss": 1.3592, "step": 207500 },
    { "epoch": 1.05, "learning_rate": 4.5485763371314514e-05, "loss": 1.4111, "step": 208000 },
    { "epoch": 1.05, "learning_rate": 4.5426812527896374e-05, "loss": 1.4252, "step": 208500 },
    { "epoch": 1.06, "learning_rate": 4.536786168447824e-05, "loss": 1.3718, "step": 209000 },
    { "epoch": 1.06, "learning_rate": 4.53089108410601e-05, "loss": 1.3504, "step": 209500 },
    { "epoch": 1.06, "learning_rate": 4.524995999764197e-05, "loss": 1.3337, "step": 210000 },
    { "epoch": 1.06, "learning_rate": 4.519100915422383e-05, "loss": 1.318, "step": 210500 },
    { "epoch": 1.07, "learning_rate": 4.513217621249252e-05, "loss": 1.4068, "step": 211000 },
    { "epoch": 1.07, "learning_rate": 4.507322536907438e-05, "loss": 1.2916, "step": 211500 },
    { "epoch": 1.07, "learning_rate": 4.501427452565624e-05, "loss": 1.3078, "step": 212000 },
    { "epoch": 1.07, "learning_rate": 4.495532368223811e-05, "loss": 1.3244, "step": 212500 },
    { "epoch": 1.08, "learning_rate": 4.489637283881997e-05, "loss": 1.4635, "step": 213000 },
    { "epoch": 1.08, "learning_rate": 4.483753989708867e-05, "loss": 1.3647, "step": 213500 },
    { "epoch": 1.08, "learning_rate": 4.477858905367053e-05, "loss": 1.3064, "step": 214000 },
    { "epoch": 1.08, "learning_rate": 4.4719756111939224e-05, "loss": 1.3345, "step": 214500 },
    { "epoch": 1.09, "learning_rate": 4.4660805268521084e-05, "loss": 1.3723, "step": 215000 },
    { "epoch": 1.09, "learning_rate": 4.460185442510295e-05, "loss": 1.3303, "step": 215500 },
    { "epoch": 1.09, "learning_rate": 4.454290358168481e-05, "loss": 1.3384, "step": 216000 },
    { "epoch": 1.09, "learning_rate": 4.448395273826667e-05, "loss": 1.3998, "step": 216500 },
    { "epoch": 1.1, "learning_rate": 4.442511979653537e-05, "loss": 1.4623, "step": 217000 },
    { "epoch": 1.1, "learning_rate": 4.436616895311724e-05, "loss": 1.3193, "step": 217500 },
    { "epoch": 1.1, "learning_rate": 4.430721810969909e-05, "loss": 1.3703, "step": 218000 },
    { "epoch": 1.1, "learning_rate": 4.424826726628095e-05, "loss": 1.3235, "step": 218500 },
    { "epoch": 1.11, "learning_rate": 4.418931642286282e-05, "loss": 1.252, "step": 219000 },
    { "epoch": 1.11, "learning_rate": 4.413036557944468e-05, "loss": 1.3262, "step": 219500 },
    { "epoch": 1.11, "learning_rate": 4.407141473602654e-05, "loss": 1.287, "step": 220000 },
    { "epoch": 1.11, "learning_rate": 4.401246389260841e-05, "loss": 1.3144, "step": 220500 },
    { "epoch": 1.12, "learning_rate": 4.395351304919027e-05, "loss": 1.3907, "step": 221000 },
    { "epoch": 1.12, "learning_rate": 4.389456220577213e-05, "loss": 1.321, "step": 221500 },
    { "epoch": 1.12, "learning_rate": 4.383572926404082e-05, "loss": 1.2963, "step": 222000 },
    { "epoch": 1.12, "learning_rate": 4.377677842062268e-05, "loss": 1.3762, "step": 222500 },
    { "epoch": 1.13, "learning_rate": 4.371794547889138e-05, "loss": 1.4147, "step": 223000 },
    { "epoch": 1.13, "learning_rate": 4.365899463547325e-05, "loss": 1.317, "step": 223500 },
    { "epoch": 1.13, "learning_rate": 4.360004379205511e-05, "loss": 1.353, "step": 224000 },
    { "epoch": 1.13, "learning_rate": 4.354109294863697e-05, "loss": 1.2843, "step": 224500 },
    { "epoch": 1.14, "learning_rate": 4.3482142105218836e-05, "loss": 1.2709, "step": 225000 },
    { "epoch": 1.14, "learning_rate": 4.342319126180069e-05, "loss": 1.3417, "step": 225500 },
    { "epoch": 1.14, "learning_rate": 4.336424041838255e-05, "loss": 1.3168, "step": 226000 },
    { "epoch": 1.14, "learning_rate": 4.3305289574964416e-05, "loss": 1.3149, "step": 226500 },
    { "epoch": 1.15, "learning_rate": 4.324657453491995e-05, "loss": 1.4086, "step": 227000 },
    { "epoch": 1.15, "learning_rate": 4.318762369150181e-05, "loss": 1.4308, "step": 227500 },
    { "epoch": 1.15, "learning_rate": 4.312867284808368e-05, "loss": 1.3158, "step": 228000 },
    { "epoch": 1.15, "learning_rate": 4.306972200466554e-05, "loss": 1.3374, "step": 228500 },
    { "epoch": 1.16, "learning_rate": 4.301077116124739e-05, "loss": 1.2938, "step": 229000 },
    { "epoch": 1.16, "learning_rate": 4.295182031782926e-05, "loss": 1.3818, "step": 229500 },
    { "epoch": 1.16, "learning_rate": 4.289286947441112e-05, "loss": 1.3054, "step": 230000 },
    { "epoch": 1.16, "learning_rate": 4.283391863099298e-05, "loss": 1.4121, "step": 230500 },
    { "epoch": 1.17, "learning_rate": 4.2774967787574845e-05, "loss": 1.3855, "step": 231000 },
    { "epoch": 1.17, "learning_rate": 4.271625274753038e-05, "loss": 1.3088, "step": 231500 },
    { "epoch": 1.17, "learning_rate": 4.265730190411224e-05, "loss": 1.2938, "step": 232000 },
    { "epoch": 1.17, "learning_rate": 4.25983510606941e-05, "loss": 1.3244, "step": 232500 },
    { "epoch": 1.18, "learning_rate": 4.253940021727596e-05, "loss": 1.303, "step": 233000 },
    { "epoch": 1.18, "learning_rate": 4.248044937385782e-05, "loss": 1.2691, "step": 233500 },
    { "epoch": 1.18, "learning_rate": 4.242149853043969e-05, "loss": 1.2841, "step": 234000 },
    { "epoch": 1.18, "learning_rate": 4.236254768702155e-05, "loss": 1.4295, "step": 234500 },
    { "epoch": 1.19, "learning_rate": 4.230359684360341e-05, "loss": 1.2992, "step": 235000 },
    { "epoch": 1.19, "learning_rate": 4.2244646000185274e-05, "loss": 1.3605, "step": 235500 },
    { "epoch": 1.19, "learning_rate": 4.2185695156767135e-05, "loss": 1.3952, "step": 236000 },
    { "epoch": 1.2, "learning_rate": 4.212674431334899e-05, "loss": 1.3236, "step": 236500 },
    { "epoch": 1.2, "learning_rate": 4.2067793469930855e-05, "loss": 1.2393, "step": 237000 },
    { "epoch": 1.2, "learning_rate": 4.2008842626512715e-05, "loss": 1.3406, "step": 237500 },
    { "epoch": 1.2, "learning_rate": 4.195024548815509e-05, "loss": 1.2906, "step": 238000 },
    {
| "epoch": 1.21, | |
| "learning_rate": 4.189129464473695e-05, | |
| "loss": 1.3258, | |
| "step": 238500 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 4.1832343801318816e-05, | |
| "loss": 1.3759, | |
| "step": 239000 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 4.177339295790067e-05, | |
| "loss": 1.375, | |
| "step": 239500 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 4.171444211448253e-05, | |
| "loss": 1.3272, | |
| "step": 240000 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 4.16554912710644e-05, | |
| "loss": 1.3385, | |
| "step": 240500 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 4.159654042764626e-05, | |
| "loss": 1.3197, | |
| "step": 241000 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 4.153758958422812e-05, | |
| "loss": 1.2662, | |
| "step": 241500 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 4.1478638740809985e-05, | |
| "loss": 1.3402, | |
| "step": 242000 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 4.141980579907868e-05, | |
| "loss": 1.3483, | |
| "step": 242500 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 4.136097285734737e-05, | |
| "loss": 1.2599, | |
| "step": 243000 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 4.130202201392924e-05, | |
| "loss": 1.274, | |
| "step": 243500 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 4.12430711705111e-05, | |
| "loss": 1.372, | |
| "step": 244000 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 4.118412032709296e-05, | |
| "loss": 1.3183, | |
| "step": 244500 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 4.1125169483674826e-05, | |
| "loss": 1.2656, | |
| "step": 245000 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 4.106633654194352e-05, | |
| "loss": 1.2663, | |
| "step": 245500 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 4.100750360021222e-05, | |
| "loss": 1.3134, | |
| "step": 246000 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 4.0948670658480914e-05, | |
| "loss": 1.3478, | |
| "step": 246500 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 4.0889719815062774e-05, | |
| "loss": 1.323, | |
| "step": 247000 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 4.083076897164464e-05, | |
| "loss": 1.301, | |
| "step": 247500 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 4.07718181282265e-05, | |
| "loss": 1.3122, | |
| "step": 248000 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 4.071286728480836e-05, | |
| "loss": 1.3037, | |
| "step": 248500 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 4.065391644139023e-05, | |
| "loss": 1.3082, | |
| "step": 249000 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 4.059496559797209e-05, | |
| "loss": 1.3267, | |
| "step": 249500 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 4.0536014754553956e-05, | |
| "loss": 1.3434, | |
| "step": 250000 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 4.047706391113581e-05, | |
| "loss": 1.3716, | |
| "step": 250500 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 4.041811306771767e-05, | |
| "loss": 1.2677, | |
| "step": 251000 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 4.035916222429953e-05, | |
| "loss": 1.3538, | |
| "step": 251500 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 4.0300211380881396e-05, | |
| "loss": 1.2902, | |
| "step": 252000 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 4.024126053746326e-05, | |
| "loss": 1.2978, | |
| "step": 252500 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 4.0182309694045124e-05, | |
| "loss": 1.4214, | |
| "step": 253000 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 4.012347675231382e-05, | |
| "loss": 1.2749, | |
| "step": 253500 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 4.006452590889568e-05, | |
| "loss": 1.3774, | |
| "step": 254000 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 4.000569296716437e-05, | |
| "loss": 1.3001, | |
| "step": 254500 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 3.994674212374624e-05, | |
| "loss": 1.3206, | |
| "step": 255000 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 3.98877912803281e-05, | |
| "loss": 1.2425, | |
| "step": 255500 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 3.9828840436909965e-05, | |
| "loss": 1.2601, | |
| "step": 256000 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 3.9769889593491825e-05, | |
| "loss": 1.3718, | |
| "step": 256500 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 3.9710938750073686e-05, | |
| "loss": 1.2449, | |
| "step": 257000 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 3.965198790665555e-05, | |
| "loss": 1.395, | |
| "step": 257500 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 3.9593037063237406e-05, | |
| "loss": 1.3455, | |
| "step": 258000 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 3.9534086219819266e-05, | |
| "loss": 1.2778, | |
| "step": 258500 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 3.947513537640113e-05, | |
| "loss": 1.3237, | |
| "step": 259000 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 3.9416184532982993e-05, | |
| "loss": 1.3166, | |
| "step": 259500 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 3.9357351591251694e-05, | |
| "loss": 1.3272, | |
| "step": 260000 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 3.9298518649520394e-05, | |
| "loss": 1.3, | |
| "step": 260500 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 3.9239567806102254e-05, | |
| "loss": 1.3644, | |
| "step": 261000 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 3.918061696268411e-05, | |
| "loss": 1.2442, | |
| "step": 261500 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 3.9121666119265975e-05, | |
| "loss": 1.3674, | |
| "step": 262000 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 3.9062715275847835e-05, | |
| "loss": 1.3582, | |
| "step": 262500 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 3.9003764432429695e-05, | |
| "loss": 1.2996, | |
| "step": 263000 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 3.8944931490698396e-05, | |
| "loss": 1.2659, | |
| "step": 263500 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 3.8885980647280256e-05, | |
| "loss": 1.2974, | |
| "step": 264000 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 3.882702980386212e-05, | |
| "loss": 1.3355, | |
| "step": 264500 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 3.876807896044398e-05, | |
| "loss": 1.2746, | |
| "step": 265000 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 3.8709128117025837e-05, | |
| "loss": 1.3051, | |
| "step": 265500 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 3.8650177273607704e-05, | |
| "loss": 1.2458, | |
| "step": 266000 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 3.8591344331876404e-05, | |
| "loss": 1.2668, | |
| "step": 266500 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 3.8532393488458264e-05, | |
| "loss": 1.3182, | |
| "step": 267000 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 3.8473442645040124e-05, | |
| "loss": 1.3013, | |
| "step": 267500 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 3.841449180162199e-05, | |
| "loss": 1.2176, | |
| "step": 268000 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 3.835554095820385e-05, | |
| "loss": 1.3966, | |
| "step": 268500 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 3.8296708016472545e-05, | |
| "loss": 1.2697, | |
| "step": 269000 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 3.8237757173054405e-05, | |
| "loss": 1.3279, | |
| "step": 269500 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 3.8178806329636266e-05, | |
| "loss": 1.28, | |
| "step": 270000 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 3.811985548621813e-05, | |
| "loss": 1.2927, | |
| "step": 270500 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 3.806090464279999e-05, | |
| "loss": 1.3114, | |
| "step": 271000 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 3.800195379938186e-05, | |
| "loss": 1.3238, | |
| "step": 271500 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 3.794300295596372e-05, | |
| "loss": 1.4098, | |
| "step": 272000 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 3.788405211254558e-05, | |
| "loss": 1.2851, | |
| "step": 272500 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 3.7825101269127434e-05, | |
| "loss": 1.3233, | |
| "step": 273000 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 3.77661504257093e-05, | |
| "loss": 1.4169, | |
| "step": 273500 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 3.770719958229116e-05, | |
| "loss": 1.3181, | |
| "step": 274000 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 3.764824873887303e-05, | |
| "loss": 1.2962, | |
| "step": 274500 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 3.758929789545489e-05, | |
| "loss": 1.3229, | |
| "step": 275000 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 3.753034705203675e-05, | |
| "loss": 1.3531, | |
| "step": 275500 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 3.747163201199229e-05, | |
| "loss": 1.3241, | |
| "step": 276000 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 3.741268116857414e-05, | |
| "loss": 1.2468, | |
| "step": 276500 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 3.7353730325156e-05, | |
| "loss": 1.2878, | |
| "step": 277000 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 3.729477948173787e-05, | |
| "loss": 1.2675, | |
| "step": 277500 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 3.723582863831973e-05, | |
| "loss": 1.2372, | |
| "step": 278000 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 3.717687779490159e-05, | |
| "loss": 1.3147, | |
| "step": 278500 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 3.711804485317029e-05, | |
| "loss": 1.3194, | |
| "step": 279000 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 3.705909400975216e-05, | |
| "loss": 1.325, | |
| "step": 279500 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 3.700014316633401e-05, | |
| "loss": 1.3561, | |
| "step": 280000 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 3.694119232291587e-05, | |
| "loss": 1.2199, | |
| "step": 280500 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 3.688224147949773e-05, | |
| "loss": 1.3318, | |
| "step": 281000 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 3.682340853776643e-05, | |
| "loss": 1.3502, | |
| "step": 281500 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 3.67644576943483e-05, | |
| "loss": 1.3071, | |
| "step": 282000 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 3.670550685093016e-05, | |
| "loss": 1.2433, | |
| "step": 282500 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 3.664655600751202e-05, | |
| "loss": 1.2955, | |
| "step": 283000 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 3.6587605164093886e-05, | |
| "loss": 1.3713, | |
| "step": 283500 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 3.652865432067574e-05, | |
| "loss": 1.3452, | |
| "step": 284000 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 3.64697034772576e-05, | |
| "loss": 1.321, | |
| "step": 284500 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 3.6410752633839466e-05, | |
| "loss": 1.2524, | |
| "step": 285000 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 3.635180179042133e-05, | |
| "loss": 1.3178, | |
| "step": 285500 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 3.629285094700319e-05, | |
| "loss": 1.2231, | |
| "step": 286000 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 3.623401800527189e-05, | |
| "loss": 1.3051, | |
| "step": 286500 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 3.6175067161853754e-05, | |
| "loss": 1.2286, | |
| "step": 287000 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 3.611611631843561e-05, | |
| "loss": 1.2875, | |
| "step": 287500 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 3.605716547501747e-05, | |
| "loss": 1.23, | |
| "step": 288000 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 3.599821463159933e-05, | |
| "loss": 1.2613, | |
| "step": 288500 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 3.5939263788181195e-05, | |
| "loss": 1.2641, | |
| "step": 289000 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 3.5880312944763055e-05, | |
| "loss": 1.2473, | |
| "step": 289500 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 3.582136210134492e-05, | |
| "loss": 1.2474, | |
| "step": 290000 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 3.5762529159613616e-05, | |
| "loss": 1.2923, | |
| "step": 290500 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 3.570369621788231e-05, | |
| "loss": 1.3024, | |
| "step": 291000 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 3.5644745374464176e-05, | |
| "loss": 1.2859, | |
| "step": 291500 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 3.558579453104604e-05, | |
| "loss": 1.1786, | |
| "step": 292000 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 3.55268436876279e-05, | |
| "loss": 1.2681, | |
| "step": 292500 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 3.5467892844209764e-05, | |
| "loss": 1.3482, | |
| "step": 293000 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 3.540905990247846e-05, | |
| "loss": 1.3399, | |
| "step": 293500 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 3.5350109059060325e-05, | |
| "loss": 1.2904, | |
| "step": 294000 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 3.529127611732902e-05, | |
| "loss": 1.3039, | |
| "step": 294500 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 3.523232527391088e-05, | |
| "loss": 1.2332, | |
| "step": 295000 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 3.517337443049274e-05, | |
| "loss": 1.3332, | |
| "step": 295500 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 3.5114423587074606e-05, | |
| "loss": 1.2235, | |
| "step": 296000 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 3.5055472743656466e-05, | |
| "loss": 1.3569, | |
| "step": 296500 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 3.4996521900238326e-05, | |
| "loss": 1.2554, | |
| "step": 297000 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 3.4937571056820186e-05, | |
| "loss": 1.2249, | |
| "step": 297500 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 3.4878620213402046e-05, | |
| "loss": 1.3162, | |
| "step": 298000 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 3.481978727167075e-05, | |
| "loss": 1.2694, | |
| "step": 298500 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 3.4760836428252614e-05, | |
| "loss": 1.2175, | |
| "step": 299000 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 3.470188558483447e-05, | |
| "loss": 1.2431, | |
| "step": 299500 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 3.4642934741416334e-05, | |
| "loss": 1.245, | |
| "step": 300000 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 3.4583983897998194e-05, | |
| "loss": 1.341, | |
| "step": 300500 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 3.452503305458006e-05, | |
| "loss": 1.2291, | |
| "step": 301000 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 3.4466082211161915e-05, | |
| "loss": 1.2578, | |
| "step": 301500 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 3.4407249269430615e-05, | |
| "loss": 1.2904, | |
| "step": 302000 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 3.434829842601248e-05, | |
| "loss": 1.2795, | |
| "step": 302500 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 3.4289347582594336e-05, | |
| "loss": 1.2287, | |
| "step": 303000 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 3.42303967391762e-05, | |
| "loss": 1.2741, | |
| "step": 303500 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 3.41715637974449e-05, | |
| "loss": 1.2506, | |
| "step": 304000 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 3.411261295402676e-05, | |
| "loss": 1.328, | |
| "step": 304500 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 3.4053662110608623e-05, | |
| "loss": 1.2775, | |
| "step": 305000 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 3.3994711267190484e-05, | |
| "loss": 1.2351, | |
| "step": 305500 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 3.3935760423772344e-05, | |
| "loss": 1.3361, | |
| "step": 306000 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 3.387680958035421e-05, | |
| "loss": 1.2548, | |
| "step": 306500 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 3.381785873693607e-05, | |
| "loss": 1.2169, | |
| "step": 307000 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 3.375890789351793e-05, | |
| "loss": 1.278, | |
| "step": 307500 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 3.370007495178663e-05, | |
| "loss": 1.1629, | |
| "step": 308000 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 3.364112410836849e-05, | |
| "loss": 1.268, | |
| "step": 308500 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 3.358217326495035e-05, | |
| "loss": 1.2995, | |
| "step": 309000 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 3.352322242153221e-05, | |
| "loss": 1.2686, | |
| "step": 309500 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 3.346427157811408e-05, | |
| "loss": 1.3668, | |
| "step": 310000 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 3.340532073469594e-05, | |
| "loss": 1.2884, | |
| "step": 310500 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 3.33463698912778e-05, | |
| "loss": 1.3142, | |
| "step": 311000 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 3.32875369495465e-05, | |
| "loss": 1.287, | |
| "step": 311500 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 3.322858610612836e-05, | |
| "loss": 1.2372, | |
| "step": 312000 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 3.316963526271022e-05, | |
| "loss": 1.2591, | |
| "step": 312500 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 3.311068441929208e-05, | |
| "loss": 1.2634, | |
| "step": 313000 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 3.305173357587394e-05, | |
| "loss": 1.2233, | |
| "step": 313500 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 3.299278273245581e-05, | |
| "loss": 1.2714, | |
| "step": 314000 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 3.293383188903767e-05, | |
| "loss": 1.2526, | |
| "step": 314500 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 3.287488104561953e-05, | |
| "loss": 1.2608, | |
| "step": 315000 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 3.281593020220139e-05, | |
| "loss": 1.289, | |
| "step": 315500 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 3.2756979358783255e-05, | |
| "loss": 1.2376, | |
| "step": 316000 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 3.269802851536511e-05, | |
| "loss": 1.2847, | |
| "step": 316500 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 3.2639077671946976e-05, | |
| "loss": 1.264, | |
| "step": 317000 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 3.2580244730215676e-05, | |
| "loss": 1.2889, | |
| "step": 317500 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 3.2521293886797536e-05, | |
| "loss": 1.3118, | |
| "step": 318000 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 3.24623430433794e-05, | |
| "loss": 1.3439, | |
| "step": 318500 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 3.24035101016481e-05, | |
| "loss": 1.3455, | |
| "step": 319000 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 3.234455925822996e-05, | |
| "loss": 1.2317, | |
| "step": 319500 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 3.228560841481182e-05, | |
| "loss": 1.2395, | |
| "step": 320000 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 3.222665757139368e-05, | |
| "loss": 1.2622, | |
| "step": 320500 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 3.2167706727975545e-05, | |
| "loss": 1.2654, | |
| "step": 321000 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 3.210887378624424e-05, | |
| "loss": 1.3169, | |
| "step": 321500 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 3.20499229428261e-05, | |
| "loss": 1.3072, | |
| "step": 322000 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 3.1990972099407965e-05, | |
| "loss": 1.2517, | |
| "step": 322500 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 3.1932021255989826e-05, | |
| "loss": 1.2509, | |
| "step": 323000 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 3.1873070412571686e-05, | |
| "loss": 1.2573, | |
| "step": 323500 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 3.1814237470840386e-05, | |
| "loss": 1.276, | |
| "step": 324000 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 3.1755286627422247e-05, | |
| "loss": 1.2172, | |
| "step": 324500 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 3.169633578400411e-05, | |
| "loss": 1.2464, | |
| "step": 325000 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 3.163738494058597e-05, | |
| "loss": 1.2765, | |
| "step": 325500 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 3.157855199885467e-05, | |
| "loss": 1.2238, | |
| "step": 326000 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 3.151960115543653e-05, | |
| "loss": 1.2464, | |
| "step": 326500 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 3.1460650312018395e-05, | |
| "loss": 1.2596, | |
| "step": 327000 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 3.140169946860025e-05, | |
| "loss": 1.3547, | |
| "step": 327500 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 3.1342748625182115e-05, | |
| "loss": 1.2478, | |
| "step": 328000 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 3.1283797781763975e-05, | |
| "loss": 1.2495, | |
| "step": 328500 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 3.1224846938345835e-05, | |
| "loss": 1.2631, | |
| "step": 329000 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 3.1165896094927696e-05, | |
| "loss": 1.152, | |
| "step": 329500 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 3.1107063153196396e-05, | |
| "loss": 1.2581, | |
| "step": 330000 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 3.1048112309778256e-05, | |
| "loss": 1.2761, | |
| "step": 330500 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 3.0989161466360116e-05, | |
| "loss": 1.2881, | |
| "step": 331000 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 3.093021062294198e-05, | |
| "loss": 1.268, | |
| "step": 331500 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 3.087137768121068e-05, | |
| "loss": 1.1981, | |
| "step": 332000 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 3.0812426837792544e-05, | |
| "loss": 1.2601, | |
| "step": 332500 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 3.0753475994374404e-05, | |
| "loss": 1.2364, | |
| "step": 333000 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 3.0694525150956264e-05, | |
| "loss": 1.2036, | |
| "step": 333500 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 3.0635574307538125e-05, | |
| "loss": 1.2192, | |
| "step": 334000 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 3.0576741365806825e-05, | |
| "loss": 1.227, | |
| "step": 334500 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 3.051790842407552e-05, | |
| "loss": 1.1997, | |
| "step": 335000 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 3.0459075482344222e-05, | |
| "loss": 1.205, | |
| "step": 335500 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 3.040012463892608e-05, | |
| "loss": 1.2236, | |
| "step": 336000 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 3.0341173795507943e-05, | |
| "loss": 1.1391, | |
| "step": 336500 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 3.0282222952089806e-05, | |
| "loss": 1.2229, | |
| "step": 337000 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 3.02233900103585e-05, | |
| "loss": 1.2597, | |
| "step": 337500 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 3.0164439166940364e-05, | |
| "loss": 1.26, | |
| "step": 338000 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 3.0105488323522227e-05, | |
| "loss": 1.203, | |
| "step": 338500 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 3.0046537480104087e-05, | |
| "loss": 1.2286, | |
| "step": 339000 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 2.998758663668595e-05, | |
| "loss": 1.2209, | |
| "step": 339500 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 2.992863579326781e-05, | |
| "loss": 1.2839, | |
| "step": 340000 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 2.986968494984967e-05, | |
| "loss": 1.2837, | |
| "step": 340500 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 2.9810734106431535e-05, | |
| "loss": 1.2309, | |
| "step": 341000 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 2.9751901164700232e-05, | |
| "loss": 1.2584, | |
| "step": 341500 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 2.9692950321282092e-05, | |
| "loss": 1.2027, | |
| "step": 342000 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 2.9633999477863956e-05, | |
| "loss": 1.1769, | |
| "step": 342500 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 2.957504863444582e-05, | |
| "loss": 1.1513, | |
| "step": 343000 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 2.9516215692714513e-05, | |
| "loss": 1.3123, | |
| "step": 343500 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 2.9457264849296377e-05, | |
| "loss": 1.236, | |
| "step": 344000 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 2.939831400587824e-05, | |
| "loss": 1.2736, | |
| "step": 344500 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 2.9339363162460104e-05, | |
| "loss": 1.1916, | |
| "step": 345000 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 2.928041231904196e-05, | |
| "loss": 1.1626, | |
| "step": 345500 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 2.9221461475623824e-05, | |
| "loss": 1.3372, | |
| "step": 346000 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 2.9162510632205684e-05, | |
| "loss": 1.2332, | |
| "step": 346500 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 2.910367769047438e-05, | |
| "loss": 1.1655, | |
| "step": 347000 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 2.9044726847056245e-05, | |
| "loss": 1.206, | |
| "step": 347500 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 2.898577600363811e-05, | |
| "loss": 1.2023, | |
| "step": 348000 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 2.892682516021997e-05, | |
| "loss": 1.2383, | |
| "step": 348500 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 2.8867992218488666e-05, | |
| "loss": 1.2346, | |
| "step": 349000 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 2.880904137507053e-05, | |
| "loss": 1.2237, | |
| "step": 349500 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 2.875009053165239e-05, | |
| "loss": 1.2113, | |
| "step": 350000 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 2.8691139688234253e-05, | |
| "loss": 1.242, | |
| "step": 350500 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 2.863230674650295e-05, | |
| "loss": 1.2793, | |
| "step": 351000 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 2.857335590308481e-05, | |
| "loss": 1.1956, | |
| "step": 351500 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 2.8514405059666674e-05, | |
| "loss": 1.2009, | |
| "step": 352000 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 2.8455454216248534e-05, | |
| "loss": 1.3208, | |
| "step": 352500 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 2.8396503372830395e-05, | |
| "loss": 1.2701, | |
| "step": 353000 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 2.8337670431099095e-05, | |
| "loss": 1.2164, | |
| "step": 353500 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 2.827871958768096e-05, | |
| "loss": 1.205, | |
| "step": 354000 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 2.8219768744262815e-05, | |
| "loss": 1.1913, | |
| "step": 354500 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 2.816081790084468e-05, | |
| "loss": 1.2342, | |
| "step": 355000 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 2.8101867057426543e-05, | |
| "loss": 1.277, | |
| "step": 355500 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 2.8042916214008403e-05, | |
| "loss": 1.2478, | |
| "step": 356000 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 2.7983965370590263e-05, | |
| "loss": 1.2457, | |
| "step": 356500 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 2.7925014527172127e-05, | |
| "loss": 1.2791, | |
| "step": 357000 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 2.7866181585440824e-05, | |
| "loss": 1.1806, | |
| "step": 357500 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 2.7807230742022684e-05, | |
| "loss": 1.3453, | |
| "step": 358000 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 2.7748397800291384e-05, | |
| "loss": 1.2719, | |
| "step": 358500 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 2.7689446956873244e-05, | |
| "loss": 1.1959, | |
| "step": 359000 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 2.7630496113455108e-05, | |
| "loss": 1.2623, | |
| "step": 359500 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 2.7571545270036968e-05, | |
| "loss": 1.2014, | |
| "step": 360000 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 2.751259442661883e-05, | |
| "loss": 1.1756, | |
| "step": 360500 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 2.745376148488753e-05, | |
| "loss": 1.2209, | |
| "step": 361000 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 2.739481064146939e-05, | |
| "loss": 1.255, | |
| "step": 361500 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 2.733585979805125e-05, | |
| "loss": 1.1713, | |
| "step": 362000 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 2.7276908954633113e-05, | |
| "loss": 1.2254, | |
| "step": 362500 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 2.7217958111214976e-05, | |
| "loss": 1.2165, | |
| "step": 363000 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 2.715912516948367e-05, | |
| "loss": 1.1452, | |
| "step": 363500 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 2.7100174326065534e-05, | |
| "loss": 1.2771, | |
| "step": 364000 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 2.7041223482647397e-05, | |
| "loss": 1.2054, | |
| "step": 364500 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 2.698227263922926e-05, | |
| "loss": 1.2138, | |
| "step": 365000 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 2.6923321795811118e-05, | |
| "loss": 1.1762, | |
| "step": 365500 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 2.686437095239298e-05, | |
| "loss": 1.2403, | |
| "step": 366000 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 2.6805420108974845e-05, | |
| "loss": 1.2552, | |
| "step": 366500 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 2.6746469265556705e-05, | |
| "loss": 1.2517, | |
| "step": 367000 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "learning_rate": 2.6687636323825402e-05, | |
| "loss": 1.1744, | |
| "step": 367500 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "learning_rate": 2.6628685480407266e-05, | |
| "loss": 1.2521, | |
| "step": 368000 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "learning_rate": 2.6569734636989126e-05, | |
| "loss": 1.1839, | |
| "step": 368500 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "learning_rate": 2.6510783793570986e-05, | |
| "loss": 1.1896, | |
| "step": 369000 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 2.6451950851839686e-05, | |
| "loss": 1.2454, | |
| "step": 369500 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 2.6393000008421547e-05, | |
| "loss": 1.2666, | |
| "step": 370000 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 2.633404916500341e-05, | |
| "loss": 1.2005, | |
| "step": 370500 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 2.627509832158527e-05, | |
| "loss": 1.2315, | |
| "step": 371000 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 2.621614747816713e-05, | |
| "loss": 1.2595, | |
| "step": 371500 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 2.6157196634748994e-05, | |
| "loss": 1.2481, | |
| "step": 372000 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 2.609836369301769e-05, | |
| "loss": 1.2914, | |
| "step": 372500 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 2.603941284959955e-05, | |
| "loss": 1.1879, | |
| "step": 373000 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 2.5980462006181415e-05, | |
| "loss": 1.1608, | |
| "step": 373500 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 2.592151116276328e-05, | |
| "loss": 1.163, | |
| "step": 374000 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 2.5862560319345136e-05, | |
| "loss": 1.2001, | |
| "step": 374500 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 2.5803609475927e-05, | |
| "loss": 1.1682, | |
| "step": 375000 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 2.57447765341957e-05, | |
| "loss": 1.2287, | |
| "step": 375500 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 2.568582569077756e-05, | |
| "loss": 1.1357, | |
| "step": 376000 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 2.562687484735942e-05, | |
| "loss": 1.1565, | |
| "step": 376500 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 2.5567924003941284e-05, | |
| "loss": 1.2437, | |
| "step": 377000 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "learning_rate": 2.550909106220998e-05, | |
| "loss": 1.1682, | |
| "step": 377500 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "learning_rate": 2.545014021879184e-05, | |
| "loss": 1.1594, | |
| "step": 378000 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "learning_rate": 2.5391189375373704e-05, | |
| "loss": 1.2731, | |
| "step": 378500 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 2.5332238531955565e-05, | |
| "loss": 1.228, | |
| "step": 379000 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 2.5273287688537428e-05, | |
| "loss": 1.2309, | |
| "step": 379500 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 2.521433684511929e-05, | |
| "loss": 1.1674, | |
| "step": 380000 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 2.515538600170115e-05, | |
| "loss": 1.2896, | |
| "step": 380500 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 2.5096435158283012e-05, | |
| "loss": 1.2069, | |
| "step": 381000 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 2.5037484314864876e-05, | |
| "loss": 1.1584, | |
| "step": 381500 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 2.497865137313357e-05, | |
| "loss": 1.2353, | |
| "step": 382000 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 2.4919700529715433e-05, | |
| "loss": 1.1736, | |
| "step": 382500 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 2.4860867587984133e-05, | |
| "loss": 1.1823, | |
| "step": 383000 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 2.480191674456599e-05, | |
| "loss": 1.2652, | |
| "step": 383500 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 2.4742965901147854e-05, | |
| "loss": 1.2173, | |
| "step": 384000 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 2.4684015057729717e-05, | |
| "loss": 1.2165, | |
| "step": 384500 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "learning_rate": 2.462506421431158e-05, | |
| "loss": 1.1748, | |
| "step": 385000 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "learning_rate": 2.4566113370893438e-05, | |
| "loss": 1.149, | |
| "step": 385500 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "learning_rate": 2.45071625274753e-05, | |
| "loss": 1.1918, | |
| "step": 386000 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "learning_rate": 2.4448211684057165e-05, | |
| "loss": 1.1725, | |
| "step": 386500 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 2.4389260840639025e-05, | |
| "loss": 1.1267, | |
| "step": 387000 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 2.4330309997220885e-05, | |
| "loss": 1.2385, | |
| "step": 387500 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 2.4271477055489586e-05, | |
| "loss": 1.159, | |
| "step": 388000 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 2.4212526212071446e-05, | |
| "loss": 1.2492, | |
| "step": 388500 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "learning_rate": 2.415357536865331e-05, | |
| "loss": 1.1856, | |
| "step": 389000 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "learning_rate": 2.409462452523517e-05, | |
| "loss": 1.1974, | |
| "step": 389500 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "learning_rate": 2.403567368181703e-05, | |
| "loss": 1.1594, | |
| "step": 390000 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "learning_rate": 2.3976722838398894e-05, | |
| "loss": 1.1298, | |
| "step": 390500 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 2.3917771994980757e-05, | |
| "loss": 1.2669, | |
| "step": 391000 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 2.3858821151562614e-05, | |
| "loss": 1.1915, | |
| "step": 391500 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 2.3799870308144478e-05, | |
| "loss": 1.1861, | |
| "step": 392000 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 2.3741037366413178e-05, | |
| "loss": 1.1185, | |
| "step": 392500 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 2.368220442468187e-05, | |
| "loss": 1.1989, | |
| "step": 393000 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 2.3623253581263735e-05, | |
| "loss": 1.1478, | |
| "step": 393500 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 2.35643027378456e-05, | |
| "loss": 1.2042, | |
| "step": 394000 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 2.3505469796114292e-05, | |
| "loss": 1.2027, | |
| "step": 394500 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "learning_rate": 2.3446518952696156e-05, | |
| "loss": 1.1731, | |
| "step": 395000 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "learning_rate": 2.338756810927802e-05, | |
| "loss": 1.1656, | |
| "step": 395500 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "learning_rate": 2.3328735167546717e-05, | |
| "loss": 1.2331, | |
| "step": 396000 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "learning_rate": 2.3269784324128577e-05, | |
| "loss": 1.2218, | |
| "step": 396500 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "learning_rate": 2.321083348071044e-05, | |
| "loss": 1.2254, | |
| "step": 397000 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "learning_rate": 2.31518826372923e-05, | |
| "loss": 1.2235, | |
| "step": 397500 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "learning_rate": 2.3092931793874164e-05, | |
| "loss": 1.2181, | |
| "step": 398000 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "learning_rate": 2.3033980950456025e-05, | |
| "loss": 1.1699, | |
| "step": 398500 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "learning_rate": 2.2975030107037885e-05, | |
| "loss": 1.1735, | |
| "step": 399000 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "learning_rate": 2.2916079263619748e-05, | |
| "loss": 1.1364, | |
| "step": 399500 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "learning_rate": 2.2857128420201612e-05, | |
| "loss": 1.1461, | |
| "step": 400000 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "learning_rate": 2.279817757678347e-05, | |
| "loss": 1.1873, | |
| "step": 400500 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "learning_rate": 2.2739226733365332e-05, | |
| "loss": 1.1424, | |
| "step": 401000 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "learning_rate": 2.2680393791634033e-05, | |
| "loss": 1.2363, | |
| "step": 401500 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "learning_rate": 2.262144294821589e-05, | |
| "loss": 1.1695, | |
| "step": 402000 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "learning_rate": 2.2562492104797753e-05, | |
| "loss": 1.1056, | |
| "step": 402500 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "learning_rate": 2.2503541261379617e-05, | |
| "loss": 1.1959, | |
| "step": 403000 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "learning_rate": 2.244459041796148e-05, | |
| "loss": 1.2244, | |
| "step": 403500 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "learning_rate": 2.238563957454334e-05, | |
| "loss": 1.2254, | |
| "step": 404000 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "learning_rate": 2.23266887311252e-05, | |
| "loss": 1.1684, | |
| "step": 404500 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "learning_rate": 2.2267737887707064e-05, | |
| "loss": 1.1584, | |
| "step": 405000 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "learning_rate": 2.2208787044288925e-05, | |
| "loss": 1.1854, | |
| "step": 405500 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "learning_rate": 2.214995410255762e-05, | |
| "loss": 1.1488, | |
| "step": 406000 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "learning_rate": 2.2091003259139485e-05, | |
| "loss": 1.1655, | |
| "step": 406500 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "learning_rate": 2.2032052415721345e-05, | |
| "loss": 1.161, | |
| "step": 407000 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "learning_rate": 2.197310157230321e-05, | |
| "loss": 1.1538, | |
| "step": 407500 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "learning_rate": 2.191415072888507e-05, | |
| "loss": 1.155, | |
| "step": 408000 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "learning_rate": 2.1855317787153766e-05, | |
| "loss": 1.1437, | |
| "step": 408500 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "learning_rate": 2.179636694373563e-05, | |
| "loss": 1.1404, | |
| "step": 409000 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "learning_rate": 2.1737416100317493e-05, | |
| "loss": 1.166, | |
| "step": 409500 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "learning_rate": 2.167846525689935e-05, | |
| "loss": 1.1788, | |
| "step": 410000 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "learning_rate": 2.1619514413481214e-05, | |
| "loss": 1.2474, | |
| "step": 410500 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "learning_rate": 2.1560563570063077e-05, | |
| "loss": 1.1323, | |
| "step": 411000 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "learning_rate": 2.150173062833177e-05, | |
| "loss": 1.1443, | |
| "step": 411500 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "learning_rate": 2.1442779784913635e-05, | |
| "loss": 1.1085, | |
| "step": 412000 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "learning_rate": 2.1383828941495498e-05, | |
| "loss": 1.1595, | |
| "step": 412500 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "learning_rate": 2.132487809807736e-05, | |
| "loss": 1.2336, | |
| "step": 413000 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "learning_rate": 2.126592725465922e-05, | |
| "loss": 1.1398, | |
| "step": 413500 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "learning_rate": 2.120709431292792e-05, | |
| "loss": 1.236, | |
| "step": 414000 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "learning_rate": 2.114814346950978e-05, | |
| "loss": 1.2135, | |
| "step": 414500 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "learning_rate": 2.1089192626091643e-05, | |
| "loss": 1.1443, | |
| "step": 415000 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "learning_rate": 2.1030241782673503e-05, | |
| "loss": 1.0692, | |
| "step": 415500 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "learning_rate": 2.0971290939255363e-05, | |
| "loss": 1.1897, | |
| "step": 416000 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "learning_rate": 2.0912340095837227e-05, | |
| "loss": 1.1329, | |
| "step": 416500 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "learning_rate": 2.085338925241909e-05, | |
| "loss": 1.184, | |
| "step": 417000 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "learning_rate": 2.0794438409000947e-05, | |
| "loss": 1.1203, | |
| "step": 417500 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "learning_rate": 2.0735605467269648e-05, | |
| "loss": 1.1576, | |
| "step": 418000 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "learning_rate": 2.067665462385151e-05, | |
| "loss": 1.1962, | |
| "step": 418500 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "learning_rate": 2.0617821682120205e-05, | |
| "loss": 1.1513, | |
| "step": 419000 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "learning_rate": 2.055887083870207e-05, | |
| "loss": 1.1488, | |
| "step": 419500 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "learning_rate": 2.050003789697077e-05, | |
| "loss": 1.2417, | |
| "step": 420000 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "learning_rate": 2.0441087053552626e-05, | |
| "loss": 1.1547, | |
| "step": 420500 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "learning_rate": 2.038213621013449e-05, | |
| "loss": 1.1786, | |
| "step": 421000 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "learning_rate": 2.0323185366716353e-05, | |
| "loss": 1.1449, | |
| "step": 421500 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "learning_rate": 2.0264234523298216e-05, | |
| "loss": 1.1384, | |
| "step": 422000 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "learning_rate": 2.0205283679880073e-05, | |
| "loss": 1.1989, | |
| "step": 422500 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "learning_rate": 2.0146332836461937e-05, | |
| "loss": 1.2179, | |
| "step": 423000 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "learning_rate": 2.00873819930438e-05, | |
| "loss": 1.2169, | |
| "step": 423500 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "learning_rate": 2.002843114962566e-05, | |
| "loss": 1.1653, | |
| "step": 424000 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "learning_rate": 1.996948030620752e-05, | |
| "loss": 1.2552, | |
| "step": 424500 | |
| }, | |
| { | |
| "epoch": 2.15, | |
| "learning_rate": 1.9910529462789384e-05, | |
| "loss": 1.0916, | |
| "step": 425000 | |
| }, | |
| { | |
| "epoch": 2.15, | |
| "learning_rate": 1.985169652105808e-05, | |
| "loss": 1.1899, | |
| "step": 425500 | |
| }, | |
| { | |
| "epoch": 2.15, | |
| "learning_rate": 1.979286357932678e-05, | |
| "loss": 1.1469, | |
| "step": 426000 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "learning_rate": 1.9733912735908642e-05, | |
| "loss": 1.1358, | |
| "step": 426500 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "learning_rate": 1.9674961892490502e-05, | |
| "loss": 1.1754, | |
| "step": 427000 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "learning_rate": 1.9616011049072366e-05, | |
| "loss": 1.1624, | |
| "step": 427500 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "learning_rate": 1.9557060205654226e-05, | |
| "loss": 1.1019, | |
| "step": 428000 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "learning_rate": 1.9498109362236086e-05, | |
| "loss": 1.1592, | |
| "step": 428500 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "learning_rate": 1.943915851881795e-05, | |
| "loss": 1.1909, | |
| "step": 429000 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "learning_rate": 1.9380207675399813e-05, | |
| "loss": 1.1488, | |
| "step": 429500 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "learning_rate": 1.932125683198167e-05, | |
| "loss": 1.2036, | |
| "step": 430000 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "learning_rate": 1.9262305988563534e-05, | |
| "loss": 1.2028, | |
| "step": 430500 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "learning_rate": 1.9203355145145397e-05, | |
| "loss": 1.1806, | |
| "step": 431000 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "learning_rate": 1.9144522203414095e-05, | |
| "loss": 1.1659, | |
| "step": 431500 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "learning_rate": 1.9085571359995955e-05, | |
| "loss": 1.151, | |
| "step": 432000 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "learning_rate": 1.902662051657782e-05, | |
| "loss": 1.1106, | |
| "step": 432500 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "learning_rate": 1.896766967315968e-05, | |
| "loss": 1.2034, | |
| "step": 433000 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "learning_rate": 1.8908718829741542e-05, | |
| "loss": 1.239, | |
| "step": 433500 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "learning_rate": 1.884988588801024e-05, | |
| "loss": 1.1395, | |
| "step": 434000 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "learning_rate": 1.87909350445921e-05, | |
| "loss": 1.2253, | |
| "step": 434500 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "learning_rate": 1.8731984201173963e-05, | |
| "loss": 1.1857, | |
| "step": 435000 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "learning_rate": 1.8673033357755823e-05, | |
| "loss": 1.1693, | |
| "step": 435500 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "learning_rate": 1.861420041602452e-05, | |
| "loss": 1.1043, | |
| "step": 436000 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "learning_rate": 1.8555249572606384e-05, | |
| "loss": 1.1789, | |
| "step": 436500 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "learning_rate": 1.8496298729188247e-05, | |
| "loss": 1.1296, | |
| "step": 437000 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "learning_rate": 1.8437347885770104e-05, | |
| "loss": 1.1615, | |
| "step": 437500 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "learning_rate": 1.8378397042351968e-05, | |
| "loss": 1.1349, | |
| "step": 438000 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "learning_rate": 1.8319564100620668e-05, | |
| "loss": 1.1539, | |
| "step": 438500 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "learning_rate": 1.8260613257202525e-05, | |
| "loss": 1.2216, | |
| "step": 439000 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "learning_rate": 1.820166241378439e-05, | |
| "loss": 1.1701, | |
| "step": 439500 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "learning_rate": 1.8142711570366252e-05, | |
| "loss": 1.1634, | |
| "step": 440000 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "learning_rate": 1.8083760726948116e-05, | |
| "loss": 1.1049, | |
| "step": 440500 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "learning_rate": 1.8024809883529973e-05, | |
| "loss": 1.1572, | |
| "step": 441000 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "learning_rate": 1.7965976941798673e-05, | |
| "loss": 1.1914, | |
| "step": 441500 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "learning_rate": 1.7907026098380537e-05, | |
| "loss": 1.1036, | |
| "step": 442000 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "learning_rate": 1.7848075254962397e-05, | |
| "loss": 1.1079, | |
| "step": 442500 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "learning_rate": 1.7789124411544257e-05, | |
| "loss": 1.0967, | |
| "step": 443000 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "learning_rate": 1.773017356812612e-05, | |
| "loss": 1.0886, | |
| "step": 443500 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "learning_rate": 1.767122272470798e-05, | |
| "loss": 1.2437, | |
| "step": 444000 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "learning_rate": 1.7612271881289844e-05, | |
| "loss": 1.1384, | |
| "step": 444500 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "learning_rate": 1.7553321037871705e-05, | |
| "loss": 1.2413, | |
| "step": 445000 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "learning_rate": 1.74944880961404e-05, | |
| "loss": 1.2516, | |
| "step": 445500 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "learning_rate": 1.7435537252722262e-05, | |
| "loss": 1.2607, | |
| "step": 446000 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 1.7376586409304125e-05, | |
| "loss": 1.1788, | |
| "step": 446500 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 1.7317635565885986e-05, | |
| "loss": 1.1807, | |
| "step": 447000 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 1.7258802624154686e-05, | |
| "loss": 1.1212, | |
| "step": 447500 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 1.7199851780736546e-05, | |
| "loss": 1.1052, | |
| "step": 448000 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "learning_rate": 1.714090093731841e-05, | |
| "loss": 1.1732, | |
| "step": 448500 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "learning_rate": 1.708195009390027e-05, | |
| "loss": 1.1986, | |
| "step": 449000 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "learning_rate": 1.7022999250482134e-05, | |
| "loss": 1.1305, | |
| "step": 449500 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "learning_rate": 1.6964284210437667e-05, | |
| "loss": 1.1177, | |
| "step": 450000 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "learning_rate": 1.6905333367019528e-05, | |
| "loss": 1.106, | |
| "step": 450500 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "learning_rate": 1.684638252360139e-05, | |
| "loss": 1.1806, | |
| "step": 451000 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "learning_rate": 1.678743168018325e-05, | |
| "loss": 1.1752, | |
| "step": 451500 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "learning_rate": 1.6728480836765115e-05, | |
| "loss": 1.1708, | |
| "step": 452000 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "learning_rate": 1.6669529993346975e-05, | |
| "loss": 1.1829, | |
| "step": 452500 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "learning_rate": 1.6610697051615672e-05, | |
| "loss": 1.0927, | |
| "step": 453000 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "learning_rate": 1.6551746208197536e-05, | |
| "loss": 1.1679, | |
| "step": 453500 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "learning_rate": 1.6492795364779396e-05, | |
| "loss": 1.1808, | |
| "step": 454000 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "learning_rate": 1.6433844521361256e-05, | |
| "loss": 1.1363, | |
| "step": 454500 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "learning_rate": 1.637489367794312e-05, | |
| "loss": 1.0806, | |
| "step": 455000 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "learning_rate": 1.6316060736211817e-05, | |
| "loss": 1.1608, | |
| "step": 455500 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "learning_rate": 1.6257109892793677e-05, | |
| "loss": 1.204, | |
| "step": 456000 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "learning_rate": 1.619815904937554e-05, | |
| "loss": 1.1361, | |
| "step": 456500 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "learning_rate": 1.61392082059574e-05, | |
| "loss": 1.1163, | |
| "step": 457000 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "learning_rate": 1.6080257362539265e-05, | |
| "loss": 1.1429, | |
| "step": 457500 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "learning_rate": 1.6021306519121125e-05, | |
| "loss": 1.1361, | |
| "step": 458000 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "learning_rate": 1.596235567570299e-05, | |
| "loss": 1.0787, | |
| "step": 458500 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "learning_rate": 1.5903522733971685e-05, | |
| "loss": 1.2084, | |
| "step": 459000 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "learning_rate": 1.5844571890553546e-05, | |
| "loss": 1.2054, | |
| "step": 459500 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "learning_rate": 1.578562104713541e-05, | |
| "loss": 1.16, | |
| "step": 460000 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "learning_rate": 1.572667020371727e-05, | |
| "loss": 1.1465, | |
| "step": 460500 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "learning_rate": 1.5667719360299133e-05, | |
| "loss": 1.172, | |
| "step": 461000 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "learning_rate": 1.5608768516880993e-05, | |
| "loss": 1.1564, | |
| "step": 461500 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "learning_rate": 1.5549935575149694e-05, | |
| "loss": 1.0888, | |
| "step": 462000 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "learning_rate": 1.5490984731731554e-05, | |
| "loss": 1.1401, | |
| "step": 462500 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "learning_rate": 1.543215179000025e-05, | |
| "loss": 1.1677, | |
| "step": 463000 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "learning_rate": 1.5373200946582114e-05, | |
| "loss": 1.1087, | |
| "step": 463500 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "learning_rate": 1.5314250103163975e-05, | |
| "loss": 1.2204, | |
| "step": 464000 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "learning_rate": 1.5255299259745836e-05, | |
| "loss": 1.1369, | |
| "step": 464500 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "learning_rate": 1.5196348416327697e-05, | |
| "loss": 1.111, | |
| "step": 465000 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "learning_rate": 1.513739757290956e-05, | |
| "loss": 1.1991, | |
| "step": 465500 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "learning_rate": 1.5078564631178257e-05, | |
| "loss": 1.0535, | |
| "step": 466000 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "learning_rate": 1.501961378776012e-05, | |
| "loss": 1.1935, | |
| "step": 466500 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "learning_rate": 1.4960662944341981e-05, | |
| "loss": 1.1171, | |
| "step": 467000 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "learning_rate": 1.4901712100923843e-05, | |
| "loss": 1.2015, | |
| "step": 467500 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "learning_rate": 1.4842761257505703e-05, | |
| "loss": 1.1403, | |
| "step": 468000 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "learning_rate": 1.4783928315774402e-05, | |
| "loss": 1.2389, | |
| "step": 468500 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "learning_rate": 1.4724977472356264e-05, | |
| "loss": 1.0835, | |
| "step": 469000 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "learning_rate": 1.4666026628938124e-05, | |
| "loss": 1.1687, | |
| "step": 469500 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "learning_rate": 1.4607075785519988e-05, | |
| "loss": 1.1683, | |
| "step": 470000 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "learning_rate": 1.4548124942101848e-05, | |
| "loss": 1.1314, | |
| "step": 470500 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "learning_rate": 1.4489174098683711e-05, | |
| "loss": 1.0656, | |
| "step": 471000 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "learning_rate": 1.4430223255265572e-05, | |
| "loss": 1.085, | |
| "step": 471500 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "learning_rate": 1.4371272411847434e-05, | |
| "loss": 1.1091, | |
| "step": 472000 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "learning_rate": 1.4312439470116132e-05, | |
| "loss": 1.2264, | |
| "step": 472500 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "learning_rate": 1.4253488626697994e-05, | |
| "loss": 1.0943, | |
| "step": 473000 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "learning_rate": 1.4194537783279854e-05, | |
| "loss": 1.0504, | |
| "step": 473500 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "learning_rate": 1.4135586939861718e-05, | |
| "loss": 1.1083, | |
| "step": 474000 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "learning_rate": 1.4076636096443578e-05, | |
| "loss": 1.1178, | |
| "step": 474500 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "learning_rate": 1.401768525302544e-05, | |
| "loss": 1.1602, | |
| "step": 475000 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "learning_rate": 1.3958734409607302e-05, | |
| "loss": 1.1508, | |
| "step": 475500 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "learning_rate": 1.3899783566189164e-05, | |
| "loss": 1.1951, | |
| "step": 476000 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "learning_rate": 1.3840832722771024e-05, | |
| "loss": 1.1492, | |
| "step": 476500 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "learning_rate": 1.3781881879352888e-05, | |
| "loss": 1.0209, | |
| "step": 477000 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "learning_rate": 1.3722931035934748e-05, | |
| "loss": 1.1813, | |
| "step": 477500 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "learning_rate": 1.3663980192516611e-05, | |
| "loss": 1.1068, | |
| "step": 478000 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "learning_rate": 1.3605265152472145e-05, | |
| "loss": 1.1782, | |
| "step": 478500 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "learning_rate": 1.3546432210740842e-05, | |
| "loss": 1.1442, | |
| "step": 479000 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "learning_rate": 1.3487481367322703e-05, | |
| "loss": 1.0932, | |
| "step": 479500 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 1.3428530523904566e-05, | |
| "loss": 1.12, | |
| "step": 480000 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 1.3369579680486426e-05, | |
| "loss": 1.1215, | |
| "step": 480500 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 1.331062883706829e-05, | |
| "loss": 1.1167, | |
| "step": 481000 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 1.325167799365015e-05, | |
| "loss": 1.1132, | |
| "step": 481500 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 1.3192727150232012e-05, | |
| "loss": 1.1323, | |
| "step": 482000 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 1.313389420850071e-05, | |
| "loss": 1.1822, | |
| "step": 482500 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 1.3074943365082573e-05, | |
| "loss": 1.1408, | |
| "step": 483000 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 1.3015992521664433e-05, | |
| "loss": 1.1328, | |
| "step": 483500 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "learning_rate": 1.2957041678246296e-05, | |
| "loss": 1.0896, | |
| "step": 484000 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "learning_rate": 1.2898090834828157e-05, | |
| "loss": 1.1126, | |
| "step": 484500 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "learning_rate": 1.2839139991410019e-05, | |
| "loss": 1.1636, | |
| "step": 485000 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "learning_rate": 1.278018914799188e-05, | |
| "loss": 1.1159, | |
| "step": 485500 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 1.2721238304573742e-05, | |
| "loss": 1.1699, | |
| "step": 486000 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 1.266240536284244e-05, | |
| "loss": 1.0885, | |
| "step": 486500 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 1.2603454519424301e-05, | |
| "loss": 1.102, | |
| "step": 487000 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 1.2544503676006163e-05, | |
| "loss": 1.1209, | |
| "step": 487500 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "learning_rate": 1.2485552832588027e-05, | |
| "loss": 1.1678, | |
| "step": 488000 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "learning_rate": 1.2426601989169887e-05, | |
| "loss": 1.1369, | |
| "step": 488500 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "learning_rate": 1.2367651145751749e-05, | |
| "loss": 1.0852, | |
| "step": 489000 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "learning_rate": 1.2308818204020448e-05, | |
| "loss": 1.1961, | |
| "step": 489500 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 1.2249867360602308e-05, | |
| "loss": 1.0893, | |
| "step": 490000 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 1.219091651718417e-05, | |
| "loss": 1.1555, | |
| "step": 490500 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 1.2131965673766032e-05, | |
| "loss": 1.1691, | |
| "step": 491000 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 1.2073014830347893e-05, | |
| "loss": 1.1374, | |
| "step": 491500 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 1.201418188861659e-05, | |
| "loss": 1.1329, | |
| "step": 492000 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 1.1955231045198454e-05, | |
| "loss": 1.0668, | |
| "step": 492500 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 1.1896280201780314e-05, | |
| "loss": 1.1424, | |
| "step": 493000 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 1.1837329358362176e-05, | |
| "loss": 1.1749, | |
| "step": 493500 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 1.1778378514944038e-05, | |
| "loss": 1.1726, | |
| "step": 494000 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 1.17194276715259e-05, | |
| "loss": 1.1148, | |
| "step": 494500 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 1.1660594729794599e-05, | |
| "loss": 1.1525, | |
| "step": 495000 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 1.1601643886376459e-05, | |
| "loss": 1.1298, | |
| "step": 495500 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 1.154269304295832e-05, | |
| "loss": 1.0978, | |
| "step": 496000 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 1.1483742199540181e-05, | |
| "loss": 1.1544, | |
| "step": 496500 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 1.1424791356122045e-05, | |
| "loss": 1.1432, | |
| "step": 497000 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 1.1366076316077578e-05, | |
| "loss": 1.1145, | |
| "step": 497500 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "learning_rate": 1.130712547265944e-05, | |
| "loss": 1.0662, | |
| "step": 498000 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "learning_rate": 1.1248174629241302e-05, | |
| "loss": 1.1682, | |
| "step": 498500 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "learning_rate": 1.1189223785823162e-05, | |
| "loss": 1.0792, | |
| "step": 499000 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "learning_rate": 1.1130272942405026e-05, | |
| "loss": 1.1048, | |
| "step": 499500 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "learning_rate": 1.1071322098986886e-05, | |
| "loss": 1.129, | |
| "step": 500000 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "learning_rate": 1.1012371255568748e-05, | |
| "loss": 1.0655, | |
| "step": 500500 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "learning_rate": 1.095342041215061e-05, | |
| "loss": 1.1566, | |
| "step": 501000 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "learning_rate": 1.0894587470419307e-05, | |
| "loss": 1.1391, | |
| "step": 501500 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "learning_rate": 1.0835636627001169e-05, | |
| "loss": 1.1428, | |
| "step": 502000 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "learning_rate": 1.0776803685269868e-05, | |
| "loss": 1.1577, | |
| "step": 502500 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "learning_rate": 1.071785284185173e-05, | |
| "loss": 1.0436, | |
| "step": 503000 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "learning_rate": 1.065890199843359e-05, | |
| "loss": 1.1324, | |
| "step": 503500 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "learning_rate": 1.0599951155015453e-05, | |
| "loss": 1.2022, | |
| "step": 504000 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "learning_rate": 1.0541000311597314e-05, | |
| "loss": 1.1497, | |
| "step": 504500 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "learning_rate": 1.0482049468179177e-05, | |
| "loss": 1.1561, | |
| "step": 505000 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "learning_rate": 1.0423098624761037e-05, | |
| "loss": 1.1448, | |
| "step": 505500 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "learning_rate": 1.03641477813429e-05, | |
| "loss": 1.1069, | |
| "step": 506000 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "learning_rate": 1.0305196937924761e-05, | |
| "loss": 1.2171, | |
| "step": 506500 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "learning_rate": 1.0246246094506623e-05, | |
| "loss": 1.0737, | |
| "step": 507000 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "learning_rate": 1.0187295251088483e-05, | |
| "loss": 1.1163, | |
| "step": 507500 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "learning_rate": 1.0128344407670347e-05, | |
| "loss": 1.052, | |
| "step": 508000 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "learning_rate": 1.0069393564252207e-05, | |
| "loss": 1.167, | |
| "step": 508500 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "learning_rate": 1.0010560622520906e-05, | |
| "loss": 1.1351, | |
| "step": 509000 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "learning_rate": 9.951727680789605e-06, | |
| "loss": 1.0973, | |
| "step": 509500 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 9.892776837371465e-06, | |
| "loss": 1.192, | |
| "step": 510000 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 9.833825993953327e-06, | |
| "loss": 1.0856, | |
| "step": 510500 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 9.774875150535189e-06, | |
| "loss": 1.0788, | |
| "step": 511000 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 9.71592430711705e-06, | |
| "loss": 1.1291, | |
| "step": 511500 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "learning_rate": 9.65697346369891e-06, | |
| "loss": 1.1339, | |
| "step": 512000 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "learning_rate": 9.598022620280774e-06, | |
| "loss": 1.1495, | |
| "step": 512500 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "learning_rate": 9.539189678549471e-06, | |
| "loss": 1.1007, | |
| "step": 513000 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "learning_rate": 9.480238835131335e-06, | |
| "loss": 1.129, | |
| "step": 513500 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 9.421287991713195e-06, | |
| "loss": 1.1553, | |
| "step": 514000 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 9.362337148295057e-06, | |
| "loss": 1.0732, | |
| "step": 514500 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 9.303386304876919e-06, | |
| "loss": 1.0946, | |
| "step": 515000 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 9.244553363145616e-06, | |
| "loss": 1.0184, | |
| "step": 515500 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 9.185602519727478e-06, | |
| "loss": 1.1658, | |
| "step": 516000 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 9.12665167630934e-06, | |
| "loss": 1.1194, | |
| "step": 516500 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 9.067700832891202e-06, | |
| "loss": 1.132, | |
| "step": 517000 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 9.008749989473062e-06, | |
| "loss": 1.1043, | |
| "step": 517500 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 8.949799146054925e-06, | |
| "loss": 1.1069, | |
| "step": 518000 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 8.890966204323622e-06, | |
| "loss": 1.07, | |
| "step": 518500 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 8.832015360905484e-06, | |
| "loss": 1.209, | |
| "step": 519000 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 8.773064517487346e-06, | |
| "loss": 1.1133, | |
| "step": 519500 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "learning_rate": 8.714113674069208e-06, | |
| "loss": 1.1408, | |
| "step": 520000 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "learning_rate": 8.655162830651068e-06, | |
| "loss": 1.0819, | |
| "step": 520500 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "learning_rate": 8.59621198723293e-06, | |
| "loss": 1.1985, | |
| "step": 521000 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 8.537261143814792e-06, | |
| "loss": 1.0531, | |
| "step": 521500 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 8.478310300396654e-06, | |
| "loss": 1.1547, | |
| "step": 522000 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 8.419359456978516e-06, | |
| "loss": 1.1247, | |
| "step": 522500 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 8.36064441693405e-06, | |
| "loss": 1.132, | |
| "step": 523000 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 8.301693573515912e-06, | |
| "loss": 1.0849, | |
| "step": 523500 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 8.24286063178461e-06, | |
| "loss": 1.06, | |
| "step": 524000 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 8.183909788366472e-06, | |
| "loss": 1.2021, | |
| "step": 524500 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 8.124958944948334e-06, | |
| "loss": 1.1077, | |
| "step": 525000 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 8.066008101530194e-06, | |
| "loss": 1.0834, | |
| "step": 525500 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 8.007057258112056e-06, | |
| "loss": 1.1898, | |
| "step": 526000 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 7.948106414693918e-06, | |
| "loss": 1.0924, | |
| "step": 526500 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 7.88915557127578e-06, | |
| "loss": 1.1666, | |
| "step": 527000 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 7.830204727857642e-06, | |
| "loss": 1.1483, | |
| "step": 527500 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 7.771253884439504e-06, | |
| "loss": 1.1006, | |
| "step": 528000 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 7.712303041021366e-06, | |
| "loss": 1.1411, | |
| "step": 528500 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 7.653352197603226e-06, | |
| "loss": 1.1328, | |
| "step": 529000 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 7.594401354185089e-06, | |
| "loss": 1.1238, | |
| "step": 529500 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 7.535568412453786e-06, | |
| "loss": 1.0613, | |
| "step": 530000 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 7.476617569035648e-06, | |
| "loss": 1.1304, | |
| "step": 530500 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 7.4176667256175095e-06, | |
| "loss": 1.0985, | |
| "step": 531000 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "learning_rate": 7.3587158821993706e-06, | |
| "loss": 1.0552, | |
| "step": 531500 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "learning_rate": 7.299882940468069e-06, | |
| "loss": 1.0772, | |
| "step": 532000 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "learning_rate": 7.240932097049931e-06, | |
| "loss": 1.0503, | |
| "step": 532500 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "learning_rate": 7.181981253631792e-06, | |
| "loss": 1.112, | |
| "step": 533000 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 7.12314831190049e-06, | |
| "loss": 1.0917, | |
| "step": 533500 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 7.064197468482352e-06, | |
| "loss": 1.0451, | |
| "step": 534000 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 7.005246625064214e-06, | |
| "loss": 1.1209, | |
| "step": 534500 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 6.946295781646075e-06, | |
| "loss": 1.104, | |
| "step": 535000 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 6.887344938227937e-06, | |
| "loss": 1.0351, | |
| "step": 535500 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 6.828394094809799e-06, | |
| "loss": 1.0651, | |
| "step": 536000 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 6.769561153078497e-06, | |
| "loss": 1.123, | |
| "step": 536500 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 6.7106103096603585e-06, | |
| "loss": 1.0592, | |
| "step": 537000 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 6.65165946624222e-06, | |
| "loss": 1.1219, | |
| "step": 537500 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 6.592708622824082e-06, | |
| "loss": 1.1148, | |
| "step": 538000 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 6.533757779405943e-06, | |
| "loss": 1.146, | |
| "step": 538500 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 6.474806935987805e-06, | |
| "loss": 1.0631, | |
| "step": 539000 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 6.415856092569667e-06, | |
| "loss": 1.0659, | |
| "step": 539500 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 6.356905249151528e-06, | |
| "loss": 1.0902, | |
| "step": 540000 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 6.29795440573339e-06, | |
| "loss": 1.1162, | |
| "step": 540500 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 6.239121464002088e-06, | |
| "loss": 1.0639, | |
| "step": 541000 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 6.180170620583949e-06, | |
| "loss": 1.104, | |
| "step": 541500 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 6.121219777165811e-06, | |
| "loss": 1.094, | |
| "step": 542000 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 6.062268933747673e-06, | |
| "loss": 1.1028, | |
| "step": 542500 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 6.0034359920163716e-06, | |
| "loss": 1.119, | |
| "step": 543000 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 5.944485148598233e-06, | |
| "loss": 1.1794, | |
| "step": 543500 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 5.8855343051800945e-06, | |
| "loss": 1.026, | |
| "step": 544000 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 5.826583461761956e-06, | |
| "loss": 1.0395, | |
| "step": 544500 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 5.7676326183438175e-06, | |
| "loss": 1.0916, | |
| "step": 545000 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 5.708799676612515e-06, | |
| "loss": 1.0913, | |
| "step": 545500 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 5.649848833194377e-06, | |
| "loss": 0.991, | |
| "step": 546000 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 5.590897989776238e-06, | |
| "loss": 1.0942, | |
| "step": 546500 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 5.5319471463581e-06, | |
| "loss": 1.0675, | |
| "step": 547000 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 5.473114204626799e-06, | |
| "loss": 1.1203, | |
| "step": 547500 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 5.414163361208661e-06, | |
| "loss": 1.1392, | |
| "step": 548000 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 5.355212517790522e-06, | |
| "loss": 1.1748, | |
| "step": 548500 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 5.296261674372384e-06, | |
| "loss": 1.1011, | |
| "step": 549000 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 5.237310830954246e-06, | |
| "loss": 1.1237, | |
| "step": 549500 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 5.178477889222943e-06, | |
| "loss": 1.14, | |
| "step": 550000 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 5.1195270458048046e-06, | |
| "loss": 1.1082, | |
| "step": 550500 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 5.0605762023866665e-06, | |
| "loss": 1.0511, | |
| "step": 551000 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 5.0016253589685275e-06, | |
| "loss": 1.0746, | |
| "step": 551500 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 4.942674515550389e-06, | |
| "loss": 1.0512, | |
| "step": 552000 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 4.883723672132251e-06, | |
| "loss": 1.1061, | |
| "step": 552500 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 4.82489073040095e-06, | |
| "loss": 1.1637, | |
| "step": 553000 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 4.765939886982811e-06, | |
| "loss": 1.033, | |
| "step": 553500 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 4.706989043564673e-06, | |
| "loss": 1.1271, | |
| "step": 554000 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 4.648038200146535e-06, | |
| "loss": 1.115, | |
| "step": 554500 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 4.589205258415232e-06, | |
| "loss": 1.1516, | |
| "step": 555000 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 4.530254414997094e-06, | |
| "loss": 1.0802, | |
| "step": 555500 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 4.471303571578956e-06, | |
| "loss": 1.0404, | |
| "step": 556000 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 4.4124706298476544e-06, | |
| "loss": 1.0606, | |
| "step": 556500 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 4.3535197864295155e-06, | |
| "loss": 1.0665, | |
| "step": 557000 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 4.294568943011377e-06, | |
| "loss": 1.0785, | |
| "step": 557500 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 4.235618099593239e-06, | |
| "loss": 1.06, | |
| "step": 558000 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 4.1766672561751e-06, | |
| "loss": 1.1075, | |
| "step": 558500 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 4.117716412756962e-06, | |
| "loss": 1.0468, | |
| "step": 559000 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 4.058765569338824e-06, | |
| "loss": 1.0891, | |
| "step": 559500 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 3.999814725920685e-06, | |
| "loss": 1.1616, | |
| "step": 560000 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 3.940863882502547e-06, | |
| "loss": 1.0247, | |
| "step": 560500 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 3.882030940771245e-06, | |
| "loss": 1.0789, | |
| "step": 561000 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 3.823080097353107e-06, | |
| "loss": 1.0501, | |
| "step": 561500 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 3.7641292539349687e-06, | |
| "loss": 1.0249, | |
| "step": 562000 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 3.7051784105168302e-06, | |
| "loss": 1.1371, | |
| "step": 562500 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 3.646345468785528e-06, | |
| "loss": 1.0623, | |
| "step": 563000 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 3.58739462536739e-06, | |
| "loss": 1.003, | |
| "step": 563500 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 3.5284437819492515e-06, | |
| "loss": 1.0979, | |
| "step": 564000 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 3.4696108402179498e-06, | |
| "loss": 1.126, | |
| "step": 564500 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 3.4106599967998112e-06, | |
| "loss": 1.053, | |
| "step": 565000 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 3.3517091533816727e-06, | |
| "loss": 1.0729, | |
| "step": 565500 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 3.2927583099635346e-06, | |
| "loss": 1.0286, | |
| "step": 566000 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 3.233807466545396e-06, | |
| "loss": 1.1291, | |
| "step": 566500 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 3.174856623127258e-06, | |
| "loss": 1.075, | |
| "step": 567000 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 3.1159057797091195e-06, | |
| "loss": 1.1489, | |
| "step": 567500 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 3.056954936290981e-06, | |
| "loss": 1.0357, | |
| "step": 568000 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 2.998004092872843e-06, | |
| "loss": 1.0894, | |
| "step": 568500 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 2.9390532494547043e-06, | |
| "loss": 1.0034, | |
| "step": 569000 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 2.880102406036566e-06, | |
| "loss": 1.1578, | |
| "step": 569500 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 2.821269464305264e-06, | |
| "loss": 1.0334, | |
| "step": 570000 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 2.762318620887126e-06, | |
| "loss": 1.096, | |
| "step": 570500 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 2.7033677774689875e-06, | |
| "loss": 1.069, | |
| "step": 571000 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 2.644416934050849e-06, | |
| "loss": 1.0984, | |
| "step": 571500 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 2.5855839923195472e-06, | |
| "loss": 1.0847, | |
| "step": 572000 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 2.5266331489014087e-06, | |
| "loss": 1.098, | |
| "step": 572500 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 2.4676823054832706e-06, | |
| "loss": 1.0521, | |
| "step": 573000 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 2.4088493637519685e-06, | |
| "loss": 1.1309, | |
| "step": 573500 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 2.34989852033383e-06, | |
| "loss": 1.0719, | |
| "step": 574000 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 2.290947676915692e-06, | |
| "loss": 1.0152, | |
| "step": 574500 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 2.2319968334975533e-06, | |
| "loss": 1.0853, | |
| "step": 575000 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 2.1730459900794152e-06, | |
| "loss": 1.0781, | |
| "step": 575500 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 2.1140951466612767e-06, | |
| "loss": 1.1535, | |
| "step": 576000 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 2.055144303243138e-06, | |
| "loss": 1.0598, | |
| "step": 576500 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 1.996193459825e-06, | |
| "loss": 1.1467, | |
| "step": 577000 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 1.9372426164068615e-06, | |
| "loss": 1.0705, | |
| "step": 577500 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 1.8782917729887232e-06, | |
| "loss": 1.1656, | |
| "step": 578000 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 1.8193409295705851e-06, | |
| "loss": 1.0631, | |
| "step": 578500 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 1.7603900861524468e-06, | |
| "loss": 1.1149, | |
| "step": 579000 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 1.7015571444211447e-06, | |
| "loss": 1.1648, | |
| "step": 579500 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 1.6426063010030062e-06, | |
| "loss": 1.0288, | |
| "step": 580000 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 1.5836554575848679e-06, | |
| "loss": 1.0671, | |
| "step": 580500 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 1.5247046141667297e-06, | |
| "loss": 1.048, | |
| "step": 581000 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 1.4658716724354276e-06, | |
| "loss": 1.0739, | |
| "step": 581500 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 1.4069208290172893e-06, | |
| "loss": 1.0812, | |
| "step": 582000 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 1.347969985599151e-06, | |
| "loss": 1.0971, | |
| "step": 582500 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 1.2890191421810125e-06, | |
| "loss": 1.0779, | |
| "step": 583000 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 1.2300682987628744e-06, | |
| "loss": 1.1095, | |
| "step": 583500 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 1.171117455344736e-06, | |
| "loss": 1.0638, | |
| "step": 584000 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 1.1121666119265977e-06, | |
| "loss": 1.1076, | |
| "step": 584500 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 1.0532157685084594e-06, | |
| "loss": 1.1115, | |
| "step": 585000 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 9.942649250903211e-07, | |
| "loss": 1.0967, | |
| "step": 585500 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 9.355498850458553e-07, | |
| "loss": 1.0556, | |
| "step": 586000 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 8.765990416277169e-07, | |
| "loss": 1.0782, | |
| "step": 586500 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 8.176481982095787e-07, | |
| "loss": 1.0308, | |
| "step": 587000 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 7.586973547914403e-07, | |
| "loss": 1.047, | |
| "step": 587500 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 6.997465113733019e-07, | |
| "loss": 1.0906, | |
| "step": 588000 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 6.407956679551636e-07, | |
| "loss": 1.0376, | |
| "step": 588500 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 5.818448245370253e-07, | |
| "loss": 1.0616, | |
| "step": 589000 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 5.230118828057232e-07, | |
| "loss": 1.0688, | |
| "step": 589500 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 4.640610393875849e-07, | |
| "loss": 1.0337, | |
| "step": 590000 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 4.051101959694466e-07, | |
| "loss": 1.1031, | |
| "step": 590500 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 3.461593525513083e-07, | |
| "loss": 1.1921, | |
| "step": 591000 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 2.872085091331699e-07, | |
| "loss": 1.1171, | |
| "step": 591500 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 2.282576657150316e-07, | |
| "loss": 1.0845, | |
| "step": 592000 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 1.6930682229689326e-07, | |
| "loss": 1.0091, | |
| "step": 592500 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "learning_rate": 1.1047388056559123e-07, | |
| "loss": 1.0612, | |
| "step": 593000 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "learning_rate": 5.15230371474529e-08, | |
| "loss": 1.0389, | |
| "step": 593500 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "step": 593715, | |
| "total_flos": 6.354828841670784e+16, | |
| "train_loss": 1.3742914497521532, | |
| "train_runtime": 129030.9031, | |
| "train_samples_per_second": 18.405, | |
| "train_steps_per_second": 4.601 | |
| } | |
| ], | |
| "max_steps": 593715, | |
| "num_train_epochs": 3, | |
| "total_flos": 6.354828841670784e+16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
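
Since the state above is pure data, a short script is the easiest way to read it back. Below is a minimal sketch, assuming the JSON is saved as `trainer_state.json` (the standard name the Hugging Face Trainer gives this structure; the exact path is an assumption): it separates the periodic loss logs from the trailing summary entry and cross-checks the reported throughput figures against `global_step` and `train_runtime`.

```python
# Minimal sketch for inspecting this training log. Assumes the JSON above is
# saved as "trainer_state.json" (the filename is an assumption; the structure
# matches the state file written by the Hugging Face Trainer).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# All but the last entry are periodic logs carrying loss/learning_rate/step;
# the final entry is the end-of-training summary and has no "loss" key.
logs = [e for e in state["log_history"] if "loss" in e]
summary = state["log_history"][-1]

print(f"logged points : {len(logs)}")
print(f"first loss    : {logs[0]['loss']:.4f} (step {logs[0]['step']})")
print(f"final loss    : {logs[-1]['loss']:.4f} (step {logs[-1]['step']})")
print(f"min loss      : {min(e['loss'] for e in logs):.4f}")

# Sanity-check the reported throughput: steps / runtime should reproduce
# train_steps_per_second (4.601 here), and samples-per-second divided by
# steps-per-second recovers the effective batch size (about 4).
steps_per_sec = state["global_step"] / summary["train_runtime"]
print(f"steps/sec     : {steps_per_sec:.3f} "
      f"(reported {summary['train_steps_per_second']})")
print(f"samples/step  : "
      f"{summary['train_samples_per_second'] / summary['train_steps_per_second']:.1f}")
```

The same `logs` list is what one would feed to a plotting library to visualize the loss curve (roughly 6.9 down to ~1.05 over three epochs) or the linear learning-rate decay from 7e-05 to zero.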