{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "eval_steps": 500,
  "global_step": 430750,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.994196169471852e-05,
      "loss": 0.4992,
      "step": 500
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.988392338943703e-05,
      "loss": 0.4491,
      "step": 1000
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.982588508415554e-05,
      "loss": 0.4336,
      "step": 1500
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.976784677887406e-05,
      "loss": 0.4251,
      "step": 2000
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.970980847359257e-05,
      "loss": 0.4192,
      "step": 2500
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.965177016831109e-05,
      "loss": 0.4137,
      "step": 3000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.95937318630296e-05,
      "loss": 0.4099,
      "step": 3500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.953569355774811e-05,
      "loss": 0.4068,
      "step": 4000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.947765525246663e-05,
      "loss": 0.4041,
      "step": 4500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.941961694718514e-05,
      "loss": 0.4017,
      "step": 5000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.936157864190366e-05,
      "loss": 0.3992,
      "step": 5500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.930354033662217e-05,
      "loss": 0.3976,
      "step": 6000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.924550203134069e-05,
      "loss": 0.3958,
      "step": 6500
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.91874637260592e-05,
      "loss": 0.3942,
      "step": 7000
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.912942542077771e-05,
      "loss": 0.393,
      "step": 7500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.907138711549623e-05,
      "loss": 0.3915,
      "step": 8000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.901334881021474e-05,
      "loss": 0.3904,
      "step": 8500
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.895531050493326e-05,
      "loss": 0.3894,
      "step": 9000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.889727219965177e-05,
      "loss": 0.388,
      "step": 9500
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.883923389437029e-05,
      "loss": 0.387,
      "step": 10000
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.87811955890888e-05,
      "loss": 0.386,
      "step": 10500
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.872315728380731e-05,
      "loss": 0.385,
      "step": 11000
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.866511897852583e-05,
      "loss": 0.384,
      "step": 11500
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.860708067324434e-05,
      "loss": 0.3833,
      "step": 12000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.854904236796286e-05,
      "loss": 0.3829,
      "step": 12500
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.849100406268137e-05,
      "loss": 0.3819,
      "step": 13000
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.8432965757399886e-05,
      "loss": 0.3811,
      "step": 13500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.83749274521184e-05,
      "loss": 0.3805,
      "step": 14000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.831688914683691e-05,
      "loss": 0.3799,
      "step": 14500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.825885084155543e-05,
      "loss": 0.379,
      "step": 15000
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.820081253627394e-05,
      "loss": 0.3787,
      "step": 15500
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.814277423099246e-05,
      "loss": 0.3779,
      "step": 16000
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.808473592571097e-05,
      "loss": 0.3776,
      "step": 16500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.8026697620429486e-05,
      "loss": 0.3766,
      "step": 17000
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.7968659315148e-05,
      "loss": 0.3759,
      "step": 17500
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.791062100986651e-05,
      "loss": 0.376,
      "step": 18000
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.785258270458503e-05,
      "loss": 0.3751,
      "step": 18500
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.779454439930354e-05,
      "loss": 0.375,
      "step": 19000
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.7736506094022056e-05,
      "loss": 0.3748,
      "step": 19500
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.767846778874057e-05,
      "loss": 0.3738,
      "step": 20000
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.7620429483459086e-05,
      "loss": 0.3739,
      "step": 20500
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.75623911781776e-05,
      "loss": 0.3727,
      "step": 21000
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.750435287289611e-05,
      "loss": 0.3726,
      "step": 21500
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.744631456761463e-05,
      "loss": 0.3719,
      "step": 22000
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.738827626233314e-05,
      "loss": 0.372,
      "step": 22500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.7330237957051655e-05,
      "loss": 0.3714,
      "step": 23000
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.727219965177017e-05,
      "loss": 0.3714,
      "step": 23500
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.7214161346488686e-05,
      "loss": 0.3706,
      "step": 24000
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.71561230412072e-05,
      "loss": 0.37,
      "step": 24500
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.709808473592571e-05,
      "loss": 0.37,
      "step": 25000
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.704004643064423e-05,
      "loss": 0.3698,
      "step": 25500
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.698200812536274e-05,
      "loss": 0.3695,
      "step": 26000
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.6923969820081255e-05,
      "loss": 0.3692,
      "step": 26500
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.686593151479977e-05,
      "loss": 0.3686,
      "step": 27000
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.6807893209518286e-05,
      "loss": 0.3682,
      "step": 27500
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.67498549042368e-05,
      "loss": 0.3682,
      "step": 28000
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.669181659895531e-05,
      "loss": 0.3677,
      "step": 28500
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.6633778293673825e-05,
      "loss": 0.3673,
      "step": 29000
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.657573998839234e-05,
      "loss": 0.367,
      "step": 29500
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.6517701683110855e-05,
      "loss": 0.3664,
      "step": 30000
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.645966337782937e-05,
      "loss": 0.3665,
      "step": 30500
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.6401625072547886e-05,
      "loss": 0.3664,
      "step": 31000
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.63435867672664e-05,
      "loss": 0.366,
      "step": 31500
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.628554846198491e-05,
      "loss": 0.3654,
      "step": 32000
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.6227510156703424e-05,
      "loss": 0.3652,
      "step": 32500
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.616947185142194e-05,
      "loss": 0.3654,
      "step": 33000
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.6111433546140455e-05,
      "loss": 0.365,
      "step": 33500
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.605339524085897e-05,
      "loss": 0.3642,
      "step": 34000
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.5995356935577485e-05,
      "loss": 0.3643,
      "step": 34500
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.5937318630296e-05,
      "loss": 0.3644,
      "step": 35000
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.587928032501451e-05,
      "loss": 0.364,
      "step": 35500
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.5821242019733024e-05,
      "loss": 0.3638,
      "step": 36000
    },
    {
      "epoch": 0.85,
      "learning_rate": 4.576320371445154e-05,
      "loss": 0.3633,
      "step": 36500
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.5705165409170055e-05,
      "loss": 0.3636,
      "step": 37000
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.564712710388857e-05,
      "loss": 0.3629,
      "step": 37500
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.5589088798607085e-05,
      "loss": 0.3629,
      "step": 38000
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.5531050493325594e-05,
      "loss": 0.3625,
      "step": 38500
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.547301218804411e-05,
      "loss": 0.3629,
      "step": 39000
    },
    {
      "epoch": 0.92,
      "learning_rate": 4.5414973882762624e-05,
      "loss": 0.362,
      "step": 39500
    },
    {
      "epoch": 0.93,
      "learning_rate": 4.535693557748114e-05,
      "loss": 0.3619,
      "step": 40000
    },
    {
      "epoch": 0.94,
      "learning_rate": 4.5298897272199654e-05,
      "loss": 0.3619,
      "step": 40500
    },
    {
      "epoch": 0.95,
      "learning_rate": 4.524085896691817e-05,
      "loss": 0.3612,
      "step": 41000
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.5182820661636685e-05,
      "loss": 0.3616,
      "step": 41500
    },
    {
      "epoch": 0.98,
      "learning_rate": 4.5124782356355193e-05,
      "loss": 0.3608,
      "step": 42000
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.506674405107371e-05,
      "loss": 0.361,
      "step": 42500
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.5008705745792224e-05,
      "loss": 0.3608,
      "step": 43000
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.8883345013697975,
      "eval_loss": 0.35087689757347107,
      "eval_runtime": 2032.4427,
      "eval_samples_per_second": 188.179,
      "eval_steps_per_second": 2.352,
      "step": 43075
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.495066744051074e-05,
      "loss": 0.3596,
      "step": 43500
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.4892629135229254e-05,
      "loss": 0.3592,
      "step": 44000
    },
    {
      "epoch": 1.03,
      "learning_rate": 4.483459082994777e-05,
      "loss": 0.3594,
      "step": 44500
    },
    {
      "epoch": 1.04,
      "learning_rate": 4.4776552524666285e-05,
      "loss": 0.3594,
      "step": 45000
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.471851421938479e-05,
      "loss": 0.3592,
      "step": 45500
    },
    {
      "epoch": 1.07,
      "learning_rate": 4.466047591410331e-05,
      "loss": 0.359,
      "step": 46000
    },
    {
      "epoch": 1.08,
      "learning_rate": 4.4602437608821824e-05,
      "loss": 0.3586,
      "step": 46500
    },
    {
      "epoch": 1.09,
      "learning_rate": 4.454439930354034e-05,
      "loss": 0.3586,
      "step": 47000
    },
    {
      "epoch": 1.1,
      "learning_rate": 4.4486360998258854e-05,
      "loss": 0.3583,
      "step": 47500
    },
    {
      "epoch": 1.11,
      "learning_rate": 4.442832269297737e-05,
      "loss": 0.3584,
      "step": 48000
    },
    {
      "epoch": 1.13,
      "learning_rate": 4.4370284387695885e-05,
      "loss": 0.3582,
      "step": 48500
    },
    {
      "epoch": 1.14,
      "learning_rate": 4.431224608241439e-05,
      "loss": 0.3579,
      "step": 49000
    },
    {
      "epoch": 1.15,
      "learning_rate": 4.425420777713291e-05,
      "loss": 0.3581,
      "step": 49500
    },
    {
      "epoch": 1.16,
      "learning_rate": 4.4196169471851423e-05,
      "loss": 0.3575,
      "step": 50000
    },
    {
      "epoch": 1.17,
      "learning_rate": 4.413813116656994e-05,
      "loss": 0.3575,
      "step": 50500
    },
    {
      "epoch": 1.18,
      "learning_rate": 4.4080092861288454e-05,
      "loss": 0.3573,
      "step": 51000
    },
    {
      "epoch": 1.2,
      "learning_rate": 4.402205455600696e-05,
      "loss": 0.3574,
      "step": 51500
    },
    {
      "epoch": 1.21,
      "learning_rate": 4.3964016250725484e-05,
      "loss": 0.357,
      "step": 52000
    },
    {
      "epoch": 1.22,
      "learning_rate": 4.390597794544399e-05,
      "loss": 0.357,
      "step": 52500
    },
    {
      "epoch": 1.23,
      "learning_rate": 4.384793964016251e-05,
      "loss": 0.3568,
      "step": 53000
    },
    {
      "epoch": 1.24,
      "learning_rate": 4.378990133488102e-05,
      "loss": 0.3569,
      "step": 53500
    },
    {
      "epoch": 1.25,
      "learning_rate": 4.373186302959954e-05,
      "loss": 0.3569,
      "step": 54000
    },
    {
      "epoch": 1.27,
      "learning_rate": 4.3673824724318054e-05,
      "loss": 0.3564,
      "step": 54500
    },
    {
      "epoch": 1.28,
      "learning_rate": 4.361578641903656e-05,
      "loss": 0.3562,
      "step": 55000
    },
    {
      "epoch": 1.29,
      "learning_rate": 4.3557748113755084e-05,
      "loss": 0.3562,
      "step": 55500
    },
    {
      "epoch": 1.3,
      "learning_rate": 4.349970980847359e-05,
      "loss": 0.3559,
      "step": 56000
    },
    {
      "epoch": 1.31,
      "learning_rate": 4.344167150319211e-05,
      "loss": 0.3555,
      "step": 56500
    },
    {
      "epoch": 1.32,
      "learning_rate": 4.338363319791062e-05,
      "loss": 0.3554,
      "step": 57000
    },
    {
      "epoch": 1.33,
      "learning_rate": 4.332559489262914e-05,
      "loss": 0.3552,
      "step": 57500
    },
    {
      "epoch": 1.35,
      "learning_rate": 4.3267556587347654e-05,
      "loss": 0.3553,
      "step": 58000
    },
    {
      "epoch": 1.36,
      "learning_rate": 4.320951828206616e-05,
      "loss": 0.3552,
      "step": 58500
    },
    {
      "epoch": 1.37,
      "learning_rate": 4.3151479976784684e-05,
      "loss": 0.3551,
      "step": 59000
    },
    {
      "epoch": 1.38,
      "learning_rate": 4.309344167150319e-05,
      "loss": 0.3554,
      "step": 59500
    },
    {
      "epoch": 1.39,
      "learning_rate": 4.303540336622171e-05,
      "loss": 0.3548,
      "step": 60000
    },
    {
      "epoch": 1.4,
      "learning_rate": 4.297736506094022e-05,
      "loss": 0.3547,
      "step": 60500
    },
    {
      "epoch": 1.42,
      "learning_rate": 4.291932675565873e-05,
      "loss": 0.3548,
      "step": 61000
    },
    {
      "epoch": 1.43,
      "learning_rate": 4.286128845037725e-05,
      "loss": 0.3545,
      "step": 61500
    },
    {
      "epoch": 1.44,
      "learning_rate": 4.280325014509576e-05,
      "loss": 0.3543,
      "step": 62000
    },
    {
      "epoch": 1.45,
      "learning_rate": 4.2745211839814284e-05,
      "loss": 0.3542,
      "step": 62500
    },
    {
      "epoch": 1.46,
      "learning_rate": 4.268717353453279e-05,
      "loss": 0.3544,
      "step": 63000
    },
    {
      "epoch": 1.47,
      "learning_rate": 4.262913522925131e-05,
      "loss": 0.3542,
      "step": 63500
    },
    {
      "epoch": 1.49,
      "learning_rate": 4.257109692396982e-05,
      "loss": 0.3544,
      "step": 64000
    },
    {
      "epoch": 1.5,
      "learning_rate": 4.251305861868833e-05,
      "loss": 0.3541,
      "step": 64500
    },
    {
      "epoch": 1.51,
      "learning_rate": 4.245502031340685e-05,
      "loss": 0.354,
      "step": 65000
    },
    {
      "epoch": 1.52,
      "learning_rate": 4.239698200812536e-05,
      "loss": 0.3538,
      "step": 65500
    },
    {
      "epoch": 1.53,
      "learning_rate": 4.2338943702843884e-05,
      "loss": 0.3536,
      "step": 66000
    },
    {
      "epoch": 1.54,
      "learning_rate": 4.228090539756239e-05,
      "loss": 0.3532,
      "step": 66500
    },
    {
      "epoch": 1.56,
      "learning_rate": 4.222286709228091e-05,
      "loss": 0.3533,
      "step": 67000
    },
    {
      "epoch": 1.57,
      "learning_rate": 4.216482878699942e-05,
      "loss": 0.3535,
      "step": 67500
    },
    {
      "epoch": 1.58,
      "learning_rate": 4.210679048171793e-05,
      "loss": 0.3536,
      "step": 68000
    },
    {
      "epoch": 1.59,
      "learning_rate": 4.204875217643645e-05,
      "loss": 0.353,
      "step": 68500
    },
    {
      "epoch": 1.6,
      "learning_rate": 4.199071387115496e-05,
      "loss": 0.3528,
      "step": 69000
    },
    {
      "epoch": 1.61,
      "learning_rate": 4.1932675565873483e-05,
      "loss": 0.3524,
      "step": 69500
    },
    {
      "epoch": 1.63,
      "learning_rate": 4.187463726059199e-05,
      "loss": 0.3524,
      "step": 70000
    },
    {
      "epoch": 1.64,
      "learning_rate": 4.181659895531051e-05,
      "loss": 0.3523,
      "step": 70500
    },
    {
      "epoch": 1.65,
      "learning_rate": 4.175856065002902e-05,
      "loss": 0.3524,
      "step": 71000
    },
    {
      "epoch": 1.66,
      "learning_rate": 4.170052234474753e-05,
      "loss": 0.3524,
      "step": 71500
    },
    {
      "epoch": 1.67,
      "learning_rate": 4.164248403946605e-05,
      "loss": 0.3523,
      "step": 72000
    },
    {
      "epoch": 1.68,
      "learning_rate": 4.158444573418456e-05,
      "loss": 0.3524,
      "step": 72500
    },
    {
      "epoch": 1.69,
      "learning_rate": 4.152640742890308e-05,
      "loss": 0.3518,
      "step": 73000
    },
    {
      "epoch": 1.71,
      "learning_rate": 4.146836912362159e-05,
      "loss": 0.3519,
      "step": 73500
    },
    {
      "epoch": 1.72,
      "learning_rate": 4.141033081834011e-05,
      "loss": 0.3516,
      "step": 74000
    },
    {
      "epoch": 1.73,
      "learning_rate": 4.135229251305862e-05,
      "loss": 0.3511,
      "step": 74500
    },
    {
      "epoch": 1.74,
      "learning_rate": 4.129425420777713e-05,
      "loss": 0.3515,
      "step": 75000
    },
    {
      "epoch": 1.75,
      "learning_rate": 4.123621590249565e-05,
      "loss": 0.3517,
      "step": 75500
    },
    {
      "epoch": 1.76,
      "learning_rate": 4.117817759721416e-05,
      "loss": 0.3515,
      "step": 76000
    },
    {
      "epoch": 1.78,
      "learning_rate": 4.112013929193268e-05,
      "loss": 0.3513,
      "step": 76500
    },
    {
      "epoch": 1.79,
      "learning_rate": 4.106210098665119e-05,
      "loss": 0.3509,
      "step": 77000
    },
    {
      "epoch": 1.8,
      "learning_rate": 4.100406268136971e-05,
      "loss": 0.3512,
      "step": 77500
    },
    {
      "epoch": 1.81,
      "learning_rate": 4.094602437608822e-05,
      "loss": 0.351,
      "step": 78000
    },
    {
      "epoch": 1.82,
      "learning_rate": 4.088798607080673e-05,
      "loss": 0.3511,
      "step": 78500
    },
    {
      "epoch": 1.83,
      "learning_rate": 4.082994776552525e-05,
      "loss": 0.3507,
      "step": 79000
    },
    {
      "epoch": 1.85,
      "learning_rate": 4.077190946024376e-05,
      "loss": 0.3506,
      "step": 79500
    },
    {
      "epoch": 1.86,
      "learning_rate": 4.071387115496228e-05,
      "loss": 0.3507,
      "step": 80000
    },
    {
      "epoch": 1.87,
      "learning_rate": 4.065583284968079e-05,
      "loss": 0.3508,
      "step": 80500
    },
    {
      "epoch": 1.88,
      "learning_rate": 4.0597794544399307e-05,
      "loss": 0.3501,
      "step": 81000
    },
    {
      "epoch": 1.89,
      "learning_rate": 4.053975623911782e-05,
      "loss": 0.3507,
      "step": 81500
    },
    {
      "epoch": 1.9,
      "learning_rate": 4.048171793383633e-05,
      "loss": 0.3505,
      "step": 82000
    },
    {
      "epoch": 1.92,
      "learning_rate": 4.042367962855485e-05,
      "loss": 0.3501,
      "step": 82500
    },
    {
      "epoch": 1.93,
      "learning_rate": 4.036564132327336e-05,
      "loss": 0.35,
      "step": 83000
    },
    {
      "epoch": 1.94,
      "learning_rate": 4.0307603017991876e-05,
      "loss": 0.3498,
      "step": 83500
    },
    {
      "epoch": 1.95,
      "learning_rate": 4.024956471271039e-05,
      "loss": 0.3499,
      "step": 84000
    },
    {
      "epoch": 1.96,
      "learning_rate": 4.0191526407428906e-05,
      "loss": 0.3496,
      "step": 84500
    },
    {
      "epoch": 1.97,
      "learning_rate": 4.013348810214742e-05,
      "loss": 0.35,
      "step": 85000
    },
    {
      "epoch": 1.98,
      "learning_rate": 4.007544979686593e-05,
      "loss": 0.3496,
      "step": 85500
    },
    {
      "epoch": 2.0,
      "learning_rate": 4.001741149158445e-05,
      "loss": 0.3495,
      "step": 86000
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.8910229909772815,
      "eval_loss": 0.3408386707305908,
      "eval_runtime": 1994.5363,
      "eval_samples_per_second": 191.756,
      "eval_steps_per_second": 2.397,
      "step": 86150
    },
    {
      "epoch": 2.01,
      "learning_rate": 3.995937318630296e-05,
      "loss": 0.3486,
      "step": 86500
    },
    {
      "epoch": 2.02,
      "learning_rate": 3.9901334881021476e-05,
      "loss": 0.3487,
      "step": 87000
    },
    {
      "epoch": 2.03,
      "learning_rate": 3.984329657573999e-05,
      "loss": 0.3486,
      "step": 87500
    },
    {
      "epoch": 2.04,
      "learning_rate": 3.9785258270458506e-05,
      "loss": 0.3488,
      "step": 88000
    },
    {
      "epoch": 2.05,
      "learning_rate": 3.972721996517702e-05,
      "loss": 0.3482,
      "step": 88500
    },
    {
      "epoch": 2.07,
      "learning_rate": 3.966918165989553e-05,
      "loss": 0.3481,
      "step": 89000
    },
    {
      "epoch": 2.08,
      "learning_rate": 3.961114335461405e-05,
      "loss": 0.3482,
      "step": 89500
    },
    {
      "epoch": 2.09,
      "learning_rate": 3.955310504933256e-05,
      "loss": 0.3482,
      "step": 90000
    },
    {
      "epoch": 2.1,
      "learning_rate": 3.9495066744051076e-05,
      "loss": 0.3479,
      "step": 90500
    },
    {
      "epoch": 2.11,
      "learning_rate": 3.943702843876959e-05,
      "loss": 0.348,
      "step": 91000
    },
    {
      "epoch": 2.12,
      "learning_rate": 3.9378990133488106e-05,
      "loss": 0.3478,
      "step": 91500
    },
    {
      "epoch": 2.14,
      "learning_rate": 3.932095182820662e-05,
      "loss": 0.3477,
      "step": 92000
    },
    {
      "epoch": 2.15,
      "learning_rate": 3.926291352292513e-05,
      "loss": 0.3476,
      "step": 92500
    },
    {
      "epoch": 2.16,
      "learning_rate": 3.9204875217643645e-05,
      "loss": 0.3477,
      "step": 93000
    },
    {
      "epoch": 2.17,
      "learning_rate": 3.914683691236216e-05,
      "loss": 0.3478,
      "step": 93500
    },
    {
      "epoch": 2.18,
      "learning_rate": 3.9088798607080675e-05,
      "loss": 0.3473,
      "step": 94000
    },
    {
      "epoch": 2.19,
      "learning_rate": 3.903076030179919e-05,
      "loss": 0.3476,
      "step": 94500
    },
    {
      "epoch": 2.21,
      "learning_rate": 3.8972721996517706e-05,
      "loss": 0.3477,
      "step": 95000
    },
    {
      "epoch": 2.22,
      "learning_rate": 3.891468369123622e-05,
      "loss": 0.3473,
      "step": 95500
    },
    {
      "epoch": 2.23,
      "learning_rate": 3.885664538595473e-05,
      "loss": 0.3475,
      "step": 96000
    },
    {
      "epoch": 2.24,
      "learning_rate": 3.8798607080673245e-05,
      "loss": 0.3471,
      "step": 96500
    },
    {
      "epoch": 2.25,
      "learning_rate": 3.874056877539176e-05,
      "loss": 0.3471,
      "step": 97000
    },
    {
      "epoch": 2.26,
      "learning_rate": 3.8682530470110275e-05,
      "loss": 0.3475,
      "step": 97500
    },
    {
      "epoch": 2.28,
      "learning_rate": 3.862449216482879e-05,
      "loss": 0.3471,
      "step": 98000
    },
    {
      "epoch": 2.29,
      "learning_rate": 3.8566453859547306e-05,
      "loss": 0.347,
      "step": 98500
    },
    {
      "epoch": 2.3,
      "learning_rate": 3.850841555426582e-05,
      "loss": 0.3471,
      "step": 99000
    },
    {
      "epoch": 2.31,
      "learning_rate": 3.845037724898433e-05,
      "loss": 0.3469,
      "step": 99500
    },
    {
      "epoch": 2.32,
      "learning_rate": 3.8392338943702845e-05,
      "loss": 0.3469,
      "step": 100000
    },
    {
      "epoch": 2.33,
      "learning_rate": 3.833430063842136e-05,
      "loss": 0.3467,
      "step": 100500
    },
    {
      "epoch": 2.34,
      "learning_rate": 3.8276262333139875e-05,
      "loss": 0.3463,
      "step": 101000
    },
    {
      "epoch": 2.36,
      "learning_rate": 3.821822402785839e-05,
      "loss": 0.3463,
      "step": 101500
    },
    {
      "epoch": 2.37,
      "learning_rate": 3.8160185722576905e-05,
      "loss": 0.3465,
      "step": 102000
    },
    {
      "epoch": 2.38,
      "learning_rate": 3.8102147417295414e-05,
      "loss": 0.3462,
      "step": 102500
    },
    {
      "epoch": 2.39,
      "learning_rate": 3.804410911201393e-05,
      "loss": 0.3462,
      "step": 103000
    },
    {
      "epoch": 2.4,
      "learning_rate": 3.7986070806732444e-05,
      "loss": 0.3462,
      "step": 103500
    },
    {
      "epoch": 2.41,
      "learning_rate": 3.792803250145096e-05,
      "loss": 0.3463,
      "step": 104000
    },
    {
      "epoch": 2.43,
      "learning_rate": 3.7869994196169475e-05,
      "loss": 0.3464,
      "step": 104500
    },
    {
      "epoch": 2.44,
      "learning_rate": 3.781195589088799e-05,
      "loss": 0.3464,
      "step": 105000
    },
    {
      "epoch": 2.45,
      "learning_rate": 3.7753917585606505e-05,
      "loss": 0.346,
      "step": 105500
    },
    {
      "epoch": 2.46,
      "learning_rate": 3.7695879280325014e-05,
      "loss": 0.3459,
      "step": 106000
    },
    {
      "epoch": 2.47,
      "learning_rate": 3.763784097504353e-05,
      "loss": 0.3456,
      "step": 106500
    },
    {
      "epoch": 2.48,
      "learning_rate": 3.7579802669762044e-05,
      "loss": 0.346,
      "step": 107000
    },
    {
      "epoch": 2.5,
      "learning_rate": 3.752176436448056e-05,
      "loss": 0.3462,
      "step": 107500
    },
    {
      "epoch": 2.51,
      "learning_rate": 3.7463726059199075e-05,
      "loss": 0.346,
      "step": 108000
    },
    {
      "epoch": 2.52,
      "learning_rate": 3.740568775391759e-05,
      "loss": 0.3456,
      "step": 108500
    },
    {
      "epoch": 2.53,
      "learning_rate": 3.73476494486361e-05,
      "loss": 0.3456,
      "step": 109000
    },
    {
      "epoch": 2.54,
      "learning_rate": 3.7289611143354614e-05,
      "loss": 0.3457,
      "step": 109500
    },
    {
      "epoch": 2.55,
      "learning_rate": 3.723157283807313e-05,
      "loss": 0.3454,
      "step": 110000
    },
    {
      "epoch": 2.57,
      "learning_rate": 3.7173534532791644e-05,
      "loss": 0.3453,
      "step": 110500
    },
    {
      "epoch": 2.58,
      "learning_rate": 3.711549622751016e-05,
      "loss": 0.3452,
      "step": 111000
    },
    {
      "epoch": 2.59,
      "learning_rate": 3.7057457922228674e-05,
      "loss": 0.3456,
      "step": 111500
    },
    {
      "epoch": 2.6,
      "learning_rate": 3.699941961694719e-05,
      "loss": 0.3455,
      "step": 112000
    },
    {
      "epoch": 2.61,
      "learning_rate": 3.69413813116657e-05,
      "loss": 0.3455,
      "step": 112500
    },
    {
      "epoch": 2.62,
      "learning_rate": 3.688334300638421e-05,
      "loss": 0.3454,
      "step": 113000
    },
    {
      "epoch": 2.63,
      "learning_rate": 3.682530470110273e-05,
      "loss": 0.3453,
      "step": 113500
    },
    {
      "epoch": 2.65,
      "learning_rate": 3.6767266395821244e-05,
      "loss": 0.3453,
      "step": 114000
    },
    {
      "epoch": 2.66,
      "learning_rate": 3.670922809053976e-05,
      "loss": 0.3448,
      "step": 114500
    },
    {
      "epoch": 2.67,
      "learning_rate": 3.6651189785258274e-05,
      "loss": 0.3451,
      "step": 115000
    },
    {
      "epoch": 2.68,
      "learning_rate": 3.659315147997678e-05,
      "loss": 0.3452,
      "step": 115500
    },
    {
      "epoch": 2.69,
      "learning_rate": 3.65351131746953e-05,
      "loss": 0.3448,
      "step": 116000
    },
    {
      "epoch": 2.7,
      "learning_rate": 3.647707486941381e-05,
      "loss": 0.3445,
      "step": 116500
    },
    {
      "epoch": 2.72,
      "learning_rate": 3.641903656413233e-05,
      "loss": 0.3443,
      "step": 117000
    },
    {
      "epoch": 2.73,
      "learning_rate": 3.6360998258850844e-05,
      "loss": 0.3445,
      "step": 117500
    },
    {
      "epoch": 2.74,
      "learning_rate": 3.630295995356936e-05,
      "loss": 0.3445,
      "step": 118000
    },
    {
      "epoch": 2.75,
      "learning_rate": 3.6244921648287874e-05,
      "loss": 0.345,
      "step": 118500
    },
    {
      "epoch": 2.76,
      "learning_rate": 3.618688334300638e-05,
      "loss": 0.3443,
      "step": 119000
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.61288450377249e-05,
      "loss": 0.3445,
      "step": 119500
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.607080673244341e-05,
      "loss": 0.3446,
      "step": 120000
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.601276842716193e-05,
      "loss": 0.3443,
      "step": 120500
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.5954730121880443e-05,
      "loss": 0.3442,
      "step": 121000
    },
    {
      "epoch": 2.82,
      "learning_rate": 3.589669181659896e-05,
      "loss": 0.3443,
      "step": 121500
    },
    {
      "epoch": 2.83,
      "learning_rate": 3.5838653511317474e-05,
      "loss": 0.3444,
      "step": 122000
    },
    {
      "epoch": 2.84,
      "learning_rate": 3.578061520603598e-05,
      "loss": 0.3438,
      "step": 122500
    },
    {
      "epoch": 2.86,
      "learning_rate": 3.57225769007545e-05,
      "loss": 0.3444,
      "step": 123000
    },
    {
      "epoch": 2.87,
      "learning_rate": 3.566453859547301e-05,
      "loss": 0.3442,
      "step": 123500
    },
    {
      "epoch": 2.88,
      "learning_rate": 3.560650029019153e-05,
      "loss": 0.3436,
      "step": 124000
    },
    {
      "epoch": 2.89,
      "learning_rate": 3.554846198491004e-05,
      "loss": 0.3441,
      "step": 124500
    },
    {
      "epoch": 2.9,
      "learning_rate": 3.549042367962855e-05,
      "loss": 0.3438,
      "step": 125000
    },
    {
      "epoch": 2.91,
      "learning_rate": 3.5432385374347074e-05,
      "loss": 0.3438,
      "step": 125500
    },
    {
      "epoch": 2.93,
      "learning_rate": 3.537434706906558e-05,
      "loss": 0.3438,
      "step": 126000
    },
    {
      "epoch": 2.94,
      "learning_rate": 3.53163087637841e-05,
      "loss": 0.3437,
      "step": 126500
    },
    {
      "epoch": 2.95,
      "learning_rate": 3.525827045850261e-05,
      "loss": 0.3438,
      "step": 127000
    },
    {
      "epoch": 2.96,
      "learning_rate": 3.520023215322113e-05,
      "loss": 0.3438,
      "step": 127500
    },
    {
      "epoch": 2.97,
      "learning_rate": 3.514219384793964e-05,
      "loss": 0.3434,
      "step": 128000
    },
    {
      "epoch": 2.98,
      "learning_rate": 3.508415554265815e-05,
      "loss": 0.3434,
      "step": 128500
    },
    {
      "epoch": 2.99,
      "learning_rate": 3.5026117237376674e-05,
      "loss": 0.3435,
      "step": 129000
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.8924505609685972,
      "eval_loss": 0.3355477452278137,
      "eval_runtime": 1982.819,
      "eval_samples_per_second": 192.889,
      "eval_steps_per_second": 2.411,
      "step": 129225
    },
    {
      "epoch": 3.01,
      "learning_rate": 3.496807893209518e-05,
      "loss": 0.3429,
      "step": 129500
    },
    {
      "epoch": 3.02,
      "learning_rate": 3.49100406268137e-05,
      "loss": 0.3425,
      "step": 130000
    },
    {
      "epoch": 3.03,
      "learning_rate": 3.485200232153221e-05,
      "loss": 0.3425,
      "step": 130500
    },
    {
      "epoch": 3.04,
      "learning_rate": 3.479396401625073e-05,
      "loss": 0.3424,
      "step": 131000
    },
    {
      "epoch": 3.05,
      "learning_rate": 3.473592571096924e-05,
      "loss": 0.3426,
      "step": 131500
    },
    {
      "epoch": 3.06,
      "learning_rate": 3.467788740568775e-05,
      "loss": 0.3425,
      "step": 132000
    },
    {
      "epoch": 3.08,
      "learning_rate": 3.461984910040627e-05,
      "loss": 0.3421,
      "step": 132500
    },
    {
      "epoch": 3.09,
      "learning_rate": 3.456181079512478e-05,
      "loss": 0.3422,
      "step": 133000
    },
    {
      "epoch": 3.1,
      "learning_rate": 3.45037724898433e-05,
      "loss": 0.3426,
      "step": 133500
    },
    {
      "epoch": 3.11,
      "learning_rate": 3.444573418456181e-05,
      "loss": 0.3423,
      "step": 134000
    },
    {
      "epoch": 3.12,
      "learning_rate": 3.438769587928033e-05,
      "loss": 0.3424,
      "step": 134500
    },
    {
      "epoch": 3.13,
      "learning_rate": 3.432965757399884e-05,
      "loss": 0.3423,
      "step": 135000
    },
    {
      "epoch": 3.15,
      "learning_rate": 3.427161926871735e-05,
      "loss": 0.3422,
      "step": 135500
    },
    {
      "epoch": 3.16,
      "learning_rate": 3.421358096343587e-05,
      "loss": 0.342,
      "step": 136000
    },
    {
      "epoch": 3.17,
      "learning_rate": 3.415554265815438e-05,
      "loss": 0.342,
      "step": 136500
    },
    {
      "epoch": 3.18,
      "learning_rate": 3.40975043528729e-05,
      "loss": 0.342,
      "step": 137000
    },
    {
      "epoch": 3.19,
      "learning_rate": 3.403946604759141e-05,
      "loss": 0.342,
      "step": 137500
    },
    {
      "epoch": 3.2,
      "learning_rate": 3.398142774230992e-05,
      "loss": 0.3416,
      "step": 138000
    },
    {
      "epoch": 3.22,
      "learning_rate": 3.392338943702844e-05,
      "loss": 0.342,
      "step": 138500
    },
    {
      "epoch": 3.23,
      "learning_rate": 3.386535113174695e-05,
      "loss": 0.342,
      "step": 139000
    },
    {
      "epoch": 3.24,
      "learning_rate": 3.380731282646547e-05,
      "loss": 0.3419,
      "step": 139500
    },
    {
      "epoch": 3.25,
      "learning_rate": 3.374927452118398e-05,
      "loss": 0.3421,
      "step": 140000
    },
    {
      "epoch": 3.26,
      "learning_rate": 3.36912362159025e-05,
      "loss": 0.3415,
      "step": 140500
    },
    {
      "epoch": 3.27,
      "learning_rate": 3.363319791062101e-05,
      "loss": 0.3417,
      "step": 141000
    },
    {
      "epoch": 3.28,
      "learning_rate": 3.357515960533952e-05,
      "loss": 0.3417,
      "step": 141500
    },
    {
      "epoch": 3.3,
      "learning_rate": 3.351712130005804e-05,
      "loss": 0.3413,
      "step": 142000
    },
    {
      "epoch": 3.31,
      "learning_rate": 3.345908299477655e-05,
      "loss": 0.3414,
      "step": 142500
    },
    {
      "epoch": 3.32,
      "learning_rate": 3.340104468949507e-05,
      "loss": 0.3415,
      "step": 143000
    },
    {
      "epoch": 3.33,
      "learning_rate": 3.334300638421358e-05,
      "loss": 0.3411,
      "step": 143500
    },
    {
      "epoch": 3.34,
      "learning_rate": 3.3284968078932096e-05,
      "loss": 0.3414,
      "step": 144000
    },
    {
      "epoch": 3.35,
      "learning_rate": 3.322692977365061e-05,
      "loss": 0.3415,
      "step": 144500
    },
    {
      "epoch": 3.37,
      "learning_rate": 3.316889146836912e-05,
      "loss": 0.3412,
      "step": 145000
    },
    {
      "epoch": 3.38,
      "learning_rate": 3.311085316308764e-05,
      "loss": 0.3415,
      "step": 145500
    },
    {
      "epoch": 3.39,
      "learning_rate": 3.305281485780615e-05,
      "loss": 0.3415,
      "step": 146000
    },
    {
      "epoch": 3.4,
      "learning_rate": 3.299477655252467e-05,
      "loss": 0.3417,
      "step": 146500
    },
    {
      "epoch": 3.41,
      "learning_rate": 3.293673824724318e-05,
      "loss": 0.3412,
      "step": 147000
    },
    {
      "epoch": 3.42,
      "learning_rate": 3.2878699941961696e-05,
      "loss": 0.3413,
      "step": 147500
    },
    {
      "epoch": 3.44,
      "learning_rate": 3.282066163668021e-05,
      "loss": 0.3413,
      "step": 148000
    },
    {
      "epoch": 3.45,
      "learning_rate": 3.276262333139872e-05,
      "loss": 0.3414,
      "step": 148500
    },
    {
      "epoch": 3.46,
      "learning_rate": 3.270458502611724e-05,
      "loss": 0.341,
      "step": 149000
    },
    {
      "epoch": 3.47,
      "learning_rate": 3.264654672083575e-05,
      "loss": 0.3408,
      "step": 149500
    },
    {
      "epoch": 3.48,
      "learning_rate": 3.258850841555427e-05,
      "loss": 0.3412,
      "step": 150000
    },
    {
      "epoch": 3.49,
      "learning_rate": 3.253047011027278e-05,
      "loss": 0.3409,
      "step": 150500
    },
    {
      "epoch": 3.51,
      "learning_rate": 3.2472431804991296e-05,
      "loss": 0.3412,
      "step": 151000
    },
    {
      "epoch": 3.52,
      "learning_rate": 3.241439349970981e-05,
      "loss": 0.3408,
      "step": 151500
    },
    {
      "epoch": 3.53,
      "learning_rate": 3.235635519442832e-05,
      "loss": 0.341,
      "step": 152000
    },
    {
      "epoch": 3.54,
      "learning_rate": 3.229831688914684e-05,
      "loss": 0.3407,
      "step": 152500
    },
    {
      "epoch": 3.55,
      "learning_rate": 3.224027858386535e-05,
      "loss": 0.3408,
      "step": 153000
    },
    {
      "epoch": 3.56,
      "learning_rate": 3.218224027858387e-05,
      "loss": 0.3407,
      "step": 153500
    },
    {
      "epoch": 3.58,
      "learning_rate": 3.212420197330238e-05,
      "loss": 0.3404,
      "step": 154000
    },
    {
      "epoch": 3.59,
      "learning_rate": 3.2066163668020896e-05,
      "loss": 0.3407,
      "step": 154500
    },
    {
      "epoch": 3.6,
      "learning_rate": 3.200812536273941e-05,
      "loss": 0.3404,
      "step": 155000
    },
    {
      "epoch": 3.61,
      "learning_rate": 3.195008705745792e-05,
      "loss": 0.3403,
      "step": 155500
    },
    {
      "epoch": 3.62,
      "learning_rate": 3.189204875217644e-05,
      "loss": 0.3404,
      "step": 156000
    },
    {
      "epoch": 3.63,
      "learning_rate": 3.183401044689495e-05,
      "loss": 0.3407,
      "step": 156500
    },
    {
      "epoch": 3.64,
      "learning_rate": 3.1775972141613465e-05,
      "loss": 0.3403,
      "step": 157000
    },
    {
      "epoch": 3.66,
      "learning_rate": 3.171793383633198e-05,
      "loss": 0.3403,
      "step": 157500
    },
    {
      "epoch": 3.67,
      "learning_rate": 3.1659895531050496e-05,
      "loss": 0.3401,
      "step": 158000
    },
    {
      "epoch": 3.68,
      "learning_rate": 3.160185722576901e-05,
      "loss": 0.3399,
      "step": 158500
    },
    {
      "epoch": 3.69,
      "learning_rate": 3.154381892048752e-05,
      "loss": 0.3403,
      "step": 159000
    },
    {
      "epoch": 3.7,
      "learning_rate": 3.148578061520604e-05,
      "loss": 0.3403,
      "step": 159500
    },
    {
      "epoch": 3.71,
      "learning_rate": 3.142774230992455e-05,
      "loss": 0.3404,
      "step": 160000
    },
    {
      "epoch": 3.73,
      "learning_rate": 3.1369704004643065e-05,
      "loss": 0.3401,
      "step": 160500
    },
    {
      "epoch": 3.74,
      "learning_rate": 3.131166569936158e-05,
      "loss": 0.3401,
      "step": 161000
    },
    {
      "epoch": 3.75,
      "learning_rate": 3.1253627394080096e-05,
      "loss": 0.3401,
      "step": 161500
    },
    {
      "epoch": 3.76,
      "learning_rate": 3.119558908879861e-05,
      "loss": 0.3401,
      "step": 162000
    },
    {
      "epoch": 3.77,
      "learning_rate": 3.113755078351712e-05,
      "loss": 0.34,
      "step": 162500
    },
    {
      "epoch": 3.78,
      "learning_rate": 3.107951247823564e-05,
      "loss": 0.3397,
      "step": 163000
    },
    {
      "epoch": 3.8,
      "learning_rate": 3.102147417295415e-05,
      "loss": 0.3399,
      "step": 163500
    },
    {
      "epoch": 3.81,
      "learning_rate": 3.0963435867672665e-05,
      "loss": 0.34,
      "step": 164000
    },
    {
      "epoch": 3.82,
      "learning_rate": 3.090539756239118e-05,
      "loss": 0.3401,
      "step": 164500
    },
    {
      "epoch": 3.83,
      "learning_rate": 3.0847359257109695e-05,
      "loss": 0.3397,
      "step": 165000
    },
    {
      "epoch": 3.84,
      "learning_rate": 3.078932095182821e-05,
      "loss": 0.3398,
      "step": 165500
    },
    {
      "epoch": 3.85,
      "learning_rate": 3.073128264654672e-05,
      "loss": 0.3399,
      "step": 166000
    },
    {
      "epoch": 3.87,
      "learning_rate": 3.067324434126524e-05,
      "loss": 0.3401,
      "step": 166500
    },
    {
      "epoch": 3.88,
      "learning_rate": 3.061520603598375e-05,
      "loss": 0.3399,
      "step": 167000
    },
    {
      "epoch": 3.89,
      "learning_rate": 3.0557167730702265e-05,
      "loss": 0.3399,
      "step": 167500
    },
    {
      "epoch": 3.9,
      "learning_rate": 3.049912942542078e-05,
      "loss": 0.3397,
      "step": 168000
    },
    {
      "epoch": 3.91,
      "learning_rate": 3.0441091120139292e-05,
      "loss": 0.3396,
      "step": 168500
    },
    {
      "epoch": 3.92,
      "learning_rate": 3.038305281485781e-05,
      "loss": 0.3397,
      "step": 169000
    },
    {
      "epoch": 3.93,
      "learning_rate": 3.0325014509576322e-05,
      "loss": 0.3395,
      "step": 169500
    },
    {
      "epoch": 3.95,
      "learning_rate": 3.0266976204294834e-05,
      "loss": 0.339,
      "step": 170000
    },
    {
      "epoch": 3.96,
      "learning_rate": 3.020893789901335e-05,
      "loss": 0.3393,
      "step": 170500
    },
    {
      "epoch": 3.97,
      "learning_rate": 3.015089959373186e-05,
      "loss": 0.3397,
      "step": 171000
    },
    {
      "epoch": 3.98,
      "learning_rate": 3.009286128845038e-05,
      "loss": 0.3391,
      "step": 171500
    },
    {
      "epoch": 3.99,
      "learning_rate": 3.003482298316889e-05,
      "loss": 0.3394,
      "step": 172000
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.8933373896351878,
      "eval_loss": 0.3321678936481476,
      "eval_runtime": 1989.6768,
      "eval_samples_per_second": 192.224,
      "eval_steps_per_second": 2.403,
      "step": 172300
    },
    {
      "epoch": 4.0,
      "learning_rate": 2.997678467788741e-05,
      "loss": 0.3389,
      "step": 172500
    },
    {
      "epoch": 4.02,
      "learning_rate": 2.9918746372605922e-05,
      "loss": 0.3383,
      "step": 173000
    },
    {
      "epoch": 4.03,
      "learning_rate": 2.9860708067324434e-05,
      "loss": 0.3384,
      "step": 173500
    },
    {
      "epoch": 4.04,
      "learning_rate": 2.980266976204295e-05,
      "loss": 0.3387,
      "step": 174000
    },
    {
      "epoch": 4.05,
      "learning_rate": 2.974463145676146e-05,
      "loss": 0.3383,
      "step": 174500
    },
    {
      "epoch": 4.06,
      "learning_rate": 2.968659315147998e-05,
      "loss": 0.3385,
      "step": 175000
    },
    {
      "epoch": 4.07,
      "learning_rate": 2.962855484619849e-05,
      "loss": 0.3382,
      "step": 175500
    },
    {
      "epoch": 4.09,
      "learning_rate": 2.957051654091701e-05,
      "loss": 0.3383,
      "step": 176000
    },
    {
      "epoch": 4.1,
      "learning_rate": 2.9512478235635522e-05,
      "loss": 0.3387,
      "step": 176500
    },
    {
      "epoch": 4.11,
      "learning_rate": 2.9454439930354034e-05,
      "loss": 0.3382,
      "step": 177000
    },
    {
      "epoch": 4.12,
      "learning_rate": 2.939640162507255e-05,
      "loss": 0.3386,
      "step": 177500
    },
    {
      "epoch": 4.13,
      "learning_rate": 2.933836331979106e-05,
      "loss": 0.3381,
      "step": 178000
    },
    {
      "epoch": 4.14,
      "learning_rate": 2.928032501450958e-05,
      "loss": 0.338,
      "step": 178500
    },
    {
      "epoch": 4.16,
      "learning_rate": 2.922228670922809e-05,
      "loss": 0.3383,
      "step": 179000
    },
    {
      "epoch": 4.17,
      "learning_rate": 2.9164248403946603e-05,
      "loss": 0.3383,
      "step": 179500
    },
    {
      "epoch": 4.18,
      "learning_rate": 2.9106210098665122e-05,
      "loss": 0.3385,
      "step": 180000
    },
    {
      "epoch": 4.19,
      "learning_rate": 2.9048171793383634e-05,
      "loss": 0.3386,
      "step": 180500
    },
    {
      "epoch": 4.2,
      "learning_rate": 2.899013348810215e-05,
      "loss": 0.3385,
      "step": 181000
    },
    {
      "epoch": 4.21,
      "learning_rate": 2.893209518282066e-05,
      "loss": 0.3383,
      "step": 181500
    },
    {
      "epoch": 4.23,
      "learning_rate": 2.887405687753918e-05,
      "loss": 0.3384,
      "step": 182000
    },
    {
      "epoch": 4.24,
      "learning_rate": 2.881601857225769e-05,
      "loss": 0.338,
      "step": 182500
    },
    {
      "epoch": 4.25,
      "learning_rate": 2.8757980266976203e-05,
      "loss": 0.3382,
      "step": 183000
    },
    {
      "epoch": 4.26,
      "learning_rate": 2.869994196169472e-05,
      "loss": 0.3382,
      "step": 183500
    },
    {
      "epoch": 4.27,
      "learning_rate": 2.8641903656413233e-05,
      "loss": 0.3382,
      "step": 184000
    },
    {
      "epoch": 4.28,
      "learning_rate": 2.858386535113175e-05,
      "loss": 0.3376,
      "step": 184500
    },
    {
      "epoch": 4.29,
      "learning_rate": 2.852582704585026e-05,
      "loss": 0.338,
      "step": 185000
    },
    {
      "epoch": 4.31,
      "learning_rate": 2.846778874056878e-05,
      "loss": 0.3379,
      "step": 185500
    },
    {
      "epoch": 4.32,
      "learning_rate": 2.840975043528729e-05,
      "loss": 0.3377,
      "step": 186000
    },
    {
      "epoch": 4.33,
      "learning_rate": 2.8351712130005803e-05,
      "loss": 0.3382,
      "step": 186500
    },
    {
      "epoch": 4.34,
      "learning_rate": 2.829367382472432e-05,
      "loss": 0.3375,
      "step": 187000
    },
    {
      "epoch": 4.35,
      "learning_rate": 2.8235635519442833e-05,
      "loss": 0.338,
      "step": 187500
    },
    {
      "epoch": 4.36,
      "learning_rate": 2.817759721416135e-05,
      "loss": 0.3377,
      "step": 188000
    },
    {
      "epoch": 4.38,
      "learning_rate": 2.811955890887986e-05,
      "loss": 0.3375,
      "step": 188500
    },
    {
      "epoch": 4.39,
      "learning_rate": 2.8061520603598372e-05,
      "loss": 0.3373,
      "step": 189000
    },
    {
      "epoch": 4.4,
      "learning_rate": 2.800348229831689e-05,
      "loss": 0.3378,
      "step": 189500
    },
    {
      "epoch": 4.41,
      "learning_rate": 2.7945443993035403e-05,
      "loss": 0.3375,
      "step": 190000
    },
    {
      "epoch": 4.42,
      "learning_rate": 2.788740568775392e-05,
      "loss": 0.3375,
      "step": 190500
    },
    {
      "epoch": 4.43,
      "learning_rate": 2.7829367382472433e-05,
      "loss": 0.3379,
      "step": 191000
    },
    {
      "epoch": 4.45,
      "learning_rate": 2.7771329077190948e-05,
      "loss": 0.3375,
      "step": 191500
    },
    {
      "epoch": 4.46,
      "learning_rate": 2.771329077190946e-05,
      "loss": 0.3377,
      "step": 192000
    },
    {
      "epoch": 4.47,
      "learning_rate": 2.7655252466627972e-05,
      "loss": 0.3377,
      "step": 192500
    },
    {
      "epoch": 4.48,
      "learning_rate": 2.759721416134649e-05,
      "loss": 0.3379,
      "step": 193000
    },
    {
      "epoch": 4.49,
      "learning_rate": 2.7539175856065002e-05,
      "loss": 0.3374,
      "step": 193500
    },
    {
      "epoch": 4.5,
      "learning_rate": 2.748113755078352e-05,
      "loss": 0.3375,
      "step": 194000
    },
    {
      "epoch": 4.52,
      "learning_rate": 2.7423099245502033e-05,
      "loss": 0.3372,
      "step": 194500
    },
    {
      "epoch": 4.53,
      "learning_rate": 2.7365060940220548e-05,
      "loss": 0.3378,
      "step": 195000
    },
    {
      "epoch": 4.54,
      "learning_rate": 2.730702263493906e-05,
      "loss": 0.3375,
      "step": 195500
    },
    {
      "epoch": 4.55,
      "learning_rate": 2.7248984329657572e-05,
      "loss": 0.3372,
      "step": 196000
    },
    {
      "epoch": 4.56,
      "learning_rate": 2.719094602437609e-05,
      "loss": 0.3374,
      "step": 196500
    },
    {
      "epoch": 4.57,
      "learning_rate": 2.7132907719094602e-05,
      "loss": 0.3371,
      "step": 197000
    },
    {
      "epoch": 4.59,
      "learning_rate": 2.707486941381312e-05,
      "loss": 0.3373,
      "step": 197500
    },
    {
      "epoch": 4.6,
      "learning_rate": 2.7016831108531633e-05,
      "loss": 0.3372,
      "step": 198000
    },
    {
      "epoch": 4.61,
      "learning_rate": 2.6958792803250148e-05,
      "loss": 0.3372,
      "step": 198500
    },
    {
      "epoch": 4.62,
      "learning_rate": 2.690075449796866e-05,
      "loss": 0.3375,
      "step": 199000
    },
    {
      "epoch": 4.63,
      "learning_rate": 2.684271619268717e-05,
      "loss": 0.3371,
      "step": 199500
    },
    {
      "epoch": 4.64,
      "learning_rate": 2.678467788740569e-05,
      "loss": 0.3372,
      "step": 200000
    },
    {
      "epoch": 4.65,
      "learning_rate": 2.6726639582124202e-05,
      "loss": 0.3369,
      "step": 200500
    },
    {
      "epoch": 4.67,
      "learning_rate": 2.666860127684272e-05,
      "loss": 0.337,
      "step": 201000
    },
    {
      "epoch": 4.68,
      "learning_rate": 2.6610562971561232e-05,
      "loss": 0.3372,
      "step": 201500
    },
    {
      "epoch": 4.69,
      "learning_rate": 2.6552524666279744e-05,
      "loss": 0.337,
      "step": 202000
    },
    {
      "epoch": 4.7,
      "learning_rate": 2.649448636099826e-05,
      "loss": 0.3372,
      "step": 202500
    },
    {
      "epoch": 4.71,
      "learning_rate": 2.643644805571677e-05,
      "loss": 0.3373,
      "step": 203000
    },
    {
      "epoch": 4.72,
      "learning_rate": 2.637840975043529e-05,
      "loss": 0.3368,
      "step": 203500
    },
    {
      "epoch": 4.74,
      "learning_rate": 2.6320371445153802e-05,
      "loss": 0.3368,
      "step": 204000
    },
    {
      "epoch": 4.75,
      "learning_rate": 2.626233313987232e-05,
      "loss": 0.3372,
      "step": 204500
    },
    {
      "epoch": 4.76,
      "learning_rate": 2.6204294834590832e-05,
      "loss": 0.3369,
      "step": 205000
    },
    {
      "epoch": 4.77,
      "learning_rate": 2.6146256529309344e-05,
      "loss": 0.3368,
      "step": 205500
    },
    {
      "epoch": 4.78,
      "learning_rate": 2.608821822402786e-05,
      "loss": 0.337,
      "step": 206000
    },
    {
      "epoch": 4.79,
      "learning_rate": 2.603017991874637e-05,
      "loss": 0.3371,
      "step": 206500
    },
    {
      "epoch": 4.81,
      "learning_rate": 2.597214161346489e-05,
      "loss": 0.3371,
      "step": 207000
    },
    {
      "epoch": 4.82,
      "learning_rate": 2.59141033081834e-05,
      "loss": 0.3367,
      "step": 207500
    },
    {
      "epoch": 4.83,
      "learning_rate": 2.585606500290192e-05,
      "loss": 0.3368,
      "step": 208000
    },
    {
      "epoch": 4.84,
      "learning_rate": 2.5798026697620432e-05,
      "loss": 0.337,
      "step": 208500
    },
    {
      "epoch": 4.85,
      "learning_rate": 2.5739988392338944e-05,
      "loss": 0.3366,
      "step": 209000
    },
    {
      "epoch": 4.86,
      "learning_rate": 2.568195008705746e-05,
      "loss": 0.3369,
      "step": 209500
    },
    {
      "epoch": 4.88,
      "learning_rate": 2.562391178177597e-05,
      "loss": 0.3367,
      "step": 210000
    },
    {
      "epoch": 4.89,
      "learning_rate": 2.556587347649449e-05,
      "loss": 0.3363,
      "step": 210500
    },
    {
      "epoch": 4.9,
      "learning_rate": 2.5507835171213e-05,
      "loss": 0.3368,
      "step": 211000
    },
    {
      "epoch": 4.91,
      "learning_rate": 2.5449796865931513e-05,
      "loss": 0.3367,
      "step": 211500
    },
    {
      "epoch": 4.92,
      "learning_rate": 2.5391758560650032e-05,
      "loss": 0.3365,
      "step": 212000
    },
    {
      "epoch": 4.93,
      "learning_rate": 2.5333720255368544e-05,
      "loss": 0.3366,
      "step": 212500
    },
    {
      "epoch": 4.94,
      "learning_rate": 2.527568195008706e-05,
      "loss": 0.3364,
      "step": 213000
    },
    {
      "epoch": 4.96,
      "learning_rate": 2.521764364480557e-05,
      "loss": 0.3365,
      "step": 213500
    },
    {
      "epoch": 4.97,
      "learning_rate": 2.515960533952409e-05,
      "loss": 0.3364,
      "step": 214000
    },
    {
      "epoch": 4.98,
      "learning_rate": 2.51015670342426e-05,
      "loss": 0.3363,
      "step": 214500
    },
    {
      "epoch": 4.99,
      "learning_rate": 2.5043528728961113e-05,
      "loss": 0.3367,
      "step": 215000
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.8939737566541442,
      "eval_loss": 0.32981738448143005,
      "eval_runtime": 1986.7948,
      "eval_samples_per_second": 192.503,
      "eval_steps_per_second": 2.406,
      "step": 215375
    },
    {
      "epoch": 5.0,
      "learning_rate": 2.4985490423679632e-05,
      "loss": 0.336,
      "step": 215500
    },
    {
      "epoch": 5.01,
      "learning_rate": 2.4927452118398144e-05,
      "loss": 0.3354,
      "step": 216000
    },
    {
      "epoch": 5.03,
      "learning_rate": 2.486941381311666e-05,
      "loss": 0.336,
      "step": 216500
    },
    {
      "epoch": 5.04,
      "learning_rate": 2.481137550783517e-05,
      "loss": 0.3356,
      "step": 217000
    },
    {
      "epoch": 5.05,
      "learning_rate": 2.4753337202553686e-05,
      "loss": 0.3358,
      "step": 217500
    },
    {
      "epoch": 5.06,
      "learning_rate": 2.46952988972722e-05,
      "loss": 0.3358,
      "step": 218000
    },
    {
      "epoch": 5.07,
      "learning_rate": 2.4637260591990716e-05,
      "loss": 0.3355,
      "step": 218500
    },
    {
      "epoch": 5.08,
      "learning_rate": 2.457922228670923e-05,
      "loss": 0.3356,
      "step": 219000
    },
    {
      "epoch": 5.1,
      "learning_rate": 2.4521183981427743e-05,
      "loss": 0.3355,
      "step": 219500
    },
    {
      "epoch": 5.11,
      "learning_rate": 2.4463145676146255e-05,
      "loss": 0.3359,
      "step": 220000
    },
    {
      "epoch": 5.12,
      "learning_rate": 2.440510737086477e-05,
      "loss": 0.3355,
      "step": 220500
    },
    {
      "epoch": 5.13,
      "learning_rate": 2.4347069065583286e-05,
      "loss": 0.3354,
      "step": 221000
    },
    {
      "epoch": 5.14,
      "learning_rate": 2.42890307603018e-05,
      "loss": 0.3357,
      "step": 221500
    },
    {
      "epoch": 5.15,
      "learning_rate": 2.4230992455020316e-05,
      "loss": 0.3353,
      "step": 222000
    },
    {
      "epoch": 5.17,
      "learning_rate": 2.417295414973883e-05,
      "loss": 0.3356,
      "step": 222500
    },
    {
      "epoch": 5.18,
      "learning_rate": 2.4114915844457343e-05,
      "loss": 0.3352,
      "step": 223000
    },
    {
      "epoch": 5.19,
      "learning_rate": 2.4056877539175855e-05,
      "loss": 0.335,
      "step": 223500
    },
    {
      "epoch": 5.2,
      "learning_rate": 2.399883923389437e-05,
      "loss": 0.3351,
      "step": 224000
    },
    {
      "epoch": 5.21,
      "learning_rate": 2.3940800928612885e-05,
      "loss": 0.3353,
      "step": 224500
    },
    {
      "epoch": 5.22,
      "learning_rate": 2.38827626233314e-05,
      "loss": 0.3353,
      "step": 225000
    },
    {
      "epoch": 5.24,
      "learning_rate": 2.3824724318049916e-05,
      "loss": 0.3355,
      "step": 225500
    },
    {
      "epoch": 5.25,
      "learning_rate": 2.3766686012768428e-05,
      "loss": 0.3357,
      "step": 226000
    },
    {
      "epoch": 5.26,
      "learning_rate": 2.3708647707486943e-05,
      "loss": 0.3351,
      "step": 226500
    },
    {
      "epoch": 5.27,
      "learning_rate": 2.3650609402205455e-05,
      "loss": 0.3355,
      "step": 227000
    },
    {
      "epoch": 5.28,
      "learning_rate": 2.359257109692397e-05,
      "loss": 0.335,
      "step": 227500
    },
    {
      "epoch": 5.29,
      "learning_rate": 2.3534532791642485e-05,
      "loss": 0.3356,
      "step": 228000
    },
    {
      "epoch": 5.3,
      "learning_rate": 2.3476494486361e-05,
      "loss": 0.3353,
      "step": 228500
    },
    {
      "epoch": 5.32,
      "learning_rate": 2.3418456181079512e-05,
      "loss": 0.3354,
      "step": 229000
    },
    {
      "epoch": 5.33,
      "learning_rate": 2.3360417875798028e-05,
      "loss": 0.3351,
      "step": 229500
    },
    {
      "epoch": 5.34,
      "learning_rate": 2.3302379570516543e-05,
      "loss": 0.3353,
      "step": 230000
    },
    {
      "epoch": 5.35,
      "learning_rate": 2.3244341265235055e-05,
      "loss": 0.3349,
      "step": 230500
    },
    {
      "epoch": 5.36,
      "learning_rate": 2.318630295995357e-05,
      "loss": 0.3351,
      "step": 231000
    },
    {
      "epoch": 5.37,
      "learning_rate": 2.3128264654672085e-05,
      "loss": 0.3357,
      "step": 231500
    },
    {
      "epoch": 5.39,
      "learning_rate": 2.30702263493906e-05,
      "loss": 0.3354,
      "step": 232000
    },
    {
      "epoch": 5.4,
      "learning_rate": 2.3012188044109112e-05,
      "loss": 0.3351,
      "step": 232500
    },
    {
      "epoch": 5.41,
      "learning_rate": 2.2954149738827627e-05,
| "learning_rate": 2.2954149738827627e-05, | |
| "loss": 0.3352, | |
| "step": 233000 | |
| }, | |
| { | |
| "epoch": 5.42, | |
| "learning_rate": 2.2896111433546143e-05, | |
| "loss": 0.3354, | |
| "step": 233500 | |
| }, | |
| { | |
| "epoch": 5.43, | |
| "learning_rate": 2.2838073128264654e-05, | |
| "loss": 0.335, | |
| "step": 234000 | |
| }, | |
| { | |
| "epoch": 5.44, | |
| "learning_rate": 2.278003482298317e-05, | |
| "loss": 0.3352, | |
| "step": 234500 | |
| }, | |
| { | |
| "epoch": 5.46, | |
| "learning_rate": 2.2721996517701685e-05, | |
| "loss": 0.3347, | |
| "step": 235000 | |
| }, | |
| { | |
| "epoch": 5.47, | |
| "learning_rate": 2.2663958212420197e-05, | |
| "loss": 0.3351, | |
| "step": 235500 | |
| }, | |
| { | |
| "epoch": 5.48, | |
| "learning_rate": 2.2605919907138712e-05, | |
| "loss": 0.3349, | |
| "step": 236000 | |
| }, | |
| { | |
| "epoch": 5.49, | |
| "learning_rate": 2.2547881601857227e-05, | |
| "loss": 0.3348, | |
| "step": 236500 | |
| }, | |
| { | |
| "epoch": 5.5, | |
| "learning_rate": 2.2489843296575742e-05, | |
| "loss": 0.3348, | |
| "step": 237000 | |
| }, | |
| { | |
| "epoch": 5.51, | |
| "learning_rate": 2.2431804991294254e-05, | |
| "loss": 0.3349, | |
| "step": 237500 | |
| }, | |
| { | |
| "epoch": 5.53, | |
| "learning_rate": 2.237376668601277e-05, | |
| "loss": 0.3345, | |
| "step": 238000 | |
| }, | |
| { | |
| "epoch": 5.54, | |
| "learning_rate": 2.2315728380731285e-05, | |
| "loss": 0.3345, | |
| "step": 238500 | |
| }, | |
| { | |
| "epoch": 5.55, | |
| "learning_rate": 2.2257690075449797e-05, | |
| "loss": 0.3348, | |
| "step": 239000 | |
| }, | |
| { | |
| "epoch": 5.56, | |
| "learning_rate": 2.2199651770168312e-05, | |
| "loss": 0.3344, | |
| "step": 239500 | |
| }, | |
| { | |
| "epoch": 5.57, | |
| "learning_rate": 2.2141613464886827e-05, | |
| "loss": 0.3349, | |
| "step": 240000 | |
| }, | |
| { | |
| "epoch": 5.58, | |
| "learning_rate": 2.2083575159605342e-05, | |
| "loss": 0.3348, | |
| "step": 240500 | |
| }, | |
| { | |
| "epoch": 5.59, | |
| "learning_rate": 2.2025536854323854e-05, | |
| "loss": 0.3347, | |
| "step": 241000 | |
| }, | |
| { | |
| "epoch": 5.61, | |
| "learning_rate": 2.196749854904237e-05, | |
| "loss": 0.3351, | |
| "step": 241500 | |
| }, | |
| { | |
| "epoch": 5.62, | |
| "learning_rate": 2.190946024376088e-05, | |
| "loss": 0.3347, | |
| "step": 242000 | |
| }, | |
| { | |
| "epoch": 5.63, | |
| "learning_rate": 2.1851421938479396e-05, | |
| "loss": 0.3346, | |
| "step": 242500 | |
| }, | |
| { | |
| "epoch": 5.64, | |
| "learning_rate": 2.179338363319791e-05, | |
| "loss": 0.3348, | |
| "step": 243000 | |
| }, | |
| { | |
| "epoch": 5.65, | |
| "learning_rate": 2.1735345327916427e-05, | |
| "loss": 0.335, | |
| "step": 243500 | |
| }, | |
| { | |
| "epoch": 5.66, | |
| "learning_rate": 2.1677307022634942e-05, | |
| "loss": 0.3346, | |
| "step": 244000 | |
| }, | |
| { | |
| "epoch": 5.68, | |
| "learning_rate": 2.1619268717353454e-05, | |
| "loss": 0.3346, | |
| "step": 244500 | |
| }, | |
| { | |
| "epoch": 5.69, | |
| "learning_rate": 2.1561230412071966e-05, | |
| "loss": 0.3346, | |
| "step": 245000 | |
| }, | |
| { | |
| "epoch": 5.7, | |
| "learning_rate": 2.150319210679048e-05, | |
| "loss": 0.3346, | |
| "step": 245500 | |
| }, | |
| { | |
| "epoch": 5.71, | |
| "learning_rate": 2.1445153801508996e-05, | |
| "loss": 0.3344, | |
| "step": 246000 | |
| }, | |
| { | |
| "epoch": 5.72, | |
| "learning_rate": 2.138711549622751e-05, | |
| "loss": 0.3346, | |
| "step": 246500 | |
| }, | |
| { | |
| "epoch": 5.73, | |
| "learning_rate": 2.1329077190946027e-05, | |
| "loss": 0.3345, | |
| "step": 247000 | |
| }, | |
| { | |
| "epoch": 5.75, | |
| "learning_rate": 2.1271038885664542e-05, | |
| "loss": 0.3351, | |
| "step": 247500 | |
| }, | |
| { | |
| "epoch": 5.76, | |
| "learning_rate": 2.1213000580383054e-05, | |
| "loss": 0.3348, | |
| "step": 248000 | |
| }, | |
| { | |
| "epoch": 5.77, | |
| "learning_rate": 2.1154962275101566e-05, | |
| "loss": 0.3343, | |
| "step": 248500 | |
| }, | |
| { | |
| "epoch": 5.78, | |
| "learning_rate": 2.109692396982008e-05, | |
| "loss": 0.3345, | |
| "step": 249000 | |
| }, | |
| { | |
| "epoch": 5.79, | |
| "learning_rate": 2.1038885664538596e-05, | |
| "loss": 0.3345, | |
| "step": 249500 | |
| }, | |
| { | |
| "epoch": 5.8, | |
| "learning_rate": 2.098084735925711e-05, | |
| "loss": 0.3344, | |
| "step": 250000 | |
| }, | |
| { | |
| "epoch": 5.82, | |
| "learning_rate": 2.0922809053975626e-05, | |
| "loss": 0.3344, | |
| "step": 250500 | |
| }, | |
| { | |
| "epoch": 5.83, | |
| "learning_rate": 2.086477074869414e-05, | |
| "loss": 0.3347, | |
| "step": 251000 | |
| }, | |
| { | |
| "epoch": 5.84, | |
| "learning_rate": 2.0806732443412654e-05, | |
| "loss": 0.3345, | |
| "step": 251500 | |
| }, | |
| { | |
| "epoch": 5.85, | |
| "learning_rate": 2.0748694138131165e-05, | |
| "loss": 0.3345, | |
| "step": 252000 | |
| }, | |
| { | |
| "epoch": 5.86, | |
| "learning_rate": 2.069065583284968e-05, | |
| "loss": 0.3343, | |
| "step": 252500 | |
| }, | |
| { | |
| "epoch": 5.87, | |
| "learning_rate": 2.0632617527568196e-05, | |
| "loss": 0.3342, | |
| "step": 253000 | |
| }, | |
| { | |
| "epoch": 5.89, | |
| "learning_rate": 2.057457922228671e-05, | |
| "loss": 0.3344, | |
| "step": 253500 | |
| }, | |
| { | |
| "epoch": 5.9, | |
| "learning_rate": 2.0516540917005226e-05, | |
| "loss": 0.3344, | |
| "step": 254000 | |
| }, | |
| { | |
| "epoch": 5.91, | |
| "learning_rate": 2.045850261172374e-05, | |
| "loss": 0.3347, | |
| "step": 254500 | |
| }, | |
| { | |
| "epoch": 5.92, | |
| "learning_rate": 2.0400464306442253e-05, | |
| "loss": 0.3341, | |
| "step": 255000 | |
| }, | |
| { | |
| "epoch": 5.93, | |
| "learning_rate": 2.0342426001160765e-05, | |
| "loss": 0.3344, | |
| "step": 255500 | |
| }, | |
| { | |
| "epoch": 5.94, | |
| "learning_rate": 2.028438769587928e-05, | |
| "loss": 0.3347, | |
| "step": 256000 | |
| }, | |
| { | |
| "epoch": 5.95, | |
| "learning_rate": 2.0226349390597796e-05, | |
| "loss": 0.3343, | |
| "step": 256500 | |
| }, | |
| { | |
| "epoch": 5.97, | |
| "learning_rate": 2.016831108531631e-05, | |
| "loss": 0.3343, | |
| "step": 257000 | |
| }, | |
| { | |
| "epoch": 5.98, | |
| "learning_rate": 2.0110272780034826e-05, | |
| "loss": 0.3342, | |
| "step": 257500 | |
| }, | |
| { | |
| "epoch": 5.99, | |
| "learning_rate": 2.0052234474753338e-05, | |
| "loss": 0.3343, | |
| "step": 258000 | |
| }, | |
| { | |
| "epoch": 6.0, | |
| "eval_accuracy": 0.8944719783406293, | |
| "eval_loss": 0.32793664932250977, | |
| "eval_runtime": 1984.9744, | |
| "eval_samples_per_second": 192.68, | |
| "eval_steps_per_second": 2.409, | |
| "step": 258450 | |
| }, | |
| { | |
| "epoch": 6.0, | |
| "learning_rate": 1.9994196169471853e-05, | |
| "loss": 0.3343, | |
| "step": 258500 | |
| }, | |
| { | |
| "epoch": 6.01, | |
| "learning_rate": 1.9936157864190365e-05, | |
| "loss": 0.3336, | |
| "step": 259000 | |
| }, | |
| { | |
| "epoch": 6.02, | |
| "learning_rate": 1.987811955890888e-05, | |
| "loss": 0.3334, | |
| "step": 259500 | |
| }, | |
| { | |
| "epoch": 6.04, | |
| "learning_rate": 1.9820081253627395e-05, | |
| "loss": 0.3335, | |
| "step": 260000 | |
| }, | |
| { | |
| "epoch": 6.05, | |
| "learning_rate": 1.976204294834591e-05, | |
| "loss": 0.3335, | |
| "step": 260500 | |
| }, | |
| { | |
| "epoch": 6.06, | |
| "learning_rate": 1.9704004643064426e-05, | |
| "loss": 0.3334, | |
| "step": 261000 | |
| }, | |
| { | |
| "epoch": 6.07, | |
| "learning_rate": 1.9645966337782938e-05, | |
| "loss": 0.3329, | |
| "step": 261500 | |
| }, | |
| { | |
| "epoch": 6.08, | |
| "learning_rate": 1.9587928032501453e-05, | |
| "loss": 0.3334, | |
| "step": 262000 | |
| }, | |
| { | |
| "epoch": 6.09, | |
| "learning_rate": 1.9529889727219965e-05, | |
| "loss": 0.3333, | |
| "step": 262500 | |
| }, | |
| { | |
| "epoch": 6.11, | |
| "learning_rate": 1.947185142193848e-05, | |
| "loss": 0.3334, | |
| "step": 263000 | |
| }, | |
| { | |
| "epoch": 6.12, | |
| "learning_rate": 1.9413813116656995e-05, | |
| "loss": 0.3334, | |
| "step": 263500 | |
| }, | |
| { | |
| "epoch": 6.13, | |
| "learning_rate": 1.935577481137551e-05, | |
| "loss": 0.3337, | |
| "step": 264000 | |
| }, | |
| { | |
| "epoch": 6.14, | |
| "learning_rate": 1.9297736506094022e-05, | |
| "loss": 0.3334, | |
| "step": 264500 | |
| }, | |
| { | |
| "epoch": 6.15, | |
| "learning_rate": 1.9239698200812538e-05, | |
| "loss": 0.3332, | |
| "step": 265000 | |
| }, | |
| { | |
| "epoch": 6.16, | |
| "learning_rate": 1.9181659895531053e-05, | |
| "loss": 0.3331, | |
| "step": 265500 | |
| }, | |
| { | |
| "epoch": 6.18, | |
| "learning_rate": 1.9123621590249565e-05, | |
| "loss": 0.3335, | |
| "step": 266000 | |
| }, | |
| { | |
| "epoch": 6.19, | |
| "learning_rate": 1.906558328496808e-05, | |
| "loss": 0.3333, | |
| "step": 266500 | |
| }, | |
| { | |
| "epoch": 6.2, | |
| "learning_rate": 1.9007544979686595e-05, | |
| "loss": 0.3335, | |
| "step": 267000 | |
| }, | |
| { | |
| "epoch": 6.21, | |
| "learning_rate": 1.8949506674405107e-05, | |
| "loss": 0.3335, | |
| "step": 267500 | |
| }, | |
| { | |
| "epoch": 6.22, | |
| "learning_rate": 1.8891468369123622e-05, | |
| "loss": 0.3333, | |
| "step": 268000 | |
| }, | |
| { | |
| "epoch": 6.23, | |
| "learning_rate": 1.8833430063842137e-05, | |
| "loss": 0.333, | |
| "step": 268500 | |
| }, | |
| { | |
| "epoch": 6.24, | |
| "learning_rate": 1.8775391758560653e-05, | |
| "loss": 0.3332, | |
| "step": 269000 | |
| }, | |
| { | |
| "epoch": 6.26, | |
| "learning_rate": 1.8717353453279164e-05, | |
| "loss": 0.3334, | |
| "step": 269500 | |
| }, | |
| { | |
| "epoch": 6.27, | |
| "learning_rate": 1.865931514799768e-05, | |
| "loss": 0.3334, | |
| "step": 270000 | |
| }, | |
| { | |
| "epoch": 6.28, | |
| "learning_rate": 1.8601276842716195e-05, | |
| "loss": 0.3334, | |
| "step": 270500 | |
| }, | |
| { | |
| "epoch": 6.29, | |
| "learning_rate": 1.8543238537434707e-05, | |
| "loss": 0.3334, | |
| "step": 271000 | |
| }, | |
| { | |
| "epoch": 6.3, | |
| "learning_rate": 1.8485200232153222e-05, | |
| "loss": 0.3331, | |
| "step": 271500 | |
| }, | |
| { | |
| "epoch": 6.31, | |
| "learning_rate": 1.8427161926871737e-05, | |
| "loss": 0.3332, | |
| "step": 272000 | |
| }, | |
| { | |
| "epoch": 6.33, | |
| "learning_rate": 1.836912362159025e-05, | |
| "loss": 0.3333, | |
| "step": 272500 | |
| }, | |
| { | |
| "epoch": 6.34, | |
| "learning_rate": 1.8311085316308764e-05, | |
| "loss": 0.3332, | |
| "step": 273000 | |
| }, | |
| { | |
| "epoch": 6.35, | |
| "learning_rate": 1.825304701102728e-05, | |
| "loss": 0.3333, | |
| "step": 273500 | |
| }, | |
| { | |
| "epoch": 6.36, | |
| "learning_rate": 1.819500870574579e-05, | |
| "loss": 0.3332, | |
| "step": 274000 | |
| }, | |
| { | |
| "epoch": 6.37, | |
| "learning_rate": 1.8136970400464307e-05, | |
| "loss": 0.3332, | |
| "step": 274500 | |
| }, | |
| { | |
| "epoch": 6.38, | |
| "learning_rate": 1.8078932095182822e-05, | |
| "loss": 0.3333, | |
| "step": 275000 | |
| }, | |
| { | |
| "epoch": 6.4, | |
| "learning_rate": 1.8020893789901337e-05, | |
| "loss": 0.3333, | |
| "step": 275500 | |
| }, | |
| { | |
| "epoch": 6.41, | |
| "learning_rate": 1.796285548461985e-05, | |
| "loss": 0.3331, | |
| "step": 276000 | |
| }, | |
| { | |
| "epoch": 6.42, | |
| "learning_rate": 1.7904817179338364e-05, | |
| "loss": 0.3333, | |
| "step": 276500 | |
| }, | |
| { | |
| "epoch": 6.43, | |
| "learning_rate": 1.784677887405688e-05, | |
| "loss": 0.3331, | |
| "step": 277000 | |
| }, | |
| { | |
| "epoch": 6.44, | |
| "learning_rate": 1.778874056877539e-05, | |
| "loss": 0.3331, | |
| "step": 277500 | |
| }, | |
| { | |
| "epoch": 6.45, | |
| "learning_rate": 1.7730702263493906e-05, | |
| "loss": 0.3331, | |
| "step": 278000 | |
| }, | |
| { | |
| "epoch": 6.47, | |
| "learning_rate": 1.767266395821242e-05, | |
| "loss": 0.3331, | |
| "step": 278500 | |
| }, | |
| { | |
| "epoch": 6.48, | |
| "learning_rate": 1.7614625652930937e-05, | |
| "loss": 0.3331, | |
| "step": 279000 | |
| }, | |
| { | |
| "epoch": 6.49, | |
| "learning_rate": 1.755658734764945e-05, | |
| "loss": 0.3332, | |
| "step": 279500 | |
| }, | |
| { | |
| "epoch": 6.5, | |
| "learning_rate": 1.7498549042367964e-05, | |
| "loss": 0.333, | |
| "step": 280000 | |
| }, | |
| { | |
| "epoch": 6.51, | |
| "learning_rate": 1.7440510737086476e-05, | |
| "loss": 0.3329, | |
| "step": 280500 | |
| }, | |
| { | |
| "epoch": 6.52, | |
| "learning_rate": 1.738247243180499e-05, | |
| "loss": 0.3326, | |
| "step": 281000 | |
| }, | |
| { | |
| "epoch": 6.54, | |
| "learning_rate": 1.7324434126523506e-05, | |
| "loss": 0.333, | |
| "step": 281500 | |
| }, | |
| { | |
| "epoch": 6.55, | |
| "learning_rate": 1.726639582124202e-05, | |
| "loss": 0.3329, | |
| "step": 282000 | |
| }, | |
| { | |
| "epoch": 6.56, | |
| "learning_rate": 1.7208357515960537e-05, | |
| "loss": 0.3329, | |
| "step": 282500 | |
| }, | |
| { | |
| "epoch": 6.57, | |
| "learning_rate": 1.715031921067905e-05, | |
| "loss": 0.333, | |
| "step": 283000 | |
| }, | |
| { | |
| "epoch": 6.58, | |
| "learning_rate": 1.709228090539756e-05, | |
| "loss": 0.3333, | |
| "step": 283500 | |
| }, | |
| { | |
| "epoch": 6.59, | |
| "learning_rate": 1.7034242600116076e-05, | |
| "loss": 0.3326, | |
| "step": 284000 | |
| }, | |
| { | |
| "epoch": 6.6, | |
| "learning_rate": 1.697620429483459e-05, | |
| "loss": 0.3329, | |
| "step": 284500 | |
| }, | |
| { | |
| "epoch": 6.62, | |
| "learning_rate": 1.6918165989553106e-05, | |
| "loss": 0.3326, | |
| "step": 285000 | |
| }, | |
| { | |
| "epoch": 6.63, | |
| "learning_rate": 1.686012768427162e-05, | |
| "loss": 0.3326, | |
| "step": 285500 | |
| }, | |
| { | |
| "epoch": 6.64, | |
| "learning_rate": 1.6802089378990136e-05, | |
| "loss": 0.333, | |
| "step": 286000 | |
| }, | |
| { | |
| "epoch": 6.65, | |
| "learning_rate": 1.6744051073708648e-05, | |
| "loss": 0.3326, | |
| "step": 286500 | |
| }, | |
| { | |
| "epoch": 6.66, | |
| "learning_rate": 1.668601276842716e-05, | |
| "loss": 0.3329, | |
| "step": 287000 | |
| }, | |
| { | |
| "epoch": 6.67, | |
| "learning_rate": 1.6627974463145675e-05, | |
| "loss": 0.3327, | |
| "step": 287500 | |
| }, | |
| { | |
| "epoch": 6.69, | |
| "learning_rate": 1.656993615786419e-05, | |
| "loss": 0.3326, | |
| "step": 288000 | |
| }, | |
| { | |
| "epoch": 6.7, | |
| "learning_rate": 1.6511897852582706e-05, | |
| "loss": 0.3329, | |
| "step": 288500 | |
| }, | |
| { | |
| "epoch": 6.71, | |
| "learning_rate": 1.645385954730122e-05, | |
| "loss": 0.333, | |
| "step": 289000 | |
| }, | |
| { | |
| "epoch": 6.72, | |
| "learning_rate": 1.6395821242019736e-05, | |
| "loss": 0.3327, | |
| "step": 289500 | |
| }, | |
| { | |
| "epoch": 6.73, | |
| "learning_rate": 1.6337782936738248e-05, | |
| "loss": 0.3328, | |
| "step": 290000 | |
| }, | |
| { | |
| "epoch": 6.74, | |
| "learning_rate": 1.627974463145676e-05, | |
| "loss": 0.3325, | |
| "step": 290500 | |
| }, | |
| { | |
| "epoch": 6.76, | |
| "learning_rate": 1.6221706326175275e-05, | |
| "loss": 0.3326, | |
| "step": 291000 | |
| }, | |
| { | |
| "epoch": 6.77, | |
| "learning_rate": 1.616366802089379e-05, | |
| "loss": 0.3327, | |
| "step": 291500 | |
| }, | |
| { | |
| "epoch": 6.78, | |
| "learning_rate": 1.6105629715612306e-05, | |
| "loss": 0.3327, | |
| "step": 292000 | |
| }, | |
| { | |
| "epoch": 6.79, | |
| "learning_rate": 1.604759141033082e-05, | |
| "loss": 0.3329, | |
| "step": 292500 | |
| }, | |
| { | |
| "epoch": 6.8, | |
| "learning_rate": 1.5989553105049336e-05, | |
| "loss": 0.3326, | |
| "step": 293000 | |
| }, | |
| { | |
| "epoch": 6.81, | |
| "learning_rate": 1.5931514799767848e-05, | |
| "loss": 0.3326, | |
| "step": 293500 | |
| }, | |
| { | |
| "epoch": 6.83, | |
| "learning_rate": 1.587347649448636e-05, | |
| "loss": 0.3331, | |
| "step": 294000 | |
| }, | |
| { | |
| "epoch": 6.84, | |
| "learning_rate": 1.5815438189204875e-05, | |
| "loss": 0.3328, | |
| "step": 294500 | |
| }, | |
| { | |
| "epoch": 6.85, | |
| "learning_rate": 1.575739988392339e-05, | |
| "loss": 0.3327, | |
| "step": 295000 | |
| }, | |
| { | |
| "epoch": 6.86, | |
| "learning_rate": 1.5699361578641905e-05, | |
| "loss": 0.3326, | |
| "step": 295500 | |
| }, | |
| { | |
| "epoch": 6.87, | |
| "learning_rate": 1.564132327336042e-05, | |
| "loss": 0.3326, | |
| "step": 296000 | |
| }, | |
| { | |
| "epoch": 6.88, | |
| "learning_rate": 1.5583284968078933e-05, | |
| "loss": 0.3324, | |
| "step": 296500 | |
| }, | |
| { | |
| "epoch": 6.89, | |
| "learning_rate": 1.5525246662797448e-05, | |
| "loss": 0.3327, | |
| "step": 297000 | |
| }, | |
| { | |
| "epoch": 6.91, | |
| "learning_rate": 1.546720835751596e-05, | |
| "loss": 0.3326, | |
| "step": 297500 | |
| }, | |
| { | |
| "epoch": 6.92, | |
| "learning_rate": 1.5409170052234475e-05, | |
| "loss": 0.3322, | |
| "step": 298000 | |
| }, | |
| { | |
| "epoch": 6.93, | |
| "learning_rate": 1.535113174695299e-05, | |
| "loss": 0.3325, | |
| "step": 298500 | |
| }, | |
| { | |
| "epoch": 6.94, | |
| "learning_rate": 1.5293093441671505e-05, | |
| "loss": 0.3326, | |
| "step": 299000 | |
| }, | |
| { | |
| "epoch": 6.95, | |
| "learning_rate": 1.5235055136390017e-05, | |
| "loss": 0.3326, | |
| "step": 299500 | |
| }, | |
| { | |
| "epoch": 6.96, | |
| "learning_rate": 1.517701683110853e-05, | |
| "loss": 0.3325, | |
| "step": 300000 | |
| }, | |
| { | |
| "epoch": 6.98, | |
| "learning_rate": 1.5118978525827046e-05, | |
| "loss": 0.3325, | |
| "step": 300500 | |
| }, | |
| { | |
| "epoch": 6.99, | |
| "learning_rate": 1.5060940220545561e-05, | |
| "loss": 0.3323, | |
| "step": 301000 | |
| }, | |
| { | |
| "epoch": 7.0, | |
| "learning_rate": 1.5002901915264075e-05, | |
| "loss": 0.3326, | |
| "step": 301500 | |
| }, | |
| { | |
| "epoch": 7.0, | |
| "eval_accuracy": 0.8947962788449951, | |
| "eval_loss": 0.3267107605934143, | |
| "eval_runtime": 1985.0991, | |
| "eval_samples_per_second": 192.667, | |
| "eval_steps_per_second": 2.408, | |
| "step": 301525 | |
| }, | |
| { | |
| "epoch": 7.01, | |
| "learning_rate": 1.494486360998259e-05, | |
| "loss": 0.3319, | |
| "step": 302000 | |
| }, | |
| { | |
| "epoch": 7.02, | |
| "learning_rate": 1.4886825304701105e-05, | |
| "loss": 0.3318, | |
| "step": 302500 | |
| }, | |
| { | |
| "epoch": 7.03, | |
| "learning_rate": 1.4828786999419617e-05, | |
| "loss": 0.3321, | |
| "step": 303000 | |
| }, | |
| { | |
| "epoch": 7.05, | |
| "learning_rate": 1.477074869413813e-05, | |
| "loss": 0.3315, | |
| "step": 303500 | |
| }, | |
| { | |
| "epoch": 7.06, | |
| "learning_rate": 1.4712710388856646e-05, | |
| "loss": 0.3317, | |
| "step": 304000 | |
| }, | |
| { | |
| "epoch": 7.07, | |
| "learning_rate": 1.4654672083575161e-05, | |
| "loss": 0.3321, | |
| "step": 304500 | |
| }, | |
| { | |
| "epoch": 7.08, | |
| "learning_rate": 1.4596633778293674e-05, | |
| "loss": 0.3317, | |
| "step": 305000 | |
| }, | |
| { | |
| "epoch": 7.09, | |
| "learning_rate": 1.453859547301219e-05, | |
| "loss": 0.3318, | |
| "step": 305500 | |
| }, | |
| { | |
| "epoch": 7.1, | |
| "learning_rate": 1.4480557167730702e-05, | |
| "loss": 0.3319, | |
| "step": 306000 | |
| }, | |
| { | |
| "epoch": 7.12, | |
| "learning_rate": 1.4422518862449217e-05, | |
| "loss": 0.3319, | |
| "step": 306500 | |
| }, | |
| { | |
| "epoch": 7.13, | |
| "learning_rate": 1.436448055716773e-05, | |
| "loss": 0.3316, | |
| "step": 307000 | |
| }, | |
| { | |
| "epoch": 7.14, | |
| "learning_rate": 1.4306442251886245e-05, | |
| "loss": 0.3318, | |
| "step": 307500 | |
| }, | |
| { | |
| "epoch": 7.15, | |
| "learning_rate": 1.424840394660476e-05, | |
| "loss": 0.3318, | |
| "step": 308000 | |
| }, | |
| { | |
| "epoch": 7.16, | |
| "learning_rate": 1.4190365641323274e-05, | |
| "loss": 0.3316, | |
| "step": 308500 | |
| }, | |
| { | |
| "epoch": 7.17, | |
| "learning_rate": 1.413232733604179e-05, | |
| "loss": 0.3319, | |
| "step": 309000 | |
| }, | |
| { | |
| "epoch": 7.19, | |
| "learning_rate": 1.4074289030760301e-05, | |
| "loss": 0.3319, | |
| "step": 309500 | |
| }, | |
| { | |
| "epoch": 7.2, | |
| "learning_rate": 1.4016250725478817e-05, | |
| "loss": 0.3316, | |
| "step": 310000 | |
| }, | |
| { | |
| "epoch": 7.21, | |
| "learning_rate": 1.395821242019733e-05, | |
| "loss": 0.332, | |
| "step": 310500 | |
| }, | |
| { | |
| "epoch": 7.22, | |
| "learning_rate": 1.3900174114915845e-05, | |
| "loss": 0.3314, | |
| "step": 311000 | |
| }, | |
| { | |
| "epoch": 7.23, | |
| "learning_rate": 1.384213580963436e-05, | |
| "loss": 0.332, | |
| "step": 311500 | |
| }, | |
| { | |
| "epoch": 7.24, | |
| "learning_rate": 1.3784097504352874e-05, | |
| "loss": 0.332, | |
| "step": 312000 | |
| }, | |
| { | |
| "epoch": 7.25, | |
| "learning_rate": 1.3726059199071386e-05, | |
| "loss": 0.3318, | |
| "step": 312500 | |
| }, | |
| { | |
| "epoch": 7.27, | |
| "learning_rate": 1.3668020893789901e-05, | |
| "loss": 0.3318, | |
| "step": 313000 | |
| }, | |
| { | |
| "epoch": 7.28, | |
| "learning_rate": 1.3609982588508416e-05, | |
| "loss": 0.3317, | |
| "step": 313500 | |
| }, | |
| { | |
| "epoch": 7.29, | |
| "learning_rate": 1.355194428322693e-05, | |
| "loss": 0.3317, | |
| "step": 314000 | |
| }, | |
| { | |
| "epoch": 7.3, | |
| "learning_rate": 1.3493905977945445e-05, | |
| "loss": 0.3318, | |
| "step": 314500 | |
| }, | |
| { | |
| "epoch": 7.31, | |
| "learning_rate": 1.343586767266396e-05, | |
| "loss": 0.3312, | |
| "step": 315000 | |
| }, | |
| { | |
| "epoch": 7.32, | |
| "learning_rate": 1.3377829367382472e-05, | |
| "loss": 0.3318, | |
| "step": 315500 | |
| }, | |
| { | |
| "epoch": 7.34, | |
| "learning_rate": 1.3319791062100986e-05, | |
| "loss": 0.3316, | |
| "step": 316000 | |
| }, | |
| { | |
| "epoch": 7.35, | |
| "learning_rate": 1.3261752756819501e-05, | |
| "loss": 0.3319, | |
| "step": 316500 | |
| }, | |
| { | |
| "epoch": 7.36, | |
| "learning_rate": 1.3203714451538016e-05, | |
| "loss": 0.3317, | |
| "step": 317000 | |
| }, | |
| { | |
| "epoch": 7.37, | |
| "learning_rate": 1.314567614625653e-05, | |
| "loss": 0.3314, | |
| "step": 317500 | |
| }, | |
| { | |
| "epoch": 7.38, | |
| "learning_rate": 1.3087637840975045e-05, | |
| "loss": 0.3316, | |
| "step": 318000 | |
| }, | |
| { | |
| "epoch": 7.39, | |
| "learning_rate": 1.302959953569356e-05, | |
| "loss": 0.3318, | |
| "step": 318500 | |
| }, | |
| { | |
| "epoch": 7.41, | |
| "learning_rate": 1.2971561230412072e-05, | |
| "loss": 0.3318, | |
| "step": 319000 | |
| }, | |
| { | |
| "epoch": 7.42, | |
| "learning_rate": 1.2913522925130586e-05, | |
| "loss": 0.3317, | |
| "step": 319500 | |
| }, | |
| { | |
| "epoch": 7.43, | |
| "learning_rate": 1.28554846198491e-05, | |
| "loss": 0.3317, | |
| "step": 320000 | |
| }, | |
| { | |
| "epoch": 7.44, | |
| "learning_rate": 1.2797446314567616e-05, | |
| "loss": 0.3318, | |
| "step": 320500 | |
| }, | |
| { | |
| "epoch": 7.45, | |
| "learning_rate": 1.273940800928613e-05, | |
| "loss": 0.3317, | |
| "step": 321000 | |
| }, | |
| { | |
| "epoch": 7.46, | |
| "learning_rate": 1.2681369704004645e-05, | |
| "loss": 0.3314, | |
| "step": 321500 | |
| }, | |
| { | |
| "epoch": 7.48, | |
| "learning_rate": 1.2623331398723157e-05, | |
| "loss": 0.3313, | |
| "step": 322000 | |
| }, | |
| { | |
| "epoch": 7.49, | |
| "learning_rate": 1.2565293093441672e-05, | |
| "loss": 0.3318, | |
| "step": 322500 | |
| }, | |
| { | |
| "epoch": 7.5, | |
| "learning_rate": 1.2507254788160185e-05, | |
| "loss": 0.3315, | |
| "step": 323000 | |
| }, | |
| { | |
| "epoch": 7.51, | |
| "learning_rate": 1.24492164828787e-05, | |
| "loss": 0.3311, | |
| "step": 323500 | |
| }, | |
| { | |
| "epoch": 7.52, | |
| "learning_rate": 1.2391178177597216e-05, | |
| "loss": 0.3315, | |
| "step": 324000 | |
| }, | |
| { | |
| "epoch": 7.53, | |
| "learning_rate": 1.2333139872315728e-05, | |
| "loss": 0.3317, | |
| "step": 324500 | |
| }, | |
| { | |
| "epoch": 7.54, | |
| "learning_rate": 1.2275101567034243e-05, | |
| "loss": 0.3313, | |
| "step": 325000 | |
| }, | |
| { | |
| "epoch": 7.56, | |
| "learning_rate": 1.2217063261752758e-05, | |
| "loss": 0.3314, | |
| "step": 325500 | |
| }, | |
| { | |
| "epoch": 7.57, | |
| "learning_rate": 1.2159024956471272e-05, | |
| "loss": 0.3315, | |
| "step": 326000 | |
| }, | |
| { | |
| "epoch": 7.58, | |
| "learning_rate": 1.2100986651189785e-05, | |
| "loss": 0.3314, | |
| "step": 326500 | |
| }, | |
| { | |
| "epoch": 7.59, | |
| "learning_rate": 1.20429483459083e-05, | |
| "loss": 0.3314, | |
| "step": 327000 | |
| }, | |
| { | |
| "epoch": 7.6, | |
| "learning_rate": 1.1984910040626814e-05, | |
| "loss": 0.3314, | |
| "step": 327500 | |
| }, | |
| { | |
| "epoch": 7.61, | |
| "learning_rate": 1.1926871735345327e-05, | |
| "loss": 0.3314, | |
| "step": 328000 | |
| }, | |
| { | |
| "epoch": 7.63, | |
| "learning_rate": 1.1868833430063843e-05, | |
| "loss": 0.3316, | |
| "step": 328500 | |
| }, | |
| { | |
| "epoch": 7.64, | |
| "learning_rate": 1.1810795124782358e-05, | |
| "loss": 0.3315, | |
| "step": 329000 | |
| }, | |
| { | |
| "epoch": 7.65, | |
| "learning_rate": 1.1752756819500871e-05, | |
| "loss": 0.3311, | |
| "step": 329500 | |
| }, | |
| { | |
| "epoch": 7.66, | |
| "learning_rate": 1.1694718514219385e-05, | |
| "loss": 0.3314, | |
| "step": 330000 | |
| }, | |
| { | |
| "epoch": 7.67, | |
| "learning_rate": 1.16366802089379e-05, | |
| "loss": 0.3314, | |
| "step": 330500 | |
| }, | |
| { | |
| "epoch": 7.68, | |
| "learning_rate": 1.1578641903656414e-05, | |
| "loss": 0.3313, | |
| "step": 331000 | |
| }, | |
| { | |
| "epoch": 7.7, | |
| "learning_rate": 1.1520603598374927e-05, | |
| "loss": 0.331, | |
| "step": 331500 | |
| }, | |
| { | |
| "epoch": 7.71, | |
| "learning_rate": 1.1462565293093443e-05, | |
| "loss": 0.3315, | |
| "step": 332000 | |
| }, | |
| { | |
| "epoch": 7.72, | |
| "learning_rate": 1.1404526987811956e-05, | |
| "loss": 0.3314, | |
| "step": 332500 | |
| }, | |
| { | |
| "epoch": 7.73, | |
| "learning_rate": 1.1346488682530471e-05, | |
| "loss": 0.3315, | |
| "step": 333000 | |
| }, | |
| { | |
| "epoch": 7.74, | |
| "learning_rate": 1.1288450377248985e-05, | |
| "loss": 0.3314, | |
| "step": 333500 | |
| }, | |
| { | |
| "epoch": 7.75, | |
| "learning_rate": 1.1230412071967498e-05, | |
| "loss": 0.3312, | |
| "step": 334000 | |
| }, | |
| { | |
| "epoch": 7.77, | |
| "learning_rate": 1.1172373766686014e-05, | |
| "loss": 0.3315, | |
| "step": 334500 | |
| }, | |
| { | |
| "epoch": 7.78, | |
| "learning_rate": 1.1114335461404527e-05, | |
| "loss": 0.3313, | |
| "step": 335000 | |
| }, | |
| { | |
| "epoch": 7.79, | |
| "learning_rate": 1.105629715612304e-05, | |
| "loss": 0.3311, | |
| "step": 335500 | |
| }, | |
| { | |
| "epoch": 7.8, | |
| "learning_rate": 1.0998258850841556e-05, | |
| "loss": 0.3313, | |
| "step": 336000 | |
| }, | |
| { | |
| "epoch": 7.81, | |
| "learning_rate": 1.0940220545560071e-05, | |
| "loss": 0.3316, | |
| "step": 336500 | |
| }, | |
| { | |
| "epoch": 7.82, | |
| "learning_rate": 1.0882182240278585e-05, | |
| "loss": 0.3314, | |
| "step": 337000 | |
| }, | |
| { | |
| "epoch": 7.84, | |
| "learning_rate": 1.0824143934997098e-05, | |
| "loss": 0.3314, | |
| "step": 337500 | |
| }, | |
| { | |
| "epoch": 7.85, | |
| "learning_rate": 1.0766105629715613e-05, | |
| "loss": 0.3311, | |
| "step": 338000 | |
| }, | |
| { | |
| "epoch": 7.86, | |
| "learning_rate": 1.0708067324434127e-05, | |
| "loss": 0.3311, | |
| "step": 338500 | |
| }, | |
| { | |
| "epoch": 7.87, | |
| "learning_rate": 1.065002901915264e-05, | |
| "loss": 0.331, | |
| "step": 339000 | |
| }, | |
| { | |
| "epoch": 7.88, | |
| "learning_rate": 1.0591990713871156e-05, | |
| "loss": 0.3311, | |
| "step": 339500 | |
| }, | |
| { | |
| "epoch": 7.89, | |
| "learning_rate": 1.0533952408589671e-05, | |
| "loss": 0.3314, | |
| "step": 340000 | |
| }, | |
| { | |
| "epoch": 7.9, | |
| "learning_rate": 1.0475914103308183e-05, | |
| "loss": 0.3312, | |
| "step": 340500 | |
| }, | |
| { | |
| "epoch": 7.92, | |
| "learning_rate": 1.0417875798026698e-05, | |
| "loss": 0.3311, | |
| "step": 341000 | |
| }, | |
| { | |
| "epoch": 7.93, | |
| "learning_rate": 1.0359837492745213e-05, | |
| "loss": 0.3311, | |
| "step": 341500 | |
| }, | |
| { | |
| "epoch": 7.94, | |
| "learning_rate": 1.0301799187463727e-05, | |
| "loss": 0.3314, | |
| "step": 342000 | |
| }, | |
| { | |
| "epoch": 7.95, | |
| "learning_rate": 1.024376088218224e-05, | |
| "loss": 0.3312, | |
| "step": 342500 | |
| }, | |
| { | |
| "epoch": 7.96, | |
| "learning_rate": 1.0185722576900755e-05, | |
| "loss": 0.3313, | |
| "step": 343000 | |
| }, | |
| { | |
| "epoch": 7.97, | |
| "learning_rate": 1.0127684271619269e-05, | |
| "loss": 0.3309, | |
| "step": 343500 | |
| }, | |
| { | |
| "epoch": 7.99, | |
| "learning_rate": 1.0069645966337783e-05, | |
| "loss": 0.3311, | |
| "step": 344000 | |
| }, | |
| { | |
| "epoch": 8.0, | |
| "learning_rate": 1.0011607661056298e-05, | |
| "loss": 0.3311, | |
| "step": 344500 | |
| }, | |
| { | |
| "epoch": 8.0, | |
| "eval_accuracy": 0.8951035305605985, | |
| "eval_loss": 0.32558220624923706, | |
| "eval_runtime": 1982.3976, | |
| "eval_samples_per_second": 192.93, | |
| "eval_steps_per_second": 2.412, | |
| "step": 344600 | |
| }, | |
| { | |
| "epoch": 8.01, | |
| "learning_rate": 9.953569355774813e-06, | |
| "loss": 0.3307, | |
| "step": 345000 | |
| }, | |
| { | |
| "epoch": 8.02, | |
| "learning_rate": 9.895531050493327e-06, | |
| "loss": 0.3303, | |
| "step": 345500 | |
| }, | |
| { | |
| "epoch": 8.03, | |
| "learning_rate": 9.83749274521184e-06, | |
| "loss": 0.3309, | |
| "step": 346000 | |
| }, | |
| { | |
| "epoch": 8.04, | |
| "learning_rate": 9.779454439930355e-06, | |
| "loss": 0.3306, | |
| "step": 346500 | |
| }, | |
| { | |
| "epoch": 8.06, | |
| "learning_rate": 9.721416134648869e-06, | |
| "loss": 0.3305, | |
| "step": 347000 | |
| }, | |
| { | |
| "epoch": 8.07, | |
| "learning_rate": 9.663377829367382e-06, | |
| "loss": 0.3306, | |
| "step": 347500 | |
| }, | |
| { | |
| "epoch": 8.08, | |
| "learning_rate": 9.605339524085898e-06, | |
| "loss": 0.3308, | |
| "step": 348000 | |
| }, | |
| { | |
| "epoch": 8.09, | |
| "learning_rate": 9.547301218804411e-06, | |
| "loss": 0.3306, | |
| "step": 348500 | |
| }, | |
| { | |
| "epoch": 8.1, | |
| "learning_rate": 9.489262913522926e-06, | |
| "loss": 0.3307, | |
| "step": 349000 | |
| }, | |
| { | |
| "epoch": 8.11, | |
| "learning_rate": 9.43122460824144e-06, | |
| "loss": 0.3307, | |
| "step": 349500 | |
| }, | |
| { | |
| "epoch": 8.13, | |
| "learning_rate": 9.373186302959953e-06, | |
| "loss": 0.3302, | |
| "step": 350000 | |
| }, | |
| { | |
| "epoch": 8.14, | |
| "learning_rate": 9.315147997678469e-06, | |
| "loss": 0.3308, | |
| "step": 350500 | |
| }, | |
| { | |
| "epoch": 8.15, | |
| "learning_rate": 9.257109692396982e-06, | |
| "loss": 0.3304, | |
| "step": 351000 | |
| }, | |
| { | |
| "epoch": 8.16, | |
| "learning_rate": 9.199071387115496e-06, | |
| "loss": 0.3305, | |
| "step": 351500 | |
| }, | |
| { | |
| "epoch": 8.17, | |
| "learning_rate": 9.141033081834011e-06, | |
| "loss": 0.3305, | |
| "step": 352000 | |
| }, | |
| { | |
| "epoch": 8.18, | |
| "learning_rate": 9.082994776552524e-06, | |
| "loss": 0.3305, | |
| "step": 352500 | |
| }, | |
| { | |
| "epoch": 8.2, | |
| "learning_rate": 9.02495647127104e-06, | |
| "loss": 0.3306, | |
| "step": 353000 | |
| }, | |
| { | |
| "epoch": 8.21, | |
| "learning_rate": 8.966918165989553e-06, | |
| "loss": 0.3308, | |
| "step": 353500 | |
| }, | |
| { | |
| "epoch": 8.22, | |
| "learning_rate": 8.908879860708068e-06, | |
| "loss": 0.3304, | |
| "step": 354000 | |
| }, | |
| { | |
| "epoch": 8.23, | |
| "learning_rate": 8.850841555426582e-06, | |
| "loss": 0.331, | |
| "step": 354500 | |
| }, | |
| { | |
| "epoch": 8.24, | |
| "learning_rate": 8.792803250145096e-06, | |
| "loss": 0.3305, | |
| "step": 355000 | |
| }, | |
| { | |
| "epoch": 8.25, | |
| "learning_rate": 8.73476494486361e-06, | |
| "loss": 0.3307, | |
| "step": 355500 | |
| }, | |
| { | |
| "epoch": 8.26, | |
| "learning_rate": 8.676726639582124e-06, | |
| "loss": 0.3309, | |
| "step": 356000 | |
| }, | |
| { | |
| "epoch": 8.28, | |
| "learning_rate": 8.618688334300638e-06, | |
| "loss": 0.3304, | |
| "step": 356500 | |
| }, | |
| { | |
| "epoch": 8.29, | |
| "learning_rate": 8.560650029019153e-06, | |
| "loss": 0.3304, | |
| "step": 357000 | |
| }, | |
| { | |
| "epoch": 8.3, | |
| "learning_rate": 8.502611723737668e-06, | |
| "loss": 0.3306, | |
| "step": 357500 | |
| }, | |
| { | |
| "epoch": 8.31, | |
| "learning_rate": 8.44457341845618e-06, | |
| "loss": 0.3304, | |
| "step": 358000 | |
| }, | |
| { | |
| "epoch": 8.32, | |
| "learning_rate": 8.386535113174695e-06, | |
| "loss": 0.3305, | |
| "step": 358500 | |
| }, | |
| { | |
| "epoch": 8.33, | |
| "learning_rate": 8.32849680789321e-06, | |
| "loss": 0.3308, | |
| "step": 359000 | |
| }, | |
| { | |
| "epoch": 8.35, | |
| "learning_rate": 8.270458502611724e-06, | |
| "loss": 0.3303, | |
| "step": 359500 | |
| }, | |
| { | |
| "epoch": 8.36, | |
| "learning_rate": 8.212420197330238e-06, | |
| "loss": 0.3305, | |
| "step": 360000 | |
| }, | |
| { | |
| "epoch": 8.37, | |
| "learning_rate": 8.154381892048753e-06, | |
| "loss": 0.3308, | |
| "step": 360500 | |
| }, | |
| { | |
| "epoch": 8.38, | |
| "learning_rate": 8.096343586767268e-06, | |
| "loss": 0.3303, | |
| "step": 361000 | |
| }, | |
| { | |
| "epoch": 8.39, | |
| "learning_rate": 8.03830528148578e-06, | |
| "loss": 0.3306, | |
| "step": 361500 | |
| }, | |
| { | |
| "epoch": 8.4, | |
| "learning_rate": 7.980266976204295e-06, | |
| "loss": 0.3305, | |
| "step": 362000 | |
| }, | |
| { | |
| "epoch": 8.42, | |
| "learning_rate": 7.92222867092281e-06, | |
| "loss": 0.3301, | |
| "step": 362500 | |
| }, | |
| { | |
| "epoch": 8.43, | |
| "learning_rate": 7.864190365641324e-06, | |
| "loss": 0.3303, | |
| "step": 363000 | |
| }, | |
| { | |
| "epoch": 8.44, | |
| "learning_rate": 7.806152060359837e-06, | |
| "loss": 0.33, | |
| "step": 363500 | |
| }, | |
| { | |
| "epoch": 8.45, | |
| "learning_rate": 7.748113755078353e-06, | |
| "loss": 0.3302, | |
| "step": 364000 | |
| }, | |
| { | |
| "epoch": 8.46, | |
| "learning_rate": 7.690075449796866e-06, | |
| "loss": 0.3304, | |
| "step": 364500 | |
| }, | |
| { | |
| "epoch": 8.47, | |
| "learning_rate": 7.63203714451538e-06, | |
| "loss": 0.3305, | |
| "step": 365000 | |
| }, | |
| { | |
| "epoch": 8.49, | |
| "learning_rate": 7.573998839233895e-06, | |
| "loss": 0.3303, | |
| "step": 365500 | |
| }, | |
| { | |
| "epoch": 8.5, | |
| "learning_rate": 7.5159605339524085e-06, | |
| "loss": 0.3303, | |
| "step": 366000 | |
| }, | |
| { | |
| "epoch": 8.51, | |
| "learning_rate": 7.457922228670923e-06, | |
| "loss": 0.3303, | |
| "step": 366500 | |
| }, | |
| { | |
| "epoch": 8.52, | |
| "learning_rate": 7.399883923389437e-06, | |
| "loss": 0.3302, | |
| "step": 367000 | |
| }, | |
| { | |
| "epoch": 8.53, | |
| "learning_rate": 7.341845618107951e-06, | |
| "loss": 0.3307, | |
| "step": 367500 | |
| }, | |
| { | |
| "epoch": 8.54, | |
| "learning_rate": 7.283807312826465e-06, | |
| "loss": 0.3303, | |
| "step": 368000 | |
| }, | |
| { | |
| "epoch": 8.55, | |
| "learning_rate": 7.22576900754498e-06, | |
| "loss": 0.3303, | |
| "step": 368500 | |
| }, | |
| { | |
| "epoch": 8.57, | |
| "learning_rate": 7.167730702263495e-06, | |
| "loss": 0.3304, | |
| "step": 369000 | |
| }, | |
| { | |
| "epoch": 8.58, | |
| "learning_rate": 7.109692396982008e-06, | |
| "loss": 0.3303, | |
| "step": 369500 | |
| }, | |
| { | |
| "epoch": 8.59, | |
| "learning_rate": 7.051654091700523e-06, | |
| "loss": 0.3303, | |
| "step": 370000 | |
| }, | |
| { | |
| "epoch": 8.6, | |
| "learning_rate": 6.993615786419037e-06, | |
| "loss": 0.3303, | |
| "step": 370500 | |
| }, | |
| { | |
| "epoch": 8.61, | |
| "learning_rate": 6.935577481137551e-06, | |
| "loss": 0.3302, | |
| "step": 371000 | |
| }, | |
| { | |
| "epoch": 8.62, | |
| "learning_rate": 6.877539175856065e-06, | |
| "loss": 0.3305, | |
| "step": 371500 | |
| }, | |
| { | |
| "epoch": 8.64, | |
| "learning_rate": 6.81950087057458e-06, | |
| "loss": 0.3302, | |
| "step": 372000 | |
| }, | |
| { | |
| "epoch": 8.65, | |
| "learning_rate": 6.761462565293093e-06, | |
| "loss": 0.3304, | |
| "step": 372500 | |
| }, | |
| { | |
| "epoch": 8.66, | |
| "learning_rate": 6.703424260011608e-06, | |
| "loss": 0.3301, | |
| "step": 373000 | |
| }, | |
| { | |
| "epoch": 8.67, | |
| "learning_rate": 6.6453859547301225e-06, | |
| "loss": 0.3303, | |
| "step": 373500 | |
| }, | |
| { | |
| "epoch": 8.68, | |
| "learning_rate": 6.587347649448636e-06, | |
| "loss": 0.3304, | |
| "step": 374000 | |
| }, | |
| { | |
| "epoch": 8.69, | |
| "learning_rate": 6.5293093441671504e-06, | |
| "loss": 0.3304, | |
| "step": 374500 | |
| }, | |
| { | |
| "epoch": 8.71, | |
| "learning_rate": 6.471271038885665e-06, | |
| "loss": 0.3302, | |
| "step": 375000 | |
| }, | |
| { | |
| "epoch": 8.72, | |
| "learning_rate": 6.41323273360418e-06, | |
| "loss": 0.3302, | |
| "step": 375500 | |
| }, | |
| { | |
| "epoch": 8.73, | |
| "learning_rate": 6.355194428322693e-06, | |
| "loss": 0.3305, | |
| "step": 376000 | |
| }, | |
| { | |
| "epoch": 8.74, | |
| "learning_rate": 6.297156123041208e-06, | |
| "loss": 0.3304, | |
| "step": 376500 | |
| }, | |
| { | |
| "epoch": 8.75, | |
| "learning_rate": 6.2391178177597215e-06, | |
| "loss": 0.3304, | |
| "step": 377000 | |
| }, | |
| { | |
| "epoch": 8.76, | |
| "learning_rate": 6.181079512478236e-06, | |
| "loss": 0.3305, | |
| "step": 377500 | |
| }, | |
| { | |
| "epoch": 8.78, | |
| "learning_rate": 6.12304120719675e-06, | |
| "loss": 0.3298, | |
| "step": 378000 | |
| }, | |
| { | |
| "epoch": 8.79, | |
| "learning_rate": 6.065002901915264e-06, | |
| "loss": 0.3308, | |
| "step": 378500 | |
| }, | |
| { | |
| "epoch": 8.8, | |
| "learning_rate": 6.006964596633779e-06, | |
| "loss": 0.3303, | |
| "step": 379000 | |
| }, | |
| { | |
| "epoch": 8.81, | |
| "learning_rate": 5.9489262913522925e-06, | |
| "loss": 0.3301, | |
| "step": 379500 | |
| }, | |
| { | |
| "epoch": 8.82, | |
| "learning_rate": 5.890887986070807e-06, | |
| "loss": 0.3302, | |
| "step": 380000 | |
| }, | |
| { | |
| "epoch": 8.83, | |
| "learning_rate": 5.832849680789321e-06, | |
| "loss": 0.3306, | |
| "step": 380500 | |
| }, | |
| { | |
| "epoch": 8.85, | |
| "learning_rate": 5.774811375507836e-06, | |
| "loss": 0.3306, | |
| "step": 381000 | |
| }, | |
| { | |
| "epoch": 8.86, | |
| "learning_rate": 5.71677307022635e-06, | |
| "loss": 0.3304, | |
| "step": 381500 | |
| }, | |
| { | |
| "epoch": 8.87, | |
| "learning_rate": 5.658734764944864e-06, | |
| "loss": 0.33, | |
| "step": 382000 | |
| }, | |
| { | |
| "epoch": 8.88, | |
| "learning_rate": 5.600696459663378e-06, | |
| "loss": 0.3303, | |
| "step": 382500 | |
| }, | |
| { | |
| "epoch": 8.89, | |
| "learning_rate": 5.542658154381892e-06, | |
| "loss": 0.3301, | |
| "step": 383000 | |
| }, | |
| { | |
| "epoch": 8.9, | |
| "learning_rate": 5.484619849100407e-06, | |
| "loss": 0.3303, | |
| "step": 383500 | |
| }, | |
| { | |
| "epoch": 8.91, | |
| "learning_rate": 5.42658154381892e-06, | |
| "loss": 0.3305, | |
| "step": 384000 | |
| }, | |
| { | |
| "epoch": 8.93, | |
| "learning_rate": 5.3685432385374355e-06, | |
| "loss": 0.3299, | |
| "step": 384500 | |
| }, | |
| { | |
| "epoch": 8.94, | |
| "learning_rate": 5.310504933255949e-06, | |
| "loss": 0.3305, | |
| "step": 385000 | |
| }, | |
| { | |
| "epoch": 8.95, | |
| "learning_rate": 5.252466627974463e-06, | |
| "loss": 0.3304, | |
| "step": 385500 | |
| }, | |
| { | |
| "epoch": 8.96, | |
| "learning_rate": 5.194428322692978e-06, | |
| "loss": 0.33, | |
| "step": 386000 | |
| }, | |
| { | |
| "epoch": 8.97, | |
| "learning_rate": 5.136390017411491e-06, | |
| "loss": 0.3304, | |
| "step": 386500 | |
| }, | |
| { | |
| "epoch": 8.98, | |
| "learning_rate": 5.0783517121300065e-06, | |
| "loss": 0.3301, | |
| "step": 387000 | |
| }, | |
| { | |
| "epoch": 9.0, | |
| "learning_rate": 5.02031340684852e-06, | |
| "loss": 0.33, | |
| "step": 387500 | |
| }, | |
| { | |
| "epoch": 9.0, | |
| "eval_accuracy": 0.8952824353922539, | |
| "eval_loss": 0.3249286413192749, | |
| "eval_runtime": 1998.0692, | |
| "eval_samples_per_second": 191.417, | |
| "eval_steps_per_second": 2.393, | |
| "step": 387675 | |
| }, | |
| { | |
| "epoch": 9.01, | |
| "learning_rate": 4.9622751015670345e-06, | |
| "loss": 0.33, | |
| "step": 388000 | |
| }, | |
| { | |
| "epoch": 9.02, | |
| "learning_rate": 4.904236796285549e-06, | |
| "loss": 0.3294, | |
| "step": 388500 | |
| }, | |
| { | |
| "epoch": 9.03, | |
| "learning_rate": 4.846198491004063e-06, | |
| "loss": 0.3299, | |
| "step": 389000 | |
| }, | |
| { | |
| "epoch": 9.04, | |
| "learning_rate": 4.788160185722578e-06, | |
| "loss": 0.3298, | |
| "step": 389500 | |
| }, | |
| { | |
| "epoch": 9.05, | |
| "learning_rate": 4.730121880441091e-06, | |
| "loss": 0.3296, | |
| "step": 390000 | |
| }, | |
| { | |
| "epoch": 9.07, | |
| "learning_rate": 4.6720835751596055e-06, | |
| "loss": 0.3296, | |
| "step": 390500 | |
| }, | |
| { | |
| "epoch": 9.08, | |
| "learning_rate": 4.61404526987812e-06, | |
| "loss": 0.3297, | |
| "step": 391000 | |
| }, | |
| { | |
| "epoch": 9.09, | |
| "learning_rate": 4.556006964596634e-06, | |
| "loss": 0.3294, | |
| "step": 391500 | |
| }, | |
| { | |
| "epoch": 9.1, | |
| "learning_rate": 4.497968659315148e-06, | |
| "loss": 0.3301, | |
| "step": 392000 | |
| }, | |
| { | |
| "epoch": 9.11, | |
| "learning_rate": 4.439930354033662e-06, | |
| "loss": 0.3295, | |
| "step": 392500 | |
| }, | |
| { | |
| "epoch": 9.12, | |
| "learning_rate": 4.3818920487521766e-06, | |
| "loss": 0.3299, | |
| "step": 393000 | |
| }, | |
| { | |
| "epoch": 9.14, | |
| "learning_rate": 4.323853743470691e-06, | |
| "loss": 0.3298, | |
| "step": 393500 | |
| }, | |
| { | |
| "epoch": 9.15, | |
| "learning_rate": 4.265815438189205e-06, | |
| "loss": 0.3294, | |
| "step": 394000 | |
| }, | |
| { | |
| "epoch": 9.16, | |
| "learning_rate": 4.207777132907719e-06, | |
| "loss": 0.3297, | |
| "step": 394500 | |
| }, | |
| { | |
| "epoch": 9.17, | |
| "learning_rate": 4.149738827626234e-06, | |
| "loss": 0.3297, | |
| "step": 395000 | |
| }, | |
| { | |
| "epoch": 9.18, | |
| "learning_rate": 4.091700522344748e-06, | |
| "loss": 0.3297, | |
| "step": 395500 | |
| }, | |
| { | |
| "epoch": 9.19, | |
| "learning_rate": 4.033662217063262e-06, | |
| "loss": 0.3296, | |
| "step": 396000 | |
| }, | |
| { | |
| "epoch": 9.2, | |
| "learning_rate": 3.975623911781776e-06, | |
| "loss": 0.3299, | |
| "step": 396500 | |
| }, | |
| { | |
| "epoch": 9.22, | |
| "learning_rate": 3.91758560650029e-06, | |
| "loss": 0.3295, | |
| "step": 397000 | |
| }, | |
| { | |
| "epoch": 9.23, | |
| "learning_rate": 3.859547301218805e-06, | |
| "loss": 0.3299, | |
| "step": 397500 | |
| }, | |
| { | |
| "epoch": 9.24, | |
| "learning_rate": 3.8015089959373187e-06, | |
| "loss": 0.33, | |
| "step": 398000 | |
| }, | |
| { | |
| "epoch": 9.25, | |
| "learning_rate": 3.7434706906558326e-06, | |
| "loss": 0.3296, | |
| "step": 398500 | |
| }, | |
| { | |
| "epoch": 9.26, | |
| "learning_rate": 3.6854323853743474e-06, | |
| "loss": 0.3295, | |
| "step": 399000 | |
| }, | |
| { | |
| "epoch": 9.27, | |
| "learning_rate": 3.6273940800928614e-06, | |
| "loss": 0.3295, | |
| "step": 399500 | |
| }, | |
| { | |
| "epoch": 9.29, | |
| "learning_rate": 3.5693557748113754e-06, | |
| "loss": 0.3295, | |
| "step": 400000 | |
| }, | |
| { | |
| "epoch": 9.3, | |
| "learning_rate": 3.51131746952989e-06, | |
| "loss": 0.3296, | |
| "step": 400500 | |
| }, | |
| { | |
| "epoch": 9.31, | |
| "learning_rate": 3.453279164248404e-06, | |
| "loss": 0.3299, | |
| "step": 401000 | |
| }, | |
| { | |
| "epoch": 9.32, | |
| "learning_rate": 3.3952408589669185e-06, | |
| "loss": 0.3294, | |
| "step": 401500 | |
| }, | |
| { | |
| "epoch": 9.33, | |
| "learning_rate": 3.3372025536854324e-06, | |
| "loss": 0.33, | |
| "step": 402000 | |
| }, | |
| { | |
| "epoch": 9.34, | |
| "learning_rate": 3.2791642484039464e-06, | |
| "loss": 0.3296, | |
| "step": 402500 | |
| }, | |
| { | |
| "epoch": 9.36, | |
| "learning_rate": 3.221125943122461e-06, | |
| "loss": 0.3298, | |
| "step": 403000 | |
| }, | |
| { | |
| "epoch": 9.37, | |
| "learning_rate": 3.163087637840975e-06, | |
| "loss": 0.3299, | |
| "step": 403500 | |
| }, | |
| { | |
| "epoch": 9.38, | |
| "learning_rate": 3.1050493325594895e-06, | |
| "loss": 0.3299, | |
| "step": 404000 | |
| }, | |
| { | |
| "epoch": 9.39, | |
| "learning_rate": 3.047011027278004e-06, | |
| "loss": 0.3299, | |
| "step": 404500 | |
| }, | |
| { | |
| "epoch": 9.4, | |
| "learning_rate": 2.988972721996518e-06, | |
| "loss": 0.3295, | |
| "step": 405000 | |
| }, | |
| { | |
| "epoch": 9.41, | |
| "learning_rate": 2.930934416715032e-06, | |
| "loss": 0.3293, | |
| "step": 405500 | |
| }, | |
| { | |
| "epoch": 9.43, | |
| "learning_rate": 2.8728961114335462e-06, | |
| "loss": 0.3294, | |
| "step": 406000 | |
| }, | |
| { | |
| "epoch": 9.44, | |
| "learning_rate": 2.8148578061520606e-06, | |
| "loss": 0.3295, | |
| "step": 406500 | |
| }, | |
| { | |
| "epoch": 9.45, | |
| "learning_rate": 2.7568195008705746e-06, | |
| "loss": 0.3298, | |
| "step": 407000 | |
| }, | |
| { | |
| "epoch": 9.46, | |
| "learning_rate": 2.698781195589089e-06, | |
| "loss": 0.3298, | |
| "step": 407500 | |
| }, | |
| { | |
| "epoch": 9.47, | |
| "learning_rate": 2.6407428903076033e-06, | |
| "loss": 0.3296, | |
| "step": 408000 | |
| }, | |
| { | |
| "epoch": 9.48, | |
| "learning_rate": 2.5827045850261177e-06, | |
| "loss": 0.3298, | |
| "step": 408500 | |
| }, | |
| { | |
| "epoch": 9.5, | |
| "learning_rate": 2.5246662797446317e-06, | |
| "loss": 0.3293, | |
| "step": 409000 | |
| }, | |
| { | |
| "epoch": 9.51, | |
| "learning_rate": 2.4666279744631456e-06, | |
| "loss": 0.3296, | |
| "step": 409500 | |
| }, | |
| { | |
| "epoch": 9.52, | |
| "learning_rate": 2.40858966918166e-06, | |
| "loss": 0.3293, | |
| "step": 410000 | |
| }, | |
| { | |
| "epoch": 9.53, | |
| "learning_rate": 2.3505513639001744e-06, | |
| "loss": 0.3298, | |
| "step": 410500 | |
| }, | |
| { | |
| "epoch": 9.54, | |
| "learning_rate": 2.2925130586186883e-06, | |
| "loss": 0.3296, | |
| "step": 411000 | |
| }, | |
| { | |
| "epoch": 9.55, | |
| "learning_rate": 2.2344747533372027e-06, | |
| "loss": 0.3297, | |
| "step": 411500 | |
| }, | |
| { | |
| "epoch": 9.56, | |
| "learning_rate": 2.176436448055717e-06, | |
| "loss": 0.3294, | |
| "step": 412000 | |
| }, | |
| { | |
| "epoch": 9.58, | |
| "learning_rate": 2.118398142774231e-06, | |
| "loss": 0.3299, | |
| "step": 412500 | |
| }, | |
| { | |
| "epoch": 9.59, | |
| "learning_rate": 2.060359837492745e-06, | |
| "loss": 0.3296, | |
| "step": 413000 | |
| }, | |
| { | |
| "epoch": 9.6, | |
| "learning_rate": 2.0023215322112594e-06, | |
| "loss": 0.3297, | |
| "step": 413500 | |
| }, | |
| { | |
| "epoch": 9.61, | |
| "learning_rate": 1.9442832269297738e-06, | |
| "loss": 0.3294, | |
| "step": 414000 | |
| }, | |
| { | |
| "epoch": 9.62, | |
| "learning_rate": 1.8862449216482881e-06, | |
| "loss": 0.3296, | |
| "step": 414500 | |
| }, | |
| { | |
| "epoch": 9.63, | |
| "learning_rate": 1.828206616366802e-06, | |
| "loss": 0.3297, | |
| "step": 415000 | |
| }, | |
| { | |
| "epoch": 9.65, | |
| "learning_rate": 1.7701683110853163e-06, | |
| "loss": 0.3298, | |
| "step": 415500 | |
| }, | |
| { | |
| "epoch": 9.66, | |
| "learning_rate": 1.7121300058038307e-06, | |
| "loss": 0.3291, | |
| "step": 416000 | |
| }, | |
| { | |
| "epoch": 9.67, | |
| "learning_rate": 1.654091700522345e-06, | |
| "loss": 0.3299, | |
| "step": 416500 | |
| }, | |
| { | |
| "epoch": 9.68, | |
| "learning_rate": 1.596053395240859e-06, | |
| "loss": 0.3295, | |
| "step": 417000 | |
| }, | |
| { | |
| "epoch": 9.69, | |
| "learning_rate": 1.5380150899593732e-06, | |
| "loss": 0.3298, | |
| "step": 417500 | |
| }, | |
| { | |
| "epoch": 9.7, | |
| "learning_rate": 1.4799767846778875e-06, | |
| "loss": 0.3297, | |
| "step": 418000 | |
| }, | |
| { | |
| "epoch": 9.72, | |
| "learning_rate": 1.4219384793964017e-06, | |
| "loss": 0.3298, | |
| "step": 418500 | |
| }, | |
| { | |
| "epoch": 9.73, | |
| "learning_rate": 1.3639001741149159e-06, | |
| "loss": 0.3295, | |
| "step": 419000 | |
| }, | |
| { | |
| "epoch": 9.74, | |
| "learning_rate": 1.30586186883343e-06, | |
| "loss": 0.3299, | |
| "step": 419500 | |
| }, | |
| { | |
| "epoch": 9.75, | |
| "learning_rate": 1.2478235635519444e-06, | |
| "loss": 0.3294, | |
| "step": 420000 | |
| }, | |
| { | |
| "epoch": 9.76, | |
| "learning_rate": 1.1897852582704586e-06, | |
| "loss": 0.3298, | |
| "step": 420500 | |
| }, | |
| { | |
| "epoch": 9.77, | |
| "learning_rate": 1.1317469529889728e-06, | |
| "loss": 0.3299, | |
| "step": 421000 | |
| }, | |
| { | |
| "epoch": 9.79, | |
| "learning_rate": 1.073708647707487e-06, | |
| "loss": 0.3297, | |
| "step": 421500 | |
| }, | |
| { | |
| "epoch": 9.8, | |
| "learning_rate": 1.0156703424260013e-06, | |
| "loss": 0.3299, | |
| "step": 422000 | |
| }, | |
| { | |
| "epoch": 9.81, | |
| "learning_rate": 9.576320371445155e-07, | |
| "loss": 0.3297, | |
| "step": 422500 | |
| }, | |
| { | |
| "epoch": 9.82, | |
| "learning_rate": 8.995937318630296e-07, | |
| "loss": 0.3295, | |
| "step": 423000 | |
| }, | |
| { | |
| "epoch": 9.83, | |
| "learning_rate": 8.415554265815438e-07, | |
| "loss": 0.3294, | |
| "step": 423500 | |
| }, | |
| { | |
| "epoch": 9.84, | |
| "learning_rate": 7.835171213000581e-07, | |
| "loss": 0.3296, | |
| "step": 424000 | |
| }, | |
| { | |
| "epoch": 9.85, | |
| "learning_rate": 7.254788160185723e-07, | |
| "loss": 0.3292, | |
| "step": 424500 | |
| }, | |
| { | |
| "epoch": 9.87, | |
| "learning_rate": 6.674405107370864e-07, | |
| "loss": 0.3296, | |
| "step": 425000 | |
| }, | |
| { | |
| "epoch": 9.88, | |
| "learning_rate": 6.094022054556007e-07, | |
| "loss": 0.3293, | |
| "step": 425500 | |
| }, | |
| { | |
| "epoch": 9.89, | |
| "learning_rate": 5.513639001741149e-07, | |
| "loss": 0.3295, | |
| "step": 426000 | |
| }, | |
| { | |
| "epoch": 9.9, | |
| "learning_rate": 4.933255948926291e-07, | |
| "loss": 0.3295, | |
| "step": 426500 | |
| }, | |
| { | |
| "epoch": 9.91, | |
| "learning_rate": 4.352872896111433e-07, | |
| "loss": 0.3294, | |
| "step": 427000 | |
| }, | |
| { | |
| "epoch": 9.92, | |
| "learning_rate": 3.772489843296576e-07, | |
| "loss": 0.3296, | |
| "step": 427500 | |
| }, | |
| { | |
| "epoch": 9.94, | |
| "learning_rate": 3.192106790481718e-07, | |
| "loss": 0.3299, | |
| "step": 428000 | |
| }, | |
| { | |
| "epoch": 9.95, | |
| "learning_rate": 2.6117237376668603e-07, | |
| "loss": 0.3297, | |
| "step": 428500 | |
| }, | |
| { | |
| "epoch": 9.96, | |
| "learning_rate": 2.0313406848520025e-07, | |
| "loss": 0.3294, | |
| "step": 429000 | |
| }, | |
| { | |
| "epoch": 9.97, | |
| "learning_rate": 1.4509576320371445e-07, | |
| "loss": 0.3293, | |
| "step": 429500 | |
| }, | |
| { | |
| "epoch": 9.98, | |
| "learning_rate": 8.705745792222868e-08, | |
| "loss": 0.3297, | |
| "step": 430000 | |
| }, | |
| { | |
| "epoch": 9.99, | |
| "learning_rate": 2.9019152640742893e-08, | |
| "loss": 0.3296, | |
| "step": 430500 | |
| }, | |
| { | |
| "epoch": 10.0, | |
| "eval_accuracy": 0.8953714759150758, | |
| "eval_loss": 0.3246208131313324, | |
| "eval_runtime": 1996.1692, | |
| "eval_samples_per_second": 191.599, | |
| "eval_steps_per_second": 2.395, | |
| "step": 430750 | |
| }, | |
| { | |
| "epoch": 10.0, | |
| "step": 430750, | |
| "total_flos": 9.004123682884289e+18, | |
| "train_loss": 0.34171454371066545, | |
| "train_runtime": 354822.8145, | |
| "train_samples_per_second": 97.119, | |
| "train_steps_per_second": 1.214 | |
| } | |
| ], | |
| "logging_steps": 500, | |
| "max_steps": 430750, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 10, | |
| "save_steps": 500, | |
| "total_flos": 9.004123682884289e+18, | |
| "train_batch_size": 16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |