{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9952,
  "eval_steps": 500,
  "global_step": 936,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 6.896551724137931e-07,
      "loss": 2.3172,
      "step": 1
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.3793103448275862e-06,
      "loss": 2.5335,
      "step": 2
    },
    {
      "epoch": 0.01,
      "learning_rate": 2.0689655172413796e-06,
      "loss": 2.3783,
      "step": 3
    },
    {
      "epoch": 0.01,
      "learning_rate": 2.7586206896551725e-06,
      "loss": 2.3682,
      "step": 4
    },
    {
      "epoch": 0.02,
      "learning_rate": 3.448275862068966e-06,
      "loss": 2.3227,
      "step": 5
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.137931034482759e-06,
      "loss": 2.0661,
      "step": 6
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8275862068965525e-06,
      "loss": 2.368,
      "step": 7
    },
    {
      "epoch": 0.03,
      "learning_rate": 5.517241379310345e-06,
      "loss": 2.2495,
      "step": 8
    },
    {
      "epoch": 0.03,
      "learning_rate": 6.206896551724138e-06,
      "loss": 2.2111,
      "step": 9
    },
    {
      "epoch": 0.03,
      "learning_rate": 6.896551724137932e-06,
      "loss": 2.079,
      "step": 10
    },
    {
      "epoch": 0.04,
      "learning_rate": 7.586206896551724e-06,
      "loss": 1.8298,
      "step": 11
    },
    {
      "epoch": 0.04,
      "learning_rate": 8.275862068965518e-06,
      "loss": 1.9533,
      "step": 12
    },
    {
      "epoch": 0.04,
      "learning_rate": 8.965517241379312e-06,
      "loss": 1.9097,
      "step": 13
    },
    {
      "epoch": 0.04,
      "learning_rate": 9.655172413793105e-06,
      "loss": 2.0271,
      "step": 14
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.0344827586206898e-05,
      "loss": 2.0544,
      "step": 15
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.103448275862069e-05,
      "loss": 1.9293,
      "step": 16
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.1724137931034483e-05,
      "loss": 1.777,
      "step": 17
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.2413793103448277e-05,
      "loss": 1.723,
      "step": 18
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.310344827586207e-05,
      "loss": 1.8557,
      "step": 19
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.3793103448275863e-05,
      "loss": 1.7499,
      "step": 20
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.4482758620689657e-05,
      "loss": 1.8048,
      "step": 21
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.5172413793103448e-05,
      "loss": 1.8325,
      "step": 22
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.586206896551724e-05,
      "loss": 1.6855,
      "step": 23
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.6551724137931037e-05,
      "loss": 1.4802,
      "step": 24
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.7241379310344828e-05,
      "loss": 1.6372,
      "step": 25
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.7931034482758623e-05,
      "loss": 1.7358,
      "step": 26
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.8620689655172415e-05,
      "loss": 1.6731,
      "step": 27
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.931034482758621e-05,
      "loss": 1.6498,
      "step": 28
    },
    {
      "epoch": 0.09,
      "learning_rate": 2e-05,
      "loss": 1.4205,
      "step": 29
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.999994001333173e-05,
      "loss": 1.6402,
      "step": 30
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.999976005404659e-05,
      "loss": 1.696,
      "step": 31
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9999460124303614e-05,
      "loss": 1.5169,
      "step": 32
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.999904022770116e-05,
      "loss": 1.6764,
      "step": 33
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9998500369276873e-05,
      "loss": 1.6429,
      "step": 34
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9997840555507605e-05,
      "loss": 1.6175,
      "step": 35
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9997060794309367e-05,
      "loss": 1.5782,
      "step": 36
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9996161095037215e-05,
      "loss": 1.5128,
      "step": 37
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9995141468485138e-05,
      "loss": 1.4396,
      "step": 38
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9994001926885936e-05,
      "loss": 1.5293,
      "step": 39
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.999274248391108e-05,
      "loss": 1.4725,
      "step": 40
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9991363154670512e-05,
      "loss": 1.3653,
      "step": 41
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9989863955712518e-05,
      "loss": 1.6944,
      "step": 42
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9988244905023476e-05,
      "loss": 1.6718,
      "step": 43
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.998650602202769e-05,
      "loss": 1.4897,
      "step": 44
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.998464732758711e-05,
      "loss": 1.4364,
      "step": 45
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.998266884400112e-05,
      "loss": 1.4994,
      "step": 46
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9980570595006243e-05,
      "loss": 1.3317,
      "step": 47
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9978352605775874e-05,
      "loss": 1.4053,
      "step": 48
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.997601490291997e-05,
      "loss": 1.4366,
      "step": 49
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9973557514484726e-05,
      "loss": 1.553,
      "step": 50
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9970980469952264e-05,
      "loss": 1.4289,
      "step": 51
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.996828380024024e-05,
      "loss": 1.505,
      "step": 52
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9965467537701496e-05,
      "loss": 1.5202,
      "step": 53
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.996253171612368e-05,
      "loss": 1.3883,
      "step": 54
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.995947637072882e-05,
      "loss": 1.3935,
      "step": 55
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9956301538172913e-05,
      "loss": 1.3272,
      "step": 56
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.995300725654549e-05,
      "loss": 1.5009,
      "step": 57
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.994959356536914e-05,
      "loss": 1.4443,
      "step": 58
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9946060505599058e-05,
      "loss": 1.6184,
      "step": 59
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.994240811962254e-05,
      "loss": 1.4231,
      "step": 60
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.993863645125848e-05,
      "loss": 1.3921,
      "step": 61
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9934745545756847e-05,
      "loss": 1.3142,
      "step": 62
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9930735449798125e-05,
      "loss": 1.5005,
      "step": 63
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9926606211492773e-05,
      "loss": 1.2946,
      "step": 64
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9922357880380644e-05,
      "loss": 1.4166,
      "step": 65
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9917990507430385e-05,
      "loss": 1.6474,
      "step": 66
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9913504145038823e-05,
      "loss": 1.3899,
      "step": 67
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9908898847030348e-05,
      "loss": 1.1886,
      "step": 68
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9904174668656252e-05,
      "loss": 1.4622,
      "step": 69
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9899331666594085e-05,
      "loss": 1.3867,
      "step": 70
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9894369898946955e-05,
      "loss": 1.5177,
      "step": 71
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9889289425242845e-05,
      "loss": 1.4268,
      "step": 72
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9884090306433892e-05,
      "loss": 1.3998,
      "step": 73
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9878772604895657e-05,
      "loss": 1.5961,
      "step": 74
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9873336384426388e-05,
      "loss": 1.4743,
      "step": 75
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9867781710246228e-05,
      "loss": 1.4967,
      "step": 76
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9862108648996457e-05,
      "loss": 1.4504,
      "step": 77
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.985631726873869e-05,
      "loss": 1.5035,
      "step": 78
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.985040763895404e-05,
      "loss": 1.224,
      "step": 79
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9844379830542312e-05,
      "loss": 1.512,
      "step": 80
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9838233915821133e-05,
      "loss": 1.3871,
      "step": 81
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9831969968525096e-05,
      "loss": 1.4091,
      "step": 82
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.982558806380486e-05,
      "loss": 1.3401,
      "step": 83
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9819088278226273e-05,
      "loss": 1.2054,
      "step": 84
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9812470689769424e-05,
      "loss": 1.3014,
      "step": 85
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9805735377827738e-05,
      "loss": 1.4982,
      "step": 86
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.979888242320699e-05,
      "loss": 1.4731,
      "step": 87
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.979191190812437e-05,
      "loss": 1.376,
      "step": 88
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9784823916207472e-05,
      "loss": 1.263,
      "step": 89
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9777618532493298e-05,
      "loss": 1.2613,
      "step": 90
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9770295843427242e-05,
      "loss": 1.4536,
      "step": 91
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.976285593686205e-05,
      "loss": 1.4708,
      "step": 92
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9755298902056758e-05,
      "loss": 1.4129,
      "step": 93
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.974762482967564e-05,
      "loss": 1.3933,
      "step": 94
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9739833811787097e-05,
      "loss": 1.368,
      "step": 95
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9731925941862573e-05,
      "loss": 1.2403,
      "step": 96
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9723901314775423e-05,
      "loss": 1.4722,
      "step": 97
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9715760026799776e-05,
      "loss": 1.3146,
      "step": 98
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9707502175609377e-05,
      "loss": 1.3537,
      "step": 99
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9699127860276426e-05,
      "loss": 1.3931,
      "step": 100
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9690637181270372e-05,
      "loss": 1.3322,
      "step": 101
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.968203024045673e-05,
      "loss": 1.3933,
      "step": 102
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.967330714109584e-05,
      "loss": 1.3024,
      "step": 103
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9664467987841632e-05,
      "loss": 1.422,
      "step": 104
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9655512886740383e-05,
      "loss": 1.3223,
      "step": 105
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9646441945229424e-05,
      "loss": 1.1517,
      "step": 106
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9637255272135863e-05,
      "loss": 1.2205,
      "step": 107
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9627952977675292e-05,
      "loss": 1.3114,
      "step": 108
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9618535173450434e-05,
      "loss": 1.4354,
      "step": 109
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9609001972449834e-05,
      "loss": 1.3486,
      "step": 110
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.959935348904648e-05,
      "loss": 1.3698,
      "step": 111
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.958958983899645e-05,
      "loss": 1.4744,
      "step": 112
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.957971113943751e-05,
      "loss": 1.1944,
      "step": 113
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.956971750888771e-05,
      "loss": 1.4073,
      "step": 114
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.955960906724398e-05,
      "loss": 1.3119,
      "step": 115
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9549385935780664e-05,
      "loss": 1.4867,
      "step": 116
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9539048237148078e-05,
      "loss": 1.4325,
      "step": 117
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.952859609537104e-05,
      "loss": 1.3476,
      "step": 118
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9518029635847387e-05,
      "loss": 1.3902,
      "step": 119
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9507348985346458e-05,
      "loss": 1.2178,
      "step": 120
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.949655427200758e-05,
      "loss": 1.4009,
      "step": 121
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.948564562533853e-05,
      "loss": 1.1641,
      "step": 122
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9474623176213988e-05,
      "loss": 1.3717,
      "step": 123
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9463487056873945e-05,
      "loss": 1.2272,
      "step": 124
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9452237400922142e-05,
      "loss": 1.4411,
      "step": 125
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9440874343324464e-05,
      "loss": 1.2594,
      "step": 126
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9429398020407292e-05,
      "loss": 1.4625,
      "step": 127
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9417808569855907e-05,
      "loss": 1.3649,
      "step": 128
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9406106130712813e-05,
      "loss": 1.3346,
      "step": 129
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.939429084337608e-05,
      "loss": 1.267,
      "step": 130
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.938236284959765e-05,
      "loss": 1.1933,
      "step": 131
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9370322292481642e-05,
      "loss": 1.2708,
      "step": 132
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.935816931648264e-05,
      "loss": 1.2231,
      "step": 133
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9345904067403953e-05,
      "loss": 1.1761,
      "step": 134
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9333526692395863e-05,
      "loss": 1.2886,
      "step": 135
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9321037339953873e-05,
      "loss": 1.289,
      "step": 136
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9308436159916905e-05,
      "loss": 1.4652,
      "step": 137
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9295723303465523e-05,
      "loss": 1.4461,
      "step": 138
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.928289892312011e-05,
      "loss": 1.3263,
      "step": 139
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.9269963172739033e-05,
      "loss": 1.2746,
      "step": 140
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.9256916207516806e-05,
      "loss": 1.4045,
      "step": 141
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.9243758183982226e-05,
      "loss": 1.201,
      "step": 142
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.9230489259996487e-05,
      "loss": 1.3803,
      "step": 143
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.9217109594751303e-05,
      "loss": 1.2685,
      "step": 144
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.9203619348766974e-05,
      "loss": 1.1703,
      "step": 145
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.9190018683890492e-05,
      "loss": 1.4119,
      "step": 146
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.9176307763293563e-05,
      "loss": 1.2005,
      "step": 147
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.9162486751470687e-05,
      "loss": 1.216,
      "step": 148
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.914855581423714e-05,
      "loss": 1.413,
      "step": 149
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.9134515118727035e-05,
      "loss": 1.2447,
      "step": 150
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.9120364833391277e-05,
      "loss": 1.399,
      "step": 151
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.910610512799556e-05,
      "loss": 1.1665,
      "step": 152
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.9091736173618326e-05,
      "loss": 1.2137,
      "step": 153
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.907725814264872e-05,
      "loss": 1.2291,
      "step": 154
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.9062671208784508e-05,
      "loss": 1.2755,
      "step": 155
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.9047975547029998e-05,
      "loss": 1.3342,
      "step": 156
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.9033171333693952e-05,
      "loss": 1.3475,
      "step": 157
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.9018258746387458e-05,
      "loss": 1.3621,
      "step": 158
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.9003237964021796e-05,
      "loss": 1.4957,
      "step": 159
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.8988109166806313e-05,
      "loss": 1.3468,
      "step": 160
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.8972872536246224e-05,
      "loss": 1.2981,
      "step": 161
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.8957528255140482e-05,
      "loss": 1.3762,
      "step": 162
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.894207650757954e-05,
      "loss": 1.4384,
      "step": 163
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.892651747894317e-05,
      "loss": 1.3045,
      "step": 164
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.8910851355898238e-05,
      "loss": 1.2176,
      "step": 165
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.8895078326396436e-05,
      "loss": 1.3519,
      "step": 166
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.8879198579672068e-05,
      "loss": 1.3789,
      "step": 167
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.8863212306239753e-05,
      "loss": 1.4566,
      "step": 168
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.884711969789215e-05,
      "loss": 1.2376,
      "step": 169
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.883092094769765e-05,
      "loss": 1.3335,
      "step": 170
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.8814616249998063e-05,
      "loss": 1.2798,
      "step": 171
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.8798205800406283e-05,
      "loss": 1.2719,
      "step": 172
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.8781689795803954e-05,
      "loss": 1.4,
      "step": 173
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.8765068434339095e-05,
      "loss": 1.3316,
      "step": 174
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.8748341915423723e-05,
      "loss": 1.3677,
      "step": 175
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.8731510439731465e-05,
      "loss": 1.2105,
      "step": 176
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.8714574209195153e-05,
      "loss": 1.34,
      "step": 177
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.8697533427004395e-05,
      "loss": 1.3036,
      "step": 178
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.868038829760314e-05,
      "loss": 1.2229,
      "step": 179
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.866313902668723e-05,
      "loss": 1.3492,
      "step": 180
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.8645785821201918e-05,
      "loss": 1.1391,
      "step": 181
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.8628328889339403e-05,
      "loss": 1.1359,
      "step": 182
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.8610768440536317e-05,
      "loss": 1.222,
      "step": 183
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.859310468547123e-05,
      "loss": 1.1345,
      "step": 184
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.85753378360621e-05,
      "loss": 1.2547,
      "step": 185
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.8557468105463753e-05,
      "loss": 1.3382,
      "step": 186
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.8539495708065304e-05,
      "loss": 1.3435,
      "step": 187
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.85214208594876e-05,
      "loss": 1.258,
      "step": 188
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.8503243776580637e-05,
      "loss": 1.4196,
      "step": 189
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.8484964677420937e-05,
      "loss": 1.3237,
      "step": 190
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.8466583781308954e-05,
      "loss": 1.2552,
      "step": 191
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.8448101308766433e-05,
      "loss": 1.3336,
      "step": 192
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8429517481533762e-05,
      "loss": 1.3072,
      "step": 193
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8410832522567318e-05,
      "loss": 1.2765,
      "step": 194
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8392046656036788e-05,
      "loss": 1.4138,
      "step": 195
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.8373160107322476e-05,
      "loss": 1.118,
      "step": 196
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.8354173103012614e-05,
      "loss": 1.3164,
      "step": 197
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.8335085870900627e-05,
      "loss": 1.0868,
      "step": 198
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8315898639982404e-05,
      "loss": 1.4739,
      "step": 199
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8296611640453562e-05,
      "loss": 1.3315,
      "step": 200
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.827722510370667e-05,
      "loss": 1.4071,
      "step": 201
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.825773926232847e-05,
      "loss": 1.1968,
      "step": 202
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8238154350097103e-05,
      "loss": 1.203,
      "step": 203
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8218470601979302e-05,
      "loss": 1.3302,
      "step": 204
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.819868825412756e-05,
      "loss": 1.3479,
      "step": 205
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.8178807543877303e-05,
      "loss": 1.1735,
      "step": 206
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.815882870974404e-05,
      "loss": 1.4118,
      "step": 207
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.8138751991420524e-05,
      "loss": 1.4945,
      "step": 208
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.8118577629773824e-05,
      "loss": 1.1916,
      "step": 209
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.8098305866842506e-05,
      "loss": 1.5478,
      "step": 210
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.8077936945833662e-05,
      "loss": 1.277,
      "step": 211
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.805747111112004e-05,
      "loss": 1.4654,
      "step": 212
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.8036908608237085e-05,
      "loss": 1.1941,
      "step": 213
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.801624968388e-05,
      "loss": 1.1873,
      "step": 214
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.7995494585900802e-05,
      "loss": 1.1864,
      "step": 215
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.7974643563305326e-05,
      "loss": 1.115,
      "step": 216
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.795369686625024e-05,
      "loss": 1.1453,
      "step": 217
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.7932654746040063e-05,
      "loss": 1.42,
      "step": 218
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.791151745512413e-05,
      "loss": 1.2897,
      "step": 219
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.7890285247093574e-05,
      "loss": 1.2854,
      "step": 220
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.786895837667828e-05,
      "loss": 1.1274,
      "step": 221
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.7847537099743824e-05,
      "loss": 1.2839,
      "step": 222
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.7826021673288413e-05,
      "loss": 1.1834,
      "step": 223
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.7804412355439803e-05,
      "loss": 1.5192,
      "step": 224
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.7782709405452184e-05,
      "loss": 1.3566,
      "step": 225
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.7760913083703088e-05,
      "loss": 1.2277,
      "step": 226
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.7739023651690267e-05,
      "loss": 1.2414,
      "step": 227
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.771704137202853e-05,
      "loss": 1.3937,
      "step": 228
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.769496650844663e-05,
      "loss": 1.2114,
      "step": 229
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.7672799325784066e-05,
      "loss": 1.113,
      "step": 230
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.7650540089987926e-05,
      "loss": 1.0849,
      "step": 231
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.762818906810969e-05,
      "loss": 1.2413,
      "step": 232
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.7605746528302017e-05,
      "loss": 1.233,
      "step": 233
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.7583212739815555e-05,
      "loss": 1.0594,
      "step": 234
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.7560587972995678e-05,
      "loss": 1.3367,
      "step": 235
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.7537872499279265e-05,
      "loss": 1.2846,
      "step": 236
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.751506659119143e-05,
      "loss": 1.2923,
      "step": 237
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.7492170522342267e-05,
      "loss": 1.1856,
      "step": 238
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.7469184567423548e-05,
      "loss": 1.1919,
      "step": 239
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.7446109002205444e-05,
      "loss": 1.2731,
      "step": 240
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.7422944103533212e-05,
      "loss": 1.3955,
      "step": 241
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.739969014932387e-05,
      "loss": 1.2863,
      "step": 242
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.7376347418562866e-05,
      "loss": 1.245,
      "step": 243
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.735291619130073e-05,
      "loss": 1.1457,
      "step": 244
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.732939674864971e-05,
      "loss": 1.4096,
      "step": 245
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.730578937278041e-05,
      "loss": 1.3531,
      "step": 246
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.7282094346918395e-05,
      "loss": 1.3947,
      "step": 247
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.7258311955340794e-05,
      "loss": 1.3041,
      "step": 248
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.7234442483372894e-05,
      "loss": 1.2553,
      "step": 249
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.721048621738472e-05,
      "loss": 1.3328,
      "step": 250
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.7186443444787578e-05,
      "loss": 1.1772,
      "step": 251
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.7162314454030644e-05,
      "loss": 1.0941,
      "step": 252
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.7138099534597464e-05,
      "loss": 1.2324,
      "step": 253
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.7113798977002506e-05,
      "loss": 1.0949,
      "step": 254
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.7089413072787667e-05,
      "loss": 1.3798,
      "step": 255
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.706494211451878e-05,
      "loss": 1.2483,
      "step": 256
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.7040386395782093e-05,
      "loss": 1.0528,
      "step": 257
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.701574621118076e-05,
      "loss": 1.2136,
      "step": 258
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.6991021856331297e-05,
      "loss": 1.4656,
      "step": 259
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.696621362786003e-05,
      "loss": 1.2376,
      "step": 260
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.6941321823399567e-05,
      "loss": 1.2852,
      "step": 261
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.691634674158518e-05,
      "loss": 1.2205,
      "step": 262
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.6891288682051264e-05,
      "loss": 1.3117,
      "step": 263
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.686614794542772e-05,
      "loss": 1.2704,
      "step": 264
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.684092483333635e-05,
      "loss": 1.1265,
      "step": 265
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.681561964838725e-05,
      "loss": 1.1492,
      "step": 266
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.6790232694175164e-05,
      "loss": 1.2796,
      "step": 267
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.6764764275275852e-05,
      "loss": 1.238,
      "step": 268
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.6739214697242437e-05,
      "loss": 1.0388,
      "step": 269
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.6713584266601728e-05,
      "loss": 1.2182,
      "step": 270
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.6687873290850554e-05,
      "loss": 1.3731,
      "step": 271
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.6662082078452068e-05,
      "loss": 1.1172,
      "step": 272
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.6636210938832053e-05,
      "loss": 1.3127,
      "step": 273
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.6610260182375202e-05,
      "loss": 1.217,
      "step": 274
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.65842301204214e-05,
      "loss": 1.1905,
      "step": 275
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.6558121065261982e-05,
      "loss": 1.2278,
      "step": 276
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6531933330136e-05,
      "loss": 1.3432,
      "step": 277
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6505667229226445e-05,
      "loss": 1.2011,
      "step": 278
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6479323077656492e-05,
      "loss": 1.2781,
      "step": 279
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.6452901191485725e-05,
      "loss": 1.1543,
      "step": 280
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.642640188770632e-05,
      "loss": 1.3686,
      "step": 281
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.639982548423927e-05,
      "loss": 1.3256,
      "step": 282
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.6373172299930553e-05,
      "loss": 1.2598,
      "step": 283
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.6346442654547314e-05,
      "loss": 1.3221,
      "step": 284
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.631963686877403e-05,
      "loss": 1.2581,
      "step": 285
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.6292755264208656e-05,
      "loss": 1.432,
      "step": 286
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.626579816335877e-05,
      "loss": 1.3085,
      "step": 287
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.6238765889637704e-05,
      "loss": 1.2833,
      "step": 288
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.6211658767360667e-05,
      "loss": 1.5326,
      "step": 289
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.6184477121740848e-05,
      "loss": 1.0384,
      "step": 290
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.6157221278885523e-05,
      "loss": 1.3251,
      "step": 291
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.612989156579213e-05,
      "loss": 1.2479,
      "step": 292
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.610248831034435e-05,
      "loss": 1.1987,
      "step": 293
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.607501184130819e-05,
      "loss": 1.3294,
      "step": 294
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.6047462488328017e-05,
      "loss": 1.2128,
      "step": 295
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.6019840581922604e-05,
      "loss": 1.3674,
      "step": 296
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.599214645348118e-05,
      "loss": 1.2131,
      "step": 297
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.5964380435259448e-05,
      "loss": 1.2444,
      "step": 298
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.5936542860375594e-05,
      "loss": 1.132,
      "step": 299
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.5908634062806285e-05,
      "loss": 1.1912,
      "step": 300
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.588065437738268e-05,
      "loss": 1.2514,
      "step": 301
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.585260413978641e-05,
      "loss": 1.3142,
      "step": 302
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.5824483686545517e-05,
      "loss": 1.0959,
      "step": 303
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.5796293355030476e-05,
      "loss": 1.2714,
      "step": 304
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.5768033483450088e-05,
      "loss": 1.4746,
      "step": 305
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.5739704410847475e-05,
      "loss": 1.2518,
      "step": 306
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.5711306477095962e-05,
      "loss": 1.3225,
      "step": 307
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.568284002289504e-05,
      "loss": 1.2877,
      "step": 308
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.5654305389766257e-05,
      "loss": 1.2796,
      "step": 309
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.562570292004913e-05,
      "loss": 1.1916,
      "step": 310
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.5597032956897028e-05,
      "loss": 1.1361,
      "step": 311
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.5568295844273064e-05,
      "loss": 1.0916,
      "step": 312
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.553949192694597e-05,
      "loss": 1.1436,
      "step": 313
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.551062155048595e-05,
      "loss": 1.1519,
      "step": 314
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.5481685061260547e-05,
      "loss": 0.9804,
      "step": 315
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.5452682806430473e-05,
      "loss": 1.0737,
      "step": 316
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.5423615133945457e-05,
      "loss": 1.1061,
      "step": 317
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.5394482392540066e-05,
      "loss": 1.1782,
      "step": 318
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.5365284931729513e-05,
      "loss": 1.2215,
      "step": 319
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.5336023101805486e-05,
      "loss": 1.2741,
      "step": 320
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.5306697253831914e-05,
      "loss": 1.0344,
      "step": 321
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.5277307739640787e-05,
      "loss": 1.0388,
      "step": 322
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.524785491182791e-05,
      "loss": 1.0273,
      "step": 323
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.5218339123748682e-05,
      "loss": 0.9247,
      "step": 324
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.5188760729513865e-05,
      "loss": 1.12,
      "step": 325
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.5159120083985319e-05,
      "loss": 1.0136,
      "step": 326
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.5129417542771761e-05,
      "loss": 1.1744,
      "step": 327
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.5099653462224492e-05,
      "loss": 0.966,
      "step": 328
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.506982819943311e-05,
      "loss": 1.1586,
      "step": 329
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.503994211222125e-05,
      "loss": 0.9398,
      "step": 330
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.5009995559142268e-05,
      "loss": 1.1101,
      "step": 331
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.4979988899474955e-05,
      "loss": 1.1331,
      "step": 332
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.494992249321922e-05,
      "loss": 1.0797,
      "step": 333
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.4919796701091767e-05,
      "loss": 1.1103,
      "step": 334
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.4889611884521777e-05,
      "loss": 0.9671,
      "step": 335
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.4859368405646568e-05,
      "loss": 1.1242,
      "step": 336
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.4829066627307246e-05,
      "loss": 1.0974,
      "step": 337
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.4798706913044357e-05,
      "loss": 1.1175,
      "step": 338
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.476828962709352e-05,
      "loss": 1.0538,
      "step": 339
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.4737815134381066e-05,
      "loss": 1.0735,
      "step": 340
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.4707283800519647e-05,
      "loss": 1.3825,
      "step": 341
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.4676695991803869e-05,
      "loss": 1.1401,
      "step": 342
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.4646052075205874e-05,
      "loss": 1.1364,
      "step": 343
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.4615352418370958e-05,
      "loss": 1.0251,
      "step": 344
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.4584597389613144e-05,
      "loss": 1.1488,
      "step": 345
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.4553787357910774e-05,
      "loss": 1.0476,
      "step": 346
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.452292269290208e-05,
      "loss": 0.9702,
      "step": 347
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.4492003764880744e-05,
      "loss": 1.0864,
      "step": 348
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.4461030944791464e-05,
      "loss": 1.0446,
      "step": 349
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.4430004604225493e-05,
      "loss": 1.16,
      "step": 350
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.4398925115416196e-05,
      "loss": 1.014,
      "step": 351
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.4367792851234566e-05,
      "loss": 1.0546,
      "step": 352
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.4336608185184765e-05,
      "loss": 1.0048,
      "step": 353
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.4305371491399638e-05,
      "loss": 1.0999,
      "step": 354
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.427408314463622e-05,
      "loss": 1.1723,
      "step": 355
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.4242743520271249e-05,
      "loss": 1.0517,
      "step": 356
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.4211352994296655e-05,
      "loss": 1.1067,
      "step": 357
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.417991194331505e-05,
      "loss": 1.0712,
      "step": 358
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.4148420744535214e-05,
      "loss": 1.1264,
      "step": 359
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.4116879775767567e-05,
      "loss": 1.0318,
      "step": 360
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.4085289415419632e-05,
      "loss": 0.9812,
      "step": 361
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.4053650042491507e-05,
      "loss": 1.0524,
      "step": 362
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.4021962036571301e-05,
      "loss": 1.1601,
      "step": 363
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.3990225777830595e-05,
      "loss": 1.0957,
      "step": 364
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.3958441647019877e-05,
      "loss": 0.8799,
      "step": 365
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.3926610025463967e-05,
      "loss": 1.205,
      "step": 366
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.3894731295057446e-05,
      "loss": 0.9758,
      "step": 367
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.3862805838260087e-05,
      "loss": 1.0533,
      "step": 368
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.383083403809224e-05,
      "loss": 1.0062,
      "step": 369
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.3798816278130268e-05,
      "loss": 1.0966,
      "step": 370
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.3766752942501911e-05,
      "loss": 1.0667,
      "step": 371
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.3734644415881708e-05,
      "loss": 1.1089,
      "step": 372
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.3702491083486366e-05,
      "loss": 1.0091,
      "step": 373
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.3670293331070142e-05,
      "loss": 1.075,
      "step": 374
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.3638051544920217e-05,
      "loss": 1.1298,
      "step": 375
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.3605766111852052e-05,
      "loss": 0.9582,
      "step": 376
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.3573437419204765e-05,
      "loss": 1.2061,
      "step": 377
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.3541065854836464e-05,
      "loss": 1.0294,
      "step": 378
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.3508651807119609e-05,
      "loss": 1.1752,
      "step": 379
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.3476195664936347e-05,
      "loss": 1.1268,
      "step": 380
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.3443697817673842e-05,
      "loss": 0.996,
      "step": 381
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.3411158655219615e-05,
      "loss": 0.9984,
      "step": 382
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.337857856795685e-05,
      "loss": 1.044,
      "step": 383
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.334595794675973e-05,
      "loss": 1.2486,
      "step": 384
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.3313297182988722e-05,
      "loss": 1.1302,
      "step": 385
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.3280596668485919e-05,
      "loss": 1.0039,
      "step": 386
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.3247856795570295e-05,
      "loss": 1.0926,
      "step": 387
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.3215077957033032e-05,
      "loss": 1.0572,
      "step": 388
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.3182260546132795e-05,
      "loss": 1.0954,
      "step": 389
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.3149404956591008e-05,
      "loss": 1.1361,
      "step": 390
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.3116511582587144e-05,
      "loss": 1.0782,
      "step": 391
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.3083580818753985e-05,
      "loss": 1.0336,
      "step": 392
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.3050613060172893e-05,
      "loss": 1.1352,
      "step": 393
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.3017608702369065e-05,
      "loss": 1.066,
      "step": 394
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.2984568141306797e-05,
      "loss": 1.176,
      "step": 395
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.2951491773384722e-05,
      "loss": 1.1472,
      "step": 396
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.2918379995431062e-05,
      "loss": 0.9962,
      "step": 397
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.2885233204698866e-05,
      "loss": 1.073,
      "step": 398
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.2852051798861243e-05,
      "loss": 1.0615,
      "step": 399
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.2818836176006586e-05,
      "loss": 1.0729,
      "step": 400
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.278558673463381e-05,
      "loss": 1.2626,
      "step": 401
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.275230387364755e-05,
      "loss": 0.9681,
      "step": 402
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.2718987992353403e-05,
      "loss": 1.2131,
      "step": 403
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.2685639490453113e-05,
      "loss": 1.0451,
      "step": 404
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.2652258768039775e-05,
      "loss": 1.0072,
      "step": 405
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.2618846225593057e-05,
      "loss": 1.0376,
      "step": 406
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.2585402263974383e-05,
      "loss": 1.1437,
      "step": 407
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.2551927284422117e-05,
      "loss": 1.1727,
      "step": 408
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.2518421688546757e-05,
      "loss": 1.1347,
      "step": 409
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.2484885878326114e-05,
      "loss": 1.0493,
      "step": 410
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.2451320256100497e-05,
      "loss": 1.2051,
      "step": 411
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.2417725224567872e-05,
      "loss": 1.1211,
      "step": 412
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.2384101186779042e-05,
      "loss": 1.0312,
      "step": 413
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.235044854613281e-05,
      "loss": 1.0435,
      "step": 414
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.231676770637113e-05,
      "loss": 0.9924,
      "step": 415
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.2283059071574278e-05,
      "loss": 0.9695,
      "step": 416
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.224932304615599e-05,
      "loss": 0.9465,
      "step": 417
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.221556003485862e-05,
      "loss": 1.0483,
      "step": 418
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.218177044274828e-05,
      "loss": 1.1043,
      "step": 419
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.2147954675209982e-05,
      "loss": 1.1211,
      "step": 420
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.2114113137942767e-05,
      "loss": 1.1638,
      "step": 421
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.2080246236954856e-05,
      "loss": 0.9418,
      "step": 422
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.2046354378558753e-05,
      "loss": 0.9473,
      "step": 423
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.2012437969366397e-05,
      "loss": 1.1651,
      "step": 424
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.1978497416284265e-05,
      "loss": 1.017,
      "step": 425
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.1944533126508491e-05,
      "loss": 1.082,
      "step": 426
    },
    {
      "epoch": 1.37,
      "learning_rate": 1.191054550752e-05,
      "loss": 1.0173,
      "step": 427
    },
    {
      "epoch": 1.37,
      "learning_rate": 1.187653496707959e-05,
      "loss": 1.0361,
      "step": 428
    },
    {
      "epoch": 1.37,
      "learning_rate": 1.1842501913223066e-05,
      "loss": 1.0836,
      "step": 429
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.1808446754256329e-05,
      "loss": 1.0667,
      "step": 430
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.1774369898750484e-05,
      "loss": 1.0376,
      "step": 431
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.1740271755536939e-05,
      "loss": 1.091,
      "step": 432
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.1706152733702489e-05,
      "loss": 1.0371,
      "step": 433
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.167201324258443e-05,
      "loss": 1.0681,
      "step": 434
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.1637853691765625e-05,
      "loss": 0.9842,
      "step": 435
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.16036744910696e-05,
      "loss": 1.0513,
      "step": 436
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.1569476050555637e-05,
      "loss": 1.0221,
      "step": 437
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.153525878051383e-05,
      "loss": 1.1218,
      "step": 438
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.1501023091460187e-05,
      "loss": 1.1723,
      "step": 439
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.146676939413169e-05,
      "loss": 0.9689,
      "step": 440
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.143249809948138e-05,
      "loss": 1.0378,
      "step": 441
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.139820961867341e-05,
      "loss": 0.8955,
      "step": 442
    },
    {
      "epoch": 1.42,
      "learning_rate": 1.1363904363078126e-05,
      "loss": 1.0282,
      "step": 443
    },
    {
      "epoch": 1.42,
      "learning_rate": 1.1329582744267125e-05,
      "loss": 1.0508,
      "step": 444
    },
    {
      "epoch": 1.42,
      "learning_rate": 1.1295245174008317e-05,
      "loss": 1.0539,
      "step": 445
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.1260892064260995e-05,
      "loss": 1.0883,
      "step": 446
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.1226523827170876e-05,
      "loss": 1.1207,
      "step": 447
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.1192140875065167e-05,
      "loss": 1.0696,
      "step": 448
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.1157743620447611e-05,
      "loss": 1.1171,
      "step": 449
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.112333247599356e-05,
      "loss": 1.0022,
      "step": 450
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.1088907854544985e-05,
      "loss": 1.2792,
      "step": 451
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.1054470169105564e-05,
      "loss": 1.0204,
      "step": 452
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.1020019832835694e-05,
      "loss": 1.0105,
      "step": 453
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.0985557259047557e-05,
      "loss": 1.0508,
      "step": 454
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.0951082861200142e-05,
      "loss": 0.9506,
      "step": 455
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.091659705289431e-05,
      "loss": 1.0708,
      "step": 456
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.088210024786781e-05,
      "loss": 1.0901,
      "step": 457
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.084759285999032e-05,
      "loss": 1.1882,
      "step": 458
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.0813075303258483e-05,
      "loss": 1.1313,
      "step": 459
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.0778547991790946e-05,
      "loss": 1.0493,
      "step": 460
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.0744011339823389e-05,
      "loss": 1.1251,
      "step": 461
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.0709465761703542e-05,
      "loss": 1.0042,
      "step": 462
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.0674911671886236e-05,
      "loss": 0.9988,
      "step": 463
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.0640349484928413e-05,
      "loss": 1.0945,
      "step": 464
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.0605779615484167e-05,
      "loss": 1.0302,
      "step": 465
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.057120247829975e-05,
      "loss": 1.0194,
      "step": 466
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.053661848820862e-05,
      "loss": 0.9858,
      "step": 467
    },
    {
      "epoch": 1.5,
      "learning_rate": 1.050202806012644e-05,
      "loss": 1.1142,
      "step": 468
    },
    {
      "epoch": 1.5,
      "learning_rate": 1.0467431609046116e-05,
      "loss": 1.0762,
      "step": 469
    },
    {
      "epoch": 1.5,
      "learning_rate": 1.0432829550032818e-05,
      "loss": 1.1463,
      "step": 470
    },
    {
      "epoch": 1.51,
      "learning_rate": 1.0398222298218996e-05,
      "loss": 1.1158,
      "step": 471
    },
    {
      "epoch": 1.51,
      "learning_rate": 1.0363610268799393e-05,
      "loss": 1.0152,
      "step": 472
    },
    {
      "epoch": 1.51,
      "learning_rate": 1.0328993877026075e-05,
      "loss": 0.9611,
      "step": 473
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.0294373538203439e-05,
      "loss": 1.1085,
      "step": 474
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.0259749667683252e-05,
      "loss": 1.1732,
      "step": 475
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.0225122680859633e-05,
      "loss": 1.0898,
      "step": 476
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.0190492993164101e-05,
      "loss": 1.0462,
      "step": 477
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.0155861020060566e-05,
      "loss": 0.9384,
      "step": 478
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.0121227177040373e-05,
      "loss": 1.1798,
      "step": 479
    },
    {
      "epoch": 1.54,
      "learning_rate": 1.008659187961729e-05,
      "loss": 1.013,
      "step": 480
    },
    {
      "epoch": 1.54,
      "learning_rate": 1.0051955543322533e-05,
      "loss": 1.0267,
      "step": 481
    },
    {
      "epoch": 1.54,
      "learning_rate": 1.0017318583699786e-05,
      "loss": 1.0784,
      "step": 482
    },
    {
      "epoch": 1.55,
      "learning_rate": 9.982681416300217e-06,
      "loss": 0.942,
      "step": 483
    },
    {
      "epoch": 1.55,
      "learning_rate": 9.948044456677472e-06,
      "loss": 1.1899,
      "step": 484
    },
    {
      "epoch": 1.55,
      "learning_rate": 9.913408120382714e-06,
      "loss": 0.9433,
      "step": 485
    },
    {
      "epoch": 1.56,
      "learning_rate": 9.878772822959628e-06,
      "loss": 1.0775,
      "step": 486
    },
    {
      "epoch": 1.56,
      "learning_rate": 9.844138979939437e-06,
      "loss": 1.2114,
      "step": 487
    },
    {
      "epoch": 1.56,
      "learning_rate": 9.809507006835904e-06,
      "loss": 1.0274,
      "step": 488
    },
    {
      "epoch": 1.56,
      "learning_rate": 9.774877319140372e-06,
      "loss": 1.197,
      "step": 489
    },
    {
      "epoch": 1.57,
      "learning_rate": 9.74025033231675e-06,
      "loss": 1.1706,
      "step": 490
    },
    {
      "epoch": 1.57,
      "learning_rate": 9.70562646179656e-06,
      "loss": 0.97,
      "step": 491
    },
    {
      "epoch": 1.57,
      "learning_rate": 9.67100612297393e-06,
      "loss": 1.1402,
      "step": 492
    },
    {
      "epoch": 1.58,
      "learning_rate": 9.63638973120061e-06,
      "loss": 1.0599,
      "step": 493
    },
    {
      "epoch": 1.58,
      "learning_rate": 9.601777701781009e-06,
      "loss": 0.8845,
      "step": 494
    },
    {
      "epoch": 1.58,
      "learning_rate": 9.567170449967183e-06,
      "loss": 1.0115,
      "step": 495
    },
    {
      "epoch": 1.59,
      "learning_rate": 9.532568390953886e-06,
      "loss": 1.173,
      "step": 496
    },
    {
      "epoch": 1.59,
      "learning_rate": 9.497971939873567e-06,
      "loss": 1.0143,
      "step": 497
    },
    {
      "epoch": 1.59,
      "learning_rate": 9.463381511791386e-06,
      "loss": 0.9509,
      "step": 498
    },
    {
      "epoch": 1.6,
      "learning_rate": 9.428797521700254e-06,
      "loss": 1.0972,
      "step": 499
    },
    {
      "epoch": 1.6,
      "learning_rate": 9.394220384515836e-06,
      "loss": 0.8946,
      "step": 500
    },
    {
      "epoch": 1.6,
      "learning_rate": 9.35965051507159e-06,
      "loss": 0.8823,
      "step": 501
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.325088328113769e-06,
      "loss": 1.0511,
      "step": 502
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.290534238296462e-06,
      "loss": 1.0009,
      "step": 503
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.255988660176613e-06,
      "loss": 1.1827,
      "step": 504
    },
    {
      "epoch": 1.62,
      "learning_rate": 9.221452008209057e-06,
      "loss": 1.2179,
      "step": 505
    },
    {
      "epoch": 1.62,
      "learning_rate": 9.186924696741519e-06,
      "loss": 0.9779,
      "step": 506
    },
    {
      "epoch": 1.62,
      "learning_rate": 9.152407140009684e-06,
      "loss": 1.1262,
      "step": 507
    },
    {
      "epoch": 1.63,
      "learning_rate": 9.117899752132193e-06,
      "loss": 1.0488,
      "step": 508
    },
    {
      "epoch": 1.63,
      "learning_rate": 9.083402947105688e-06,
      "loss": 1.0966,
      "step": 509
    },
    {
      "epoch": 1.63,
      "learning_rate": 9.04891713879986e-06,
      "loss": 1.059,
      "step": 510
    },
    {
      "epoch": 1.64,
      "learning_rate": 9.014442740952446e-06,
      "loss": 1.1366,
      "step": 511
    },
    {
      "epoch": 1.64,
      "learning_rate": 8.979980167164311e-06,
      "loss": 1.007,
      "step": 512
    },
    {
      "epoch": 1.64,
      "learning_rate": 8.945529830894439e-06,
      "loss": 0.9041,
      "step": 513
    },
    {
      "epoch": 1.64,
      "learning_rate": 8.911092145455015e-06,
      "loss": 1.129,
      "step": 514
    },
    {
      "epoch": 1.65,
      "learning_rate": 8.876667524006442e-06,
      "loss": 1.0292,
      "step": 515
    },
    {
      "epoch": 1.65,
      "learning_rate": 8.84225637955239e-06,
      "loss": 0.9756,
      "step": 516
    },
    {
      "epoch": 1.65,
      "learning_rate": 8.80785912493484e-06,
      "loss": 0.9823,
      "step": 517
    },
    {
      "epoch": 1.66,
      "learning_rate": 8.773476172829127e-06,
      "loss": 1.07,
      "step": 518
    },
    {
      "epoch": 1.66,
      "learning_rate": 8.739107935739004e-06,
      "loss": 1.0255,
      "step": 519
    },
    {
      "epoch": 1.66,
      "learning_rate": 8.704754825991684e-06,
      "loss": 1.088,
      "step": 520
    },
    {
      "epoch": 1.67,
      "learning_rate": 8.670417255732876e-06,
      "loss": 0.9728,
      "step": 521
    },
    {
      "epoch": 1.67,
      "learning_rate": 8.636095636921878e-06,
      "loss": 1.1451,
      "step": 522
    },
    {
      "epoch": 1.67,
      "learning_rate": 8.601790381326593e-06,
      "loss": 0.9947,
      "step": 523
    },
    {
      "epoch": 1.68,
      "learning_rate": 8.56750190051862e-06,
      "loss": 1.0182,
      "step": 524
    },
    {
      "epoch": 1.68,
      "learning_rate": 8.533230605868314e-06,
      "loss": 1.123,
      "step": 525
    },
    {
      "epoch": 1.68,
      "learning_rate": 8.498976908539817e-06,
      "loss": 0.8985,
      "step": 526
    },
    {
      "epoch": 1.69,
      "learning_rate": 8.464741219486175e-06,
      "loss": 1.0555,
      "step": 527
    },
    {
      "epoch": 1.69,
      "learning_rate": 8.430523949444367e-06,
      "loss": 0.9661,
      "step": 528
    },
    {
      "epoch": 1.69,
      "learning_rate": 8.396325508930398e-06,
      "loss": 0.8877,
      "step": 529
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.36214630823438e-06,
      "loss": 1.0659,
      "step": 530
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.327986757415571e-06,
      "loss": 1.1536,
      "step": 531
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.293847266297513e-06,
      "loss": 1.0392,
      "step": 532
    },
    {
      "epoch": 1.71,
      "learning_rate": 8.259728244463065e-06,
      "loss": 1.0649,
      "step": 533
    },
    {
      "epoch": 1.71,
      "learning_rate": 8.22563010124952e-06,
      "loss": 1.1273,
      "step": 534
    },
    {
      "epoch": 1.71,
      "learning_rate": 8.191553245743675e-06,
      "loss": 1.0693,
      "step": 535
    },
    {
      "epoch": 1.72,
      "learning_rate": 8.157498086776937e-06,
      "loss": 1.042,
      "step": 536
    },
    {
      "epoch": 1.72,
      "learning_rate": 8.123465032920415e-06,
      "loss": 1.0062,
      "step": 537
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 8.089454492480004e-06, |
| "loss": 1.0604, |
| "step": 538 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 8.05546687349151e-06, |
| "loss": 0.8809, |
| "step": 539 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 8.02150258371574e-06, |
| "loss": 1.1149, |
| "step": 540 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 7.987562030633604e-06, |
| "loss": 1.0213, |
| "step": 541 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 7.953645621441245e-06, |
| "loss": 1.0774, |
| "step": 542 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 7.919753763045148e-06, |
| "loss": 1.1105, |
| "step": 543 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 7.885886862057233e-06, |
| "loss": 1.0765, |
| "step": 544 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 7.852045324790023e-06, |
| "loss": 1.0284, |
| "step": 545 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 7.818229557251722e-06, |
| "loss": 0.957, |
| "step": 546 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 7.784439965141381e-06, |
| "loss": 1.0343, |
| "step": 547 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 7.750676953844011e-06, |
| "loss": 1.041, |
| "step": 548 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 7.716940928425724e-06, |
| "loss": 0.9928, |
| "step": 549 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 7.683232293628873e-06, |
| "loss": 1.047, |
| "step": 550 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 7.649551453867192e-06, |
| "loss": 1.011, |
| "step": 551 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 7.615898813220958e-06, |
| "loss": 0.9905, |
| "step": 552 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 7.5822747754321315e-06, |
| "loss": 1.1501, |
| "step": 553 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 7.548679743899505e-06, |
| "loss": 1.086, |
| "step": 554 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 7.51511412167389e-06, |
| "loss": 0.9918, |
| "step": 555 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 7.4815783114532485e-06, |
| "loss": 1.0353, |
| "step": 556 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 7.448072715577885e-06, |
| "loss": 0.9666, |
| "step": 557 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 7.414597736025621e-06, |
| "loss": 1.1618, |
| "step": 558 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 7.381153774406944e-06, |
| "loss": 0.9998, |
| "step": 559 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 7.3477412319602306e-06, |
| "loss": 1.0338, |
| "step": 560 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 7.3143605095468915e-06, |
| "loss": 1.0355, |
| "step": 561 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 7.281012007646595e-06, |
| "loss": 1.0918, |
| "step": 562 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 7.24769612635245e-06, |
| "loss": 1.0435, |
| "step": 563 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 7.214413265366194e-06, |
| "loss": 1.0664, |
| "step": 564 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 7.181163823993418e-06, |
| "loss": 1.0416, |
| "step": 565 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 7.147948201138761e-06, |
| "loss": 1.0615, |
| "step": 566 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 7.114766795301138e-06, |
| "loss": 1.0638, |
| "step": 567 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 7.081620004568943e-06, |
| "loss": 1.0645, |
| "step": 568 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 7.048508226615282e-06, |
| "loss": 1.057, |
| "step": 569 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 7.015431858693209e-06, |
| "loss": 0.9833, |
| "step": 570 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 6.982391297630939e-06, |
| "loss": 1.0721, |
| "step": 571 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 6.94938693982711e-06, |
| "loss": 1.0194, |
| "step": 572 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 6.9164191812460194e-06, |
| "loss": 1.0491, |
| "step": 573 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 6.883488417412858e-06, |
| "loss": 1.1079, |
| "step": 574 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 6.850595043408997e-06, |
| "loss": 1.1694, |
| "step": 575 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 6.817739453867209e-06, |
| "loss": 0.9875, |
| "step": 576 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 6.784922042966968e-06, |
| "loss": 0.9197, |
| "step": 577 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 6.752143204429709e-06, |
| "loss": 1.0801, |
| "step": 578 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 6.719403331514085e-06, |
| "loss": 0.9948, |
| "step": 579 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 6.686702817011277e-06, |
| "loss": 1.1176, |
| "step": 580 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 6.654042053240275e-06, |
| "loss": 1.0743, |
| "step": 581 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 6.6214214320431534e-06, |
| "loss": 1.1574, |
| "step": 582 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 6.5888413447803905e-06, |
| "loss": 1.0499, |
| "step": 583 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 6.55630218232616e-06, |
| "loss": 1.0335, |
| "step": 584 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 6.523804335063655e-06, |
| "loss": 1.0262, |
| "step": 585 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 6.491348192880395e-06, |
| "loss": 1.0413, |
| "step": 586 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 6.458934145163539e-06, |
| "loss": 0.9775, |
| "step": 587 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 6.426562580795242e-06, |
| "loss": 1.0768, |
| "step": 588 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 6.39423388814795e-06, |
| "loss": 1.1318, |
| "step": 589 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 6.361948455079785e-06, |
| "loss": 1.038, |
| "step": 590 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 6.329706668929861e-06, |
| "loss": 1.0734, |
| "step": 591 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 6.297508916513636e-06, |
| "loss": 1.0483, |
| "step": 592 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 6.265355584118297e-06, |
| "loss": 1.1054, |
| "step": 593 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 6.233247057498093e-06, |
| "loss": 0.9653, |
| "step": 594 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 6.201183721869735e-06, |
| "loss": 1.0618, |
| "step": 595 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 6.169165961907762e-06, |
| "loss": 1.0555, |
| "step": 596 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 6.137194161739915e-06, |
| "loss": 1.015, |
| "step": 597 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 6.105268704942555e-06, |
| "loss": 0.9668, |
| "step": 598 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 6.073389974536037e-06, |
| "loss": 1.0904, |
| "step": 599 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 6.041558352980126e-06, |
| "loss": 1.2238, |
| "step": 600 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 6.009774222169409e-06, |
| "loss": 0.9288, |
| "step": 601 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 5.978037963428702e-06, |
| "loss": 1.0405, |
| "step": 602 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 5.946349957508499e-06, |
| "loss": 1.0117, |
| "step": 603 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 5.91471058458037e-06, |
| "loss": 1.0103, |
| "step": 604 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 5.8831202242324345e-06, |
| "loss": 0.9376, |
| "step": 605 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 5.85157925546479e-06, |
| "loss": 0.9576, |
| "step": 606 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 5.8200880566849535e-06, |
| "loss": 1.0823, |
| "step": 607 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 5.788647005703349e-06, |
| "loss": 0.9424, |
| "step": 608 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 5.7572564797287525e-06, |
| "loss": 0.9721, |
| "step": 609 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 5.7259168553637815e-06, |
| "loss": 1.0454, |
| "step": 610 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 5.6946285086003636e-06, |
| "loss": 1.1216, |
| "step": 611 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 5.663391814815238e-06, |
| "loss": 1.0952, |
| "step": 612 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 5.632207148765438e-06, |
| "loss": 1.0676, |
| "step": 613 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 5.601074884583809e-06, |
| "loss": 1.1402, |
| "step": 614 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 5.569995395774508e-06, |
| "loss": 1.1028, |
| "step": 615 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 5.538969055208543e-06, |
| "loss": 1.0062, |
| "step": 616 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 5.5079962351192585e-06, |
| "loss": 1.115, |
| "step": 617 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 5.4770773070979225e-06, |
| "loss": 1.0301, |
| "step": 618 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 5.446212642089228e-06, |
| "loss": 1.1297, |
| "step": 619 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 5.415402610386859e-06, |
| "loss": 0.9464, |
| "step": 620 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 5.384647581629045e-06, |
| "loss": 0.9658, |
| "step": 621 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 5.353947924794129e-06, |
| "loss": 1.1283, |
| "step": 622 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 5.323304008196133e-06, |
| "loss": 1.1993, |
| "step": 623 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 5.292716199480354e-06, |
| "loss": 0.943, |
| "step": 624 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 5.262184865618938e-06, |
| "loss": 0.9043, |
| "step": 625 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 5.231710372906482e-06, |
| "loss": 0.7984, |
| "step": 626 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 5.201293086955646e-06, |
| "loss": 0.8364, |
| "step": 627 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 5.170933372692752e-06, |
| "loss": 0.7504, |
| "step": 628 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 5.140631594353434e-06, |
| "loss": 0.8231, |
| "step": 629 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 5.110388115478222e-06, |
| "loss": 0.815, |
| "step": 630 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 5.080203298908239e-06, |
| "loss": 0.8905, |
| "step": 631 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 5.050077506780783e-06, |
| "loss": 0.8251, |
| "step": 632 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 5.020011100525047e-06, |
| "loss": 0.851, |
| "step": 633 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 4.990004440857735e-06, |
| "loss": 0.9853, |
| "step": 634 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 4.960057887778754e-06, |
| "loss": 0.8867, |
| "step": 635 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 4.930171800566893e-06, |
| "loss": 0.8327, |
| "step": 636 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 4.900346537775513e-06, |
| "loss": 0.8015, |
| "step": 637 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 4.870582457228239e-06, |
| "loss": 0.7943, |
| "step": 638 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 4.840879916014683e-06, |
| "loss": 0.8705, |
| "step": 639 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 4.811239270486139e-06, |
| "loss": 0.9057, |
| "step": 640 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 4.781660876251322e-06, |
| "loss": 0.795, |
| "step": 641 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 4.752145088172094e-06, |
| "loss": 0.8026, |
| "step": 642 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 4.722692260359211e-06, |
| "loss": 0.8117, |
| "step": 643 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 4.693302746168088e-06, |
| "loss": 0.8619, |
| "step": 644 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 4.663976898194516e-06, |
| "loss": 0.8921, |
| "step": 645 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 4.634715068270491e-06, |
| "loss": 0.8118, |
| "step": 646 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 4.605517607459938e-06, |
| "loss": 0.8502, |
| "step": 647 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 4.576384866054546e-06, |
| "loss": 0.7814, |
| "step": 648 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 4.54731719356953e-06, |
| "loss": 0.668, |
| "step": 649 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 4.5183149387394566e-06, |
| "loss": 0.7598, |
| "step": 650 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 4.489378449514051e-06, |
| "loss": 0.7201, |
| "step": 651 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 4.460508073054033e-06, |
| "loss": 0.8612, |
| "step": 652 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 4.431704155726936e-06, |
| "loss": 0.8817, |
| "step": 653 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 4.402967043102974e-06, |
| "loss": 0.8476, |
| "step": 654 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 4.374297079950872e-06, |
| "loss": 0.8226, |
| "step": 655 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 4.345694610233744e-06, |
| "loss": 0.8367, |
| "step": 656 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 4.3171599771049625e-06, |
| "loss": 0.7801, |
| "step": 657 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 4.2886935229040375e-06, |
| "loss": 0.7861, |
| "step": 658 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 4.26029558915253e-06, |
| "loss": 0.9223, |
| "step": 659 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 4.23196651654991e-06, |
| "loss": 0.7652, |
| "step": 660 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 4.2037066449695275e-06, |
| "loss": 0.7641, |
| "step": 661 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 4.175516313454485e-06, |
| "loss": 0.8669, |
| "step": 662 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 4.1473958602135956e-06, |
| "loss": 0.9552, |
| "step": 663 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 4.11934562261732e-06, |
| "loss": 0.8949, |
| "step": 664 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 4.091365937193719e-06, |
| "loss": 0.76, |
| "step": 665 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 4.063457139624407e-06, |
| "loss": 0.8771, |
| "step": 666 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 4.035619564740555e-06, |
| "loss": 0.8563, |
| "step": 667 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 4.00785354651882e-06, |
| "loss": 0.7991, |
| "step": 668 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 3.980159418077403e-06, |
| "loss": 0.8959, |
| "step": 669 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 3.952537511671988e-06, |
| "loss": 0.7973, |
| "step": 670 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 3.924988158691812e-06, |
| "loss": 0.8514, |
| "step": 671 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 3.897511689655653e-06, |
| "loss": 0.7774, |
| "step": 672 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 3.870108434207877e-06, |
| "loss": 0.8148, |
| "step": 673 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 3.842778721114482e-06, |
| "loss": 0.8981, |
| "step": 674 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 3.815522878259153e-06, |
| "loss": 0.7853, |
| "step": 675 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 3.7883412326393352e-06, |
| "loss": 0.7976, |
| "step": 676 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 3.7612341103622984e-06, |
| "loss": 0.8359, |
| "step": 677 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 3.7342018366412336e-06, |
| "loss": 0.7882, |
| "step": 678 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 3.7072447357913477e-06, |
| "loss": 0.8734, |
| "step": 679 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 3.6803631312259724e-06, |
| "loss": 0.951, |
| "step": 680 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 3.653557345452685e-06, |
| "loss": 0.8285, |
| "step": 681 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 3.626827700069452e-06, |
| "loss": 0.8321, |
| "step": 682 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 3.600174515760733e-06, |
| "loss": 0.8634, |
| "step": 683 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 3.573598112293687e-06, |
| "loss": 0.9543, |
| "step": 684 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 3.547098808514279e-06, |
| "loss": 0.8536, |
| "step": 685 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 3.52067692234351e-06, |
| "loss": 0.891, |
| "step": 686 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 3.4943327707735586e-06, |
| "loss": 0.8742, |
| "step": 687 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 3.468066669864004e-06, |
| "loss": 0.8274, |
| "step": 688 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 3.44187893473802e-06, |
| "loss": 0.8308, |
| "step": 689 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 3.415769879578601e-06, |
| "loss": 0.7936, |
| "step": 690 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 3.3897398176247984e-06, |
| "loss": 0.8117, |
| "step": 691 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 3.363789061167949e-06, |
| "loss": 0.8561, |
| "step": 692 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 3.337917921547934e-06, |
| "loss": 0.7836, |
| "step": 693 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 3.312126709149447e-06, |
| "loss": 0.8442, |
| "step": 694 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 3.286415733398276e-06, |
| "loss": 0.8044, |
| "step": 695 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 3.2607853027575643e-06, |
| "loss": 0.8329, |
| "step": 696 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 3.2352357247241517e-06, |
| "loss": 0.8418, |
| "step": 697 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 3.2097673058248378e-06, |
| "loss": 0.8355, |
| "step": 698 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 3.1843803516127537e-06, |
| "loss": 0.8381, |
| "step": 699 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 3.159075166663653e-06, |
| "loss": 0.8766, |
| "step": 700 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 3.1338520545722852e-06, |
| "loss": 0.6602, |
| "step": 701 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 3.1087113179487394e-06, |
| "loss": 0.8645, |
| "step": 702 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 3.0836532584148237e-06, |
| "loss": 0.8401, |
| "step": 703 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 3.058678176600436e-06, |
| "loss": 0.8713, |
| "step": 704 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 3.0337863721399694e-06, |
| "loss": 0.8039, |
| "step": 705 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 3.008978143668707e-06, |
| "loss": 0.9484, |
| "step": 706 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 2.9842537888192414e-06, |
| "loss": 0.8308, |
| "step": 707 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 2.959613604217908e-06, |
| "loss": 0.8186, |
| "step": 708 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 2.9350578854812194e-06, |
| "loss": 0.7842, |
| "step": 709 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 2.9105869272123366e-06, |
| "loss": 0.917, |
| "step": 710 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 2.886201022997497e-06, |
| "loss": 0.764, |
| "step": 711 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 2.8619004654025418e-06, |
| "loss": 0.7358, |
| "step": 712 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 2.837685545969359e-06, |
| "loss": 0.8287, |
| "step": 713 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 2.8135565552124224e-06, |
| "loss": 0.7697, |
| "step": 714 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 2.789513782615283e-06, |
| "loss": 0.8188, |
| "step": 715 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 2.7655575166271067e-06, |
| "loss": 0.7441, |
| "step": 716 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 2.7416880446592087e-06, |
| "loss": 0.755, |
| "step": 717 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 2.717905653081608e-06, |
| "loss": 0.8617, |
| "step": 718 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 2.69421062721959e-06, |
| "loss": 0.8404, |
| "step": 719 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 2.6706032513502913e-06, |
| "loss": 0.9404, |
| "step": 720 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 2.6470838086992724e-06, |
| "loss": 0.8623, |
| "step": 721 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 2.623652581437135e-06, |
| "loss": 0.8585, |
| "step": 722 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 2.6003098506761316e-06, |
| "loss": 0.9078, |
| "step": 723 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 2.577055896466788e-06, |
| "loss": 0.8267, |
| "step": 724 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 2.5538909977945593e-06, |
| "loss": 0.786, |
| "step": 725 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 2.5308154325764543e-06, |
| "loss": 0.8536, |
| "step": 726 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 2.5078294776577372e-06, |
| "loss": 0.8552, |
| "step": 727 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 2.48493340880857e-06, |
| "loss": 0.8809, |
| "step": 728 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 2.462127500720737e-06, |
| "loss": 0.8399, |
| "step": 729 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 2.4394120270043233e-06, |
| "loss": 0.7897, |
| "step": 730 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 2.4167872601844476e-06, |
| "loss": 0.959, |
| "step": 731 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 2.3942534716979827e-06, |
| "loss": 0.8263, |
| "step": 732 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 2.371810931890316e-06, |
| "loss": 0.8896, |
| "step": 733 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 2.349459910012075e-06, |
| "loss": 0.8499, |
| "step": 734 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 2.327200674215937e-06, |
| "loss": 0.7637, |
| "step": 735 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 2.3050334915533713e-06, |
| "loss": 0.8722, |
| "step": 736 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 2.28295862797147e-06, |
| "loss": 0.8261, |
| "step": 737 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 2.260976348309737e-06, |
| "loss": 0.7776, |
| "step": 738 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 2.239086916296914e-06, |
| "loss": 0.8082, |
| "step": 739 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 2.217290594547822e-06, |
| "loss": 0.8048, |
| "step": 740 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 2.1955876445602008e-06, |
| "loss": 0.8619, |
| "step": 741 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 2.1739783267115888e-06, |
| "loss": 0.8038, |
| "step": 742 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 2.1524629002561803e-06, |
| "loss": 0.8358, |
| "step": 743 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 2.1310416233217246e-06, |
| "loss": 0.7924, |
| "step": 744 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 2.1097147529064286e-06, |
| "loss": 0.7898, |
| "step": 745 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 2.088482544875873e-06, |
| "loss": 0.7631, |
| "step": 746 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 2.067345253959938e-06, |
| "loss": 0.8176, |
| "step": 747 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 2.046303133749764e-06, |
| "loss": 0.8491, |
| "step": 748 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 2.0253564366946764e-06, |
| "loss": 0.7257, |
| "step": 749 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 2.0045054140992002e-06, |
| "loss": 0.8799, |
| "step": 750 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 1.98375031612e-06, |
| "loss": 0.9011, |
| "step": 751 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 1.96309139176292e-06, |
| "loss": 0.9194, |
| "step": 752 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 1.942528888879964e-06, |
| "loss": 0.8291, |
| "step": 753 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 1.922063054166341e-06, |
| "loss": 0.7967, |
| "step": 754 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 1.901694133157499e-06, |
| "loss": 0.8713, |
| "step": 755 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 1.8814223702261757e-06, |
| "loss": 0.7713, |
| "step": 756 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 1.8612480085794804e-06, |
| "loss": 0.878, |
| "step": 757 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 1.8411712902559597e-06, |
| "loss": 0.7099, |
| "step": 758 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 1.8211924561227001e-06, |
| "loss": 0.7599, |
| "step": 759 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 1.8013117458724416e-06, |
| "loss": 0.708, |
| "step": 760 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 1.7815293980206993e-06, |
| "loss": 0.837, |
| "step": 761 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 1.7618456499028968e-06, |
| "loss": 0.8756, |
| "step": 762 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 1.7422607376715362e-06, |
| "loss": 0.6622, |
| "step": 763 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 1.7227748962933343e-06, |
| "loss": 0.7935, |
| "step": 764 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 1.7033883595464407e-06, |
| "loss": 0.9484, |
| "step": 765 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 1.684101360017596e-06, |
| "loss": 0.8127, |
| "step": 766 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 1.6649141290993765e-06, |
| "loss": 0.7279, |
| "step": 767 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 1.6458268969873892e-06, |
| "loss": 0.8932, |
| "step": 768 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 1.6268398926775286e-06, |
| "loss": 0.8146, |
| "step": 769 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 1.6079533439632166e-06, |
| "loss": 0.8509, |
| "step": 770 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 1.5891674774326848e-06, |
| "loss": 0.7943, |
| "step": 771 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 1.5704825184662397e-06, |
| "loss": 0.8581, |
| "step": 772 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 1.5518986912335686e-06, |
| "loss": 0.8402, |
| "step": 773 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 1.5334162186910474e-06, |
| "loss": 0.8229, |
| "step": 774 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 1.5150353225790626e-06, |
| "loss": 0.8608, |
| "step": 775 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 1.4967562234193655e-06, |
| "loss": 0.7602, |
| "step": 776 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 1.4785791405123995e-06, |
| "loss": 0.8029, |
| "step": 777 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 1.4605042919347e-06, |
| "loss": 0.82, |
| "step": 778 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 1.4425318945362488e-06, |
| "loss": 0.8683, |
| "step": 779 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 1.4246621639378998e-06, |
| "loss": 0.8857, |
| "step": 780 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 1.406895314528771e-06, |
| "loss": 0.7438, |
| "step": 781 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 1.389231559463684e-06, |
| "loss": 0.7489, |
| "step": 782 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 1.3716711106606007e-06, |
| "loss": 0.8991, |
| "step": 783 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 1.3542141787980855e-06, |
| "loss": 0.7701, |
| "step": 784 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 1.3368609733127714e-06, |
| "loss": 0.8547, |
| "step": 785 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 1.3196117023968613e-06, |
| "loss": 0.8242, |
| "step": 786 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 1.3024665729956054e-06, |
| "loss": 0.8078, |
| "step": 787 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 1.2854257908048483e-06, |
| "loss": 0.7044, |
| "step": 788 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 1.2684895602685377e-06, |
| "loss": 0.8678, |
| "step": 789 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 1.2516580845762804e-06, |
| "loss": 0.8452, |
| "step": 790 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 1.2349315656609085e-06, |
| "loss": 0.8298, |
| "step": 791 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 1.218310204196046e-06, |
| "loss": 0.7758, |
| "step": 792 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 1.201794199593721e-06, |
| "loss": 0.8743, |
| "step": 793 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 1.1853837500019406e-06, |
| "loss": 0.8386, |
| "step": 794 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 1.169079052302352e-06, |
| "loss": 0.866, |
| "step": 795 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 1.1528803021078505e-06, |
| "loss": 0.8144, |
| "step": 796 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 1.1367876937602474e-06, |
| "loss": 0.8112, |
| "step": 797 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 1.120801420327935e-06, |
| "loss": 0.7749, |
| "step": 798 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 1.1049216736035673e-06, |
| "loss": 0.7661, |
| "step": 799 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 1.0891486441017652e-06, |
| "loss": 0.953, |
| "step": 800 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 1.07348252105683e-06, |
| "loss": 0.896, |
| "step": 801 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 1.0579234924204608e-06, |
| "loss": 0.9156, |
| "step": 802 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 1.04247174485952e-06, |
| "loss": 0.8013, |
| "step": 803 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 1.0271274637537764e-06, |
| "loss": 0.8123, |
| "step": 804 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 1.0118908331936915e-06, |
| "loss": 0.8831, |
| "step": 805 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 9.96762035978206e-07, |
| "loss": 0.816, |
| "step": 806 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 9.817412536125449e-07, |
| "loss": 0.7784, |
| "step": 807 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.66828666306049e-07, |
| "loss": 0.8352, |
| "step": 808 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.520244529700041e-07, |
| "loss": 0.8662, |
| "step": 809 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.37328791215496e-07, |
| "loss": 0.9414, |
| "step": 810 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.227418573512825e-07, |
| "loss": 0.8586, |
| "step": 811 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.082638263816756e-07, |
| "loss": 0.7699, |
| "step": 812 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 8.938948720044416e-07, |
| "loss": 0.6999, |
| "step": 813 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 8.796351666087266e-07, |
| "loss": 0.8929, |
| "step": 814 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 8.654848812729655e-07, |
| "loss": 0.7612, |
| "step": 815 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 8.514441857628619e-07, |
| "loss": 0.7071, |
| "step": 816 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 8.375132485293158e-07, |
| "loss": 0.8244, |
| "step": 817 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 8.236922367064359e-07, |
| "loss": 0.933, |
| "step": 818 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 8.099813161095094e-07, |
| "loss": 0.7496, |
| "step": 819 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 7.963806512330275e-07, |
| "loss": 0.7874, |
| "step": 820 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 7.828904052487019e-07, |
| "loss": 0.7135, |
| "step": 821 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 7.69510740003514e-07, |
| "loss": 0.8509, |
| "step": 822 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 7.562418160177765e-07, |
| "loss": 0.8332, |
| "step": 823 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 7.430837924831958e-07, |
| "loss": 0.8783, |
| "step": 824 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 7.300368272609692e-07, |
| "loss": 0.8069, |
| "step": 825 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 7.171010768798925e-07, |
| "loss": 0.8632, |
| "step": 826 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 7.042766965344782e-07, |
| "loss": 0.789, |
| "step": 827 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 6.915638400830959e-07, |
| "loss": 0.7937, |
| "step": 828 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 6.789626600461307e-07, |
| "loss": 0.786, |
| "step": 829 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 6.664733076041374e-07, |
| "loss": 0.7508, |
| "step": 830 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 6.540959325960494e-07, |
| "loss": 0.8023, |
| "step": 831 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 6.418306835173605e-07, |
| "loss": 0.6802, |
| "step": 832 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 6.296777075183602e-07, |
| "loss": 0.6633, |
| "step": 833 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 6.176371504023537e-07, |
| "loss": 0.9035, |
| "step": 834 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 6.057091566239226e-07, |
| "loss": 0.7717, |
| "step": 835 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 5.938938692871887e-07, |
| "loss": 0.5934, |
| "step": 836 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 5.821914301440956e-07, |
| "loss": 0.9244, |
| "step": 837 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 5.70601979592711e-07, |
| "loss": 0.8018, |
| "step": 838 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 5.591256566755399e-07, |
| "loss": 0.8674, |
| "step": 839 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 5.477625990778579e-07, |
| "loss": 0.8433, |
| "step": 840 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 5.365129431260574e-07, |
| "loss": 0.7924, |
| "step": 841 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 5.253768237860146e-07, |
| "loss": 0.7547, |
| "step": 842 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 5.143543746614688e-07, |
| "loss": 0.7739, |
| "step": 843 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 5.034457279924221e-07, |
| "loss": 0.8245, |
| "step": 844 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 4.926510146535434e-07, |
| "loss": 0.8931, |
| "step": 845 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 4.819703641526141e-07, |
| "loss": 0.826, |
| "step": 846 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 4.71403904628962e-07, |
| "loss": 0.9297, |
| "step": 847 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 4.6095176285192556e-07, |
| "loss": 0.9613, |
| "step": 848 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 4.506140642193391e-07, |
| "loss": 0.8248, |
| "step": 849 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 4.403909327560207e-07, |
| "loss": 0.8495, |
| "step": 850 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 4.3028249111228824e-07, |
| "loss": 0.8472, |
| "step": 851 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 4.202888605624944e-07, |
| "loss": 0.892, |
| "step": 852 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 4.104101610035527e-07, |
| "loss": 0.7369, |
| "step": 853 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 4.006465109535218e-07, |
| "loss": 0.721, |
| "step": 854 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 3.909980275501679e-07, |
| "loss": 0.7539, |
| "step": 855 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 3.8146482654956574e-07, |
| "loss": 0.77, |
| "step": 856 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 3.720470223247097e-07, |
| "loss": 0.8977, |
| "step": 857 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 3.6274472786413605e-07, |
| "loss": 0.7508, |
| "step": 858 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 3.535580547705797e-07, |
| "loss": 0.9097, |
| "step": 859 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 3.4448711325961834e-07, |
| "loss": 0.8055, |
| "step": 860 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 3.355320121583672e-07, |
| "loss": 0.7388, |
| "step": 861 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 3.266928589041607e-07, |
| "loss": 0.8352, |
| "step": 862 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 3.179697595432707e-07, |
| "loss": 0.8735, |
| "step": 863 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 3.093628187296294e-07, |
| "loss": 0.8795, |
| "step": 864 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 3.008721397235781e-07, |
| "loss": 0.8442, |
| "step": 865 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 2.924978243906251e-07, |
| "loss": 0.8908, |
| "step": 866 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 2.8423997320022765e-07, |
| "loss": 0.8749, |
| "step": 867 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 2.760986852245784e-07, |
| "loss": 0.7929, |
| "step": 868 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 2.680740581374286e-07, |
| "loss": 0.774, |
| "step": 869 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 2.6016618821290583e-07, |
| "loss": 0.9041, |
| "step": 870 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 2.5237517032436374e-07, |
| "loss": 0.7084, |
| "step": 871 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 2.4470109794324405e-07, |
| "loss": 0.745, |
| "step": 872 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 2.371440631379529e-07, |
| "loss": 0.8629, |
| "step": 873 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 2.297041565727598e-07, |
| "loss": 0.8275, |
| "step": 874 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 2.2238146750670264e-07, |
| "loss": 0.7469, |
| "step": 875 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 2.1517608379252985e-07, |
| "loss": 0.7013, |
| "step": 876 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 2.0808809187563118e-07, |
| "loss": 0.8515, |
| "step": 877 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 2.011175767930118e-07, |
| "loss": 0.8224, |
| "step": 878 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 1.942646221722655e-07, |
| "loss": 0.8434, |
| "step": 879 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 1.8752931023057753e-07, |
| "loss": 0.9319, |
| "step": 880 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 1.8091172177372994e-07, |
| "loss": 0.7354, |
| "step": 881 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 1.744119361951413e-07, |
| "loss": 0.8801, |
| "step": 882 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 1.6803003147490727e-07, |
| "loss": 0.7487, |
| "step": 883 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 1.617660841788682e-07, |
| "loss": 0.6959, |
| "step": 884 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 1.5562016945769088e-07, |
| "loss": 0.7871, |
| "step": 885 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 1.4959236104596265e-07, |
| "loss": 0.784, |
| "step": 886 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 1.4368273126131428e-07, |
| "loss": 0.7408, |
| "step": 887 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 1.378913510035429e-07, |
| "loss": 0.7482, |
| "step": 888 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 1.3221828975377382e-07, |
| "loss": 0.8331, |
| "step": 889 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 1.266636155736145e-07, |
| "loss": 0.8149, |
| "step": 890 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 1.21227395104343e-07, |
| "loss": 0.813, |
| "step": 891 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 1.1590969356611081e-07, |
| "loss": 0.7902, |
| "step": 892 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 1.1071057475715797e-07, |
| "loss": 0.7681, |
| "step": 893 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 1.0563010105304694e-07, |
| "loss": 0.8899, |
| "step": 894 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 1.0066833340591664e-07, |
| "loss": 0.8155, |
| "step": 895 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 9.582533134374849e-08, |
| "loss": 0.8696, |
| "step": 896 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 9.110115296965482e-08, |
| "loss": 0.8154, |
| "step": 897 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 8.64958549611783e-08, |
| "loss": 0.8744, |
| "step": 898 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.200949256961687e-08, |
| "loss": 0.8758, |
| "step": 899 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 7.764211961935664e-08, |
| "loss": 0.835, |
| "step": 900 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 7.339378850722889e-08, |
| "loss": 0.8079, |
| "step": 901 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 6.92645502018785e-08, |
| "loss": 0.8294, |
| "step": 902 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 6.525445424315546e-08, |
| "loss": 0.9074, |
| "step": 903 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 6.136354874151874e-08, |
| "loss": 0.7819, |
| "step": 904 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 5.7591880377459995e-08, |
| "loss": 0.7962, |
| "step": 905 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 5.393949440094415e-08, |
| "loss": 0.7038, |
| "step": 906 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 5.040643463086303e-08, |
| "loss": 0.7854, |
| "step": 907 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 4.6992743454513654e-08, |
| "loss": 0.7814, |
| "step": 908 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 4.369846182708748e-08, |
| "loss": 0.8807, |
| "step": 909 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 4.052362927118303e-08, |
| "loss": 0.783, |
| "step": 910 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 3.746828387632184e-08, |
| "loss": 0.797, |
| "step": 911 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 3.4532462298506596e-08, |
| "loss": 0.9004, |
| "step": 912 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 3.17161997597637e-08, |
| "loss": 0.7802, |
| "step": 913 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 2.9019530047736944e-08, |
| "loss": 0.8022, |
| "step": 914 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 2.6442485515273397e-08, |
| "loss": 0.7636, |
| "step": 915 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 2.3985097080033715e-08, |
| "loss": 0.8552, |
| "step": 916 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 2.1647394224129092e-08, |
| "loss": 0.7889, |
| "step": 917 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 1.942940499376045e-08, |
| "loss": 0.7047, |
| "step": 918 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 1.733115599888202e-08, |
| "loss": 0.8248, |
| "step": 919 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 1.535267241289051e-08, |
| "loss": 0.7434, |
| "step": 920 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 1.3493977972312e-08, |
| "loss": 0.821, |
| "step": 921 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 1.1755094976523273e-08, |
| "loss": 0.8172, |
| "step": 922 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 1.013604428748538e-08, |
| "loss": 0.8044, |
| "step": 923 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 8.636845329488274e-09, |
| "loss": 0.8212, |
| "step": 924 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 7.257516088923222e-09, |
| "loss": 0.736, |
| "step": 925 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 5.998073114062975e-09, |
| "loss": 0.8705, |
| "step": 926 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 4.858531514864151e-09, |
| "loss": 0.8007, |
| "step": 927 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 3.838904962788492e-09, |
| "loss": 0.9261, |
| "step": 928 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 2.9392056906352162e-09, |
| "loss": 0.9464, |
| "step": 929 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 2.1594444923978e-09, |
| "loss": 0.7596, |
| "step": 930 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 1.4996307231307517e-09, |
| "loss": 0.77, |
| "step": 931 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 9.59772298840811e-10, |
| "loss": 0.8575, |
| "step": 932 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 5.398756963881368e-10, |
| "loss": 0.898, |
| "step": 933 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 2.399459534130344e-10, |
| "loss": 0.8638, |
| "step": 934 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 5.998666827378153e-11, |
| "loss": 0.8143, |
| "step": 935 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 0.0, |
| "loss": 0.8272, |
| "step": 936 |
| }, |
| { |
| "epoch": 3.0, |
| "step": 936, |
| "total_flos": 9.559359129596723e+16, |
| "train_loss": 1.0894396532422457, |
| "train_runtime": 5839.8707, |
| "train_samples_per_second": 2.569, |
| "train_steps_per_second": 0.16 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 936, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 50000, |
| "total_flos": 9.559359129596723e+16, |
| "train_batch_size": 2, |
| "trial_name": null, |
| "trial_params": null |
| } |
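
The closing summary record above can be cross-checked against the per-step entries. Below is a minimal sketch of how one might do that in Python, assuming the log is saved as `trainer_state.json` (the conventional filename for this structure, matching what the Hugging Face `transformers` `Trainer` writes into its output directory); the accumulation/world-size factor inferred at the end is an assumption, since the file only records `train_batch_size`.

```python
import json

# Load the training log. The filename is an assumption; the Hugging Face
# Trainer conventionally writes this state as trainer_state.json.
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records carry epoch, learning_rate, and loss; the final
# record in log_history carries the aggregate training summary.
steps = [e for e in state["log_history"] if "loss" in e and "learning_rate" in e]
summary = state["log_history"][-1]

# Recompute steps/second from max_steps and the logged runtime:
# 936 / 5839.8707 ~= 0.160, matching train_steps_per_second.
steps_per_second = state["max_steps"] / summary["train_runtime"]
print(f"steps/s recomputed: {steps_per_second:.3f} "
      f"(logged: {summary['train_steps_per_second']})")

# samples/s divided by steps/s gives the effective batch size:
# 2.569 / 0.16 ~= 16, i.e. train_batch_size=2 times an (assumed)
# gradient-accumulation/world-size factor of 8.
eff_batch = summary["train_samples_per_second"] / summary["train_steps_per_second"]
print(f"effective batch size: {eff_batch:.1f}")

# The mean of the per-step losses should sit close to the logged
# train_loss, which is the accumulated loss divided by global_step.
mean_loss = sum(e["loss"] for e in steps) / len(steps)
print(f"mean step loss: {mean_loss:.4f} "
      f"(logged train_loss: {summary['train_loss']:.4f})")
```

Run against the file above, this should report roughly 0.160 steps/s, an effective batch size near 16, and a mean step loss close to the logged `train_loss` of 1.0894.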