{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "global_step": 2230,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 0.0,
      "loss": 16.546,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 0.0,
      "loss": 16.4393,
      "step": 2
    },
    {
      "epoch": 0.01,
      "learning_rate": 0.0,
      "loss": 16.6399,
      "step": 3
    },
    {
      "epoch": 0.01,
      "learning_rate": 0.0,
      "loss": 16.3894,
      "step": 4
    },
    {
      "epoch": 0.01,
      "learning_rate": 0.0,
      "loss": 16.4595,
      "step": 5
    },
    {
      "epoch": 0.01,
      "learning_rate": 0.0,
      "loss": 16.5615,
      "step": 6
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.0,
      "loss": 16.5432,
      "step": 7
    },
    {
      "epoch": 0.02,
      "learning_rate": 6e-07,
      "loss": 16.1849,
      "step": 8
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.2e-06,
      "loss": 16.1996,
      "step": 9
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.8e-06,
      "loss": 16.4122,
      "step": 10
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.8e-06,
      "loss": 16.2819,
      "step": 11
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.4e-06,
      "loss": 16.6967,
      "step": 12
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.9999999999999997e-06,
      "loss": 16.0174,
      "step": 13
    },
    {
      "epoch": 0.03,
      "learning_rate": 3.6e-06,
      "loss": 14.9526,
      "step": 14
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.2e-06,
      "loss": 14.131,
      "step": 15
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.8e-06,
      "loss": 14.1671,
      "step": 16
    },
    {
      "epoch": 0.04,
      "learning_rate": 5.399999999999999e-06,
      "loss": 13.1201,
      "step": 17
    },
    {
      "epoch": 0.04,
      "learning_rate": 5.999999999999999e-06,
      "loss": 13.0468,
      "step": 18
    },
    {
      "epoch": 0.04,
      "learning_rate": 6.599999999999999e-06,
      "loss": 12.3705,
      "step": 19
    },
    {
      "epoch": 0.04,
      "learning_rate": 7.2e-06,
      "loss": 11.8373,
      "step": 20
    },
    {
      "epoch": 0.05,
      "learning_rate": 7.799999999999998e-06,
      "loss": 11.4204,
      "step": 21
    },
    {
      "epoch": 0.05,
      "learning_rate": 8.4e-06,
      "loss": 11.0682,
      "step": 22
    },
    {
      "epoch": 0.05,
      "learning_rate": 8.999999999999999e-06,
      "loss": 10.5008,
      "step": 23
    },
    {
      "epoch": 0.05,
      "learning_rate": 9.6e-06,
      "loss": 10.2011,
      "step": 24
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.02e-05,
      "loss": 9.938,
      "step": 25
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.0799999999999998e-05,
      "loss": 9.5281,
      "step": 26
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.14e-05,
      "loss": 9.2004,
      "step": 27
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.1999999999999999e-05,
      "loss": 8.8697,
      "step": 28
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.26e-05,
      "loss": 8.5042,
      "step": 29
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.3199999999999997e-05,
      "loss": 8.5187,
      "step": 30
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.3799999999999998e-05,
      "loss": 8.2898,
      "step": 31
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.44e-05,
      "loss": 8.0075,
      "step": 32
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.4999999999999999e-05,
      "loss": 7.9263,
      "step": 33
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.5599999999999996e-05,
      "loss": 7.8395,
      "step": 34
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.6199999999999997e-05,
      "loss": 7.7316,
      "step": 35
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.68e-05,
      "loss": 7.6921,
      "step": 36
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.74e-05,
      "loss": 7.6817,
      "step": 37
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.7999999999999997e-05,
      "loss": 7.6584,
      "step": 38
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.8599999999999998e-05,
      "loss": 7.509,
      "step": 39
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.92e-05,
      "loss": 7.5629,
      "step": 40
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.98e-05,
      "loss": 7.6358,
      "step": 41
    },
    {
      "epoch": 0.09,
      "learning_rate": 2.04e-05,
      "loss": 7.5198,
      "step": 42
    },
    {
      "epoch": 0.1,
      "learning_rate": 2.1e-05,
      "loss": 7.4522,
      "step": 43
    },
    {
      "epoch": 0.1,
      "learning_rate": 2.1599999999999996e-05,
      "loss": 7.5332,
      "step": 44
    },
    {
      "epoch": 0.1,
      "learning_rate": 2.2199999999999998e-05,
      "loss": 7.4824,
      "step": 45
    },
    {
      "epoch": 0.1,
      "learning_rate": 2.28e-05,
      "loss": 7.462,
      "step": 46
    },
    {
      "epoch": 0.11,
      "learning_rate": 2.34e-05,
      "loss": 7.2823,
      "step": 47
    },
    {
      "epoch": 0.11,
      "learning_rate": 2.3999999999999997e-05,
      "loss": 7.3827,
      "step": 48
    },
    {
      "epoch": 0.11,
      "learning_rate": 2.4599999999999998e-05,
      "loss": 7.2548,
      "step": 49
    },
    {
      "epoch": 0.11,
      "learning_rate": 2.52e-05,
      "loss": 7.2403,
      "step": 50
    },
    {
      "epoch": 0.11,
      "learning_rate": 2.5799999999999997e-05,
      "loss": 7.3941,
      "step": 51
    },
    {
      "epoch": 0.12,
      "learning_rate": 2.6399999999999995e-05,
      "loss": 7.2777,
      "step": 52
    },
    {
      "epoch": 0.12,
      "learning_rate": 2.6999999999999996e-05,
      "loss": 7.3119,
      "step": 53
    },
    {
      "epoch": 0.12,
      "learning_rate": 2.7599999999999997e-05,
      "loss": 7.2278,
      "step": 54
    },
    {
      "epoch": 0.12,
      "learning_rate": 2.8199999999999998e-05,
      "loss": 7.1765,
      "step": 55
    },
    {
      "epoch": 0.13,
      "learning_rate": 2.88e-05,
      "loss": 7.0998,
      "step": 56
    },
    {
      "epoch": 0.13,
      "learning_rate": 2.94e-05,
      "loss": 7.1789,
      "step": 57
    },
    {
      "epoch": 0.13,
      "learning_rate": 2.9999999999999997e-05,
      "loss": 7.2313,
      "step": 58
    },
    {
      "epoch": 0.13,
      "learning_rate": 3.06e-05,
      "loss": 7.3444,
      "step": 59
    },
    {
      "epoch": 0.13,
      "learning_rate": 3.119999999999999e-05,
      "loss": 7.2777,
      "step": 60
    },
    {
      "epoch": 0.14,
      "learning_rate": 3.1799999999999994e-05,
      "loss": 7.1813,
      "step": 61
    },
    {
      "epoch": 0.14,
      "learning_rate": 3.2399999999999995e-05,
      "loss": 7.1605,
      "step": 62
    },
    {
      "epoch": 0.14,
      "learning_rate": 3.2999999999999996e-05,
      "loss": 7.1049,
      "step": 63
    },
    {
      "epoch": 0.14,
      "learning_rate": 3.36e-05,
      "loss": 7.2172,
      "step": 64
    },
    {
      "epoch": 0.15,
      "learning_rate": 3.42e-05,
      "loss": 7.0423,
      "step": 65
    },
    {
      "epoch": 0.15,
      "learning_rate": 3.48e-05,
      "loss": 6.9283,
      "step": 66
    },
    {
      "epoch": 0.15,
      "learning_rate": 3.539999999999999e-05,
      "loss": 6.9222,
      "step": 67
    },
    {
      "epoch": 0.15,
      "learning_rate": 3.5999999999999994e-05,
      "loss": 6.8744,
      "step": 68
    },
    {
      "epoch": 0.15,
      "learning_rate": 3.6599999999999995e-05,
      "loss": 6.6111,
      "step": 69
    },
    {
      "epoch": 0.16,
      "learning_rate": 3.7199999999999996e-05,
      "loss": 6.619,
      "step": 70
    },
    {
      "epoch": 0.16,
      "learning_rate": 3.78e-05,
      "loss": 6.618,
      "step": 71
    },
    {
      "epoch": 0.16,
      "learning_rate": 3.84e-05,
      "loss": 6.5127,
      "step": 72
    },
    {
      "epoch": 0.16,
      "learning_rate": 3.9e-05,
      "loss": 6.5249,
      "step": 73
    },
    {
      "epoch": 0.17,
      "learning_rate": 3.96e-05,
      "loss": 6.3357,
      "step": 74
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.02e-05,
      "loss": 6.1504,
      "step": 75
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.08e-05,
      "loss": 6.1607,
      "step": 76
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.14e-05,
      "loss": 6.0448,
      "step": 77
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.2e-05,
      "loss": 6.0503,
      "step": 78
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.259999999999999e-05,
      "loss": 5.8263,
      "step": 79
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.319999999999999e-05,
      "loss": 5.7994,
      "step": 80
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.3799999999999994e-05,
      "loss": 5.6549,
      "step": 81
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.4399999999999995e-05,
      "loss": 5.659,
      "step": 82
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.4999999999999996e-05,
      "loss": 5.6599,
      "step": 83
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.56e-05,
      "loss": 5.3888,
      "step": 84
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.62e-05,
      "loss": 5.6597,
      "step": 85
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.68e-05,
      "loss": 5.3514,
      "step": 86
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.7399999999999993e-05,
      "loss": 5.3263,
      "step": 87
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.7999999999999994e-05,
      "loss": 5.3934,
      "step": 88
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.7999999999999994e-05,
      "loss": 5.3523,
      "step": 89
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.8599999999999995e-05,
      "loss": 5.3707,
      "step": 90
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.9199999999999997e-05,
      "loss": 5.374,
      "step": 91
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.98e-05,
      "loss": 5.42,
      "step": 92
    },
    {
      "epoch": 0.21,
      "learning_rate": 5.04e-05,
      "loss": 5.1491,
      "step": 93
    },
    {
      "epoch": 0.21,
      "learning_rate": 5.1e-05,
      "loss": 5.2381,
      "step": 94
    },
    {
      "epoch": 0.21,
      "learning_rate": 5.1599999999999994e-05,
      "loss": 5.1267,
      "step": 95
    },
    {
      "epoch": 0.22,
      "learning_rate": 5.2199999999999995e-05,
      "loss": 5.1448,
      "step": 96
    },
    {
      "epoch": 0.22,
      "learning_rate": 5.279999999999999e-05,
      "loss": 5.1356,
      "step": 97
    },
    {
      "epoch": 0.22,
      "learning_rate": 5.339999999999999e-05,
      "loss": 5.1214,
      "step": 98
    },
    {
      "epoch": 0.22,
      "learning_rate": 5.399999999999999e-05,
      "loss": 5.0075,
      "step": 99
    },
    {
      "epoch": 0.22,
      "learning_rate": 5.459999999999999e-05,
      "loss": 5.1493,
      "step": 100
    },
    {
      "epoch": 0.23,
      "learning_rate": 5.519999999999999e-05,
      "loss": 5.0098,
      "step": 101
    },
    {
      "epoch": 0.23,
      "learning_rate": 5.5799999999999994e-05,
      "loss": 5.0883,
      "step": 102
    },
    {
      "epoch": 0.23,
      "learning_rate": 5.6399999999999995e-05,
      "loss": 5.1593,
      "step": 103
    },
    {
      "epoch": 0.23,
      "learning_rate": 5.6999999999999996e-05,
      "loss": 4.9999,
      "step": 104
    },
    {
      "epoch": 0.24,
      "learning_rate": 5.76e-05,
      "loss": 4.9341,
      "step": 105
    },
    {
      "epoch": 0.24,
      "learning_rate": 5.82e-05,
      "loss": 5.1407,
      "step": 106
    },
    {
      "epoch": 0.24,
      "learning_rate": 5.88e-05,
      "loss": 4.8794,
      "step": 107
    },
    {
      "epoch": 0.24,
      "learning_rate": 5.94e-05,
      "loss": 4.9841,
      "step": 108
    },
    {
      "epoch": 0.24,
      "learning_rate": 5.9999999999999995e-05,
      "loss": 5.0156,
      "step": 109
    },
    {
      "epoch": 0.25,
      "learning_rate": 6.0599999999999996e-05,
      "loss": 4.8778,
      "step": 110
    },
    {
      "epoch": 0.25,
      "learning_rate": 6.12e-05,
      "loss": 5.0093,
      "step": 111
    },
    {
      "epoch": 0.25,
      "learning_rate": 6.18e-05,
      "loss": 4.967,
      "step": 112
    },
    {
      "epoch": 0.25,
      "learning_rate": 6.239999999999999e-05,
      "loss": 4.9708,
      "step": 113
    },
    {
      "epoch": 0.26,
      "learning_rate": 6.299999999999999e-05,
      "loss": 4.9504,
      "step": 114
    },
    {
      "epoch": 0.26,
      "learning_rate": 6.359999999999999e-05,
      "loss": 4.9283,
      "step": 115
    },
    {
      "epoch": 0.26,
      "learning_rate": 6.419999999999999e-05,
      "loss": 4.9677,
      "step": 116
    },
    {
      "epoch": 0.26,
      "learning_rate": 6.479999999999999e-05,
      "loss": 4.7931,
      "step": 117
    },
    {
      "epoch": 0.26,
      "learning_rate": 6.539999999999999e-05,
      "loss": 4.825,
      "step": 118
    },
    {
      "epoch": 0.27,
      "learning_rate": 6.599999999999999e-05,
      "loss": 4.9354,
      "step": 119
    },
    {
      "epoch": 0.27,
      "learning_rate": 6.659999999999999e-05,
      "loss": 4.9477,
      "step": 120
    },
    {
      "epoch": 0.27,
      "learning_rate": 6.72e-05,
      "loss": 4.8924,
      "step": 121
    },
    {
      "epoch": 0.27,
      "learning_rate": 6.78e-05,
      "loss": 5.1611,
      "step": 122
    },
    {
      "epoch": 0.28,
      "learning_rate": 6.84e-05,
      "loss": 4.9055,
      "step": 123
    },
    {
      "epoch": 0.28,
      "learning_rate": 6.9e-05,
      "loss": 4.7754,
      "step": 124
    },
    {
      "epoch": 0.28,
      "learning_rate": 6.96e-05,
      "loss": 4.8363,
      "step": 125
    },
    {
      "epoch": 0.28,
      "learning_rate": 7.02e-05,
      "loss": 4.956,
      "step": 126
    },
    {
      "epoch": 0.28,
      "learning_rate": 7.079999999999999e-05,
      "loss": 4.827,
      "step": 127
    },
    {
      "epoch": 0.29,
      "learning_rate": 7.139999999999999e-05,
      "loss": 4.9409,
      "step": 128
    },
    {
      "epoch": 0.29,
      "learning_rate": 7.199999999999999e-05,
      "loss": 4.8033,
      "step": 129
    },
    {
      "epoch": 0.29,
      "learning_rate": 7.259999999999999e-05,
      "loss": 4.9295,
      "step": 130
    },
    {
      "epoch": 0.29,
      "learning_rate": 7.319999999999999e-05,
      "loss": 4.8662,
      "step": 131
    },
    {
      "epoch": 0.3,
      "learning_rate": 7.379999999999999e-05,
      "loss": 4.8764,
      "step": 132
    },
    {
      "epoch": 0.3,
      "learning_rate": 7.439999999999999e-05,
      "loss": 4.7186,
      "step": 133
    },
    {
      "epoch": 0.3,
      "learning_rate": 7.5e-05,
      "loss": 4.8272,
      "step": 134
    },
    {
      "epoch": 0.3,
      "learning_rate": 7.56e-05,
      "loss": 4.9537,
      "step": 135
    },
    {
      "epoch": 0.3,
      "learning_rate": 7.62e-05,
      "loss": 4.7755,
      "step": 136
    },
    {
      "epoch": 0.31,
      "learning_rate": 7.68e-05,
      "loss": 4.9047,
      "step": 137
    },
    {
      "epoch": 0.31,
      "learning_rate": 7.74e-05,
      "loss": 4.9293,
      "step": 138
    },
    {
      "epoch": 0.31,
      "learning_rate": 7.8e-05,
      "loss": 4.6925,
      "step": 139
    },
    {
      "epoch": 0.31,
      "learning_rate": 7.86e-05,
      "loss": 4.8088,
      "step": 140
    },
    {
      "epoch": 0.32,
      "learning_rate": 7.92e-05,
      "loss": 4.8417,
      "step": 141
    },
    {
      "epoch": 0.32,
      "learning_rate": 7.98e-05,
      "loss": 4.9157,
      "step": 142
    },
    {
      "epoch": 0.32,
      "learning_rate": 8.04e-05,
      "loss": 4.7353,
      "step": 143
    },
    {
      "epoch": 0.32,
      "learning_rate": 8.1e-05,
      "loss": 4.7543,
      "step": 144
    },
    {
      "epoch": 0.33,
      "learning_rate": 8.16e-05,
      "loss": 4.865,
      "step": 145
    },
    {
      "epoch": 0.33,
      "learning_rate": 8.22e-05,
      "loss": 4.6149,
      "step": 146
    },
    {
      "epoch": 0.33,
      "learning_rate": 8.28e-05,
      "loss": 4.9074,
      "step": 147
    },
    {
      "epoch": 0.33,
      "learning_rate": 8.34e-05,
      "loss": 4.9443,
      "step": 148
    },
    {
      "epoch": 0.33,
      "learning_rate": 8.4e-05,
      "loss": 4.7991,
      "step": 149
    },
    {
      "epoch": 0.34,
      "learning_rate": 8.459999999999998e-05,
      "loss": 4.9514,
      "step": 150
    },
    {
      "epoch": 0.34,
      "learning_rate": 8.519999999999998e-05,
      "loss": 4.7717,
      "step": 151
    },
    {
      "epoch": 0.34,
      "learning_rate": 8.579999999999998e-05,
      "loss": 4.7741,
      "step": 152
    },
    {
      "epoch": 0.34,
      "learning_rate": 8.639999999999999e-05,
      "loss": 4.6585,
      "step": 153
    },
    {
      "epoch": 0.35,
      "learning_rate": 8.699999999999999e-05,
      "loss": 4.8158,
      "step": 154
    },
    {
      "epoch": 0.35,
      "learning_rate": 8.759999999999999e-05,
      "loss": 4.8793,
      "step": 155
    },
    {
      "epoch": 0.35,
      "learning_rate": 8.819999999999999e-05,
      "loss": 4.6931,
      "step": 156
    },
    {
      "epoch": 0.35,
      "learning_rate": 8.879999999999999e-05,
      "loss": 4.7524,
      "step": 157
    },
    {
      "epoch": 0.35,
      "learning_rate": 8.939999999999999e-05,
      "loss": 4.9697,
      "step": 158
    },
    {
      "epoch": 0.36,
      "learning_rate": 8.999999999999999e-05,
      "loss": 4.7365,
      "step": 159
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.059999999999999e-05,
      "loss": 4.6159,
      "step": 160
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.12e-05,
      "loss": 4.7704,
      "step": 161
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.18e-05,
      "loss": 4.8444,
      "step": 162
    },
    {
      "epoch": 0.37,
      "learning_rate": 9.24e-05,
      "loss": 4.8326,
      "step": 163
    },
    {
      "epoch": 0.37,
      "learning_rate": 9.3e-05,
      "loss": 4.7848,
      "step": 164
    },
    {
      "epoch": 0.37,
      "learning_rate": 9.36e-05,
      "loss": 4.6493,
      "step": 165
    },
    {
      "epoch": 0.37,
      "learning_rate": 9.419999999999999e-05,
      "loss": 4.7066,
      "step": 166
    },
    {
      "epoch": 0.37,
      "learning_rate": 9.479999999999999e-05,
      "loss": 4.6681,
      "step": 167
    },
    {
      "epoch": 0.38,
      "learning_rate": 9.539999999999999e-05,
      "loss": 4.7274,
      "step": 168
    },
    {
      "epoch": 0.38,
      "learning_rate": 9.599999999999999e-05,
      "loss": 4.8622,
      "step": 169
    },
    {
      "epoch": 0.38,
      "learning_rate": 9.659999999999999e-05,
      "loss": 4.5877,
      "step": 170
    },
    {
      "epoch": 0.38,
      "learning_rate": 9.719999999999999e-05,
      "loss": 4.7318,
      "step": 171
    },
    {
      "epoch": 0.39,
      "learning_rate": 9.779999999999999e-05,
      "loss": 4.9793,
      "step": 172
    },
    {
      "epoch": 0.39,
      "learning_rate": 9.839999999999999e-05,
      "loss": 4.6915,
      "step": 173
    },
    {
      "epoch": 0.39,
      "learning_rate": 9.9e-05,
      "loss": 4.68,
      "step": 174
    },
    {
      "epoch": 0.39,
      "learning_rate": 9.96e-05,
      "loss": 4.7438,
      "step": 175
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.0001002,
      "loss": 4.8424,
      "step": 176
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.0001008,
      "loss": 4.7419,
      "step": 177
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.0001014,
      "loss": 4.7248,
      "step": 178
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.000102,
      "loss": 4.6635,
      "step": 179
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.0001026,
      "loss": 4.7799,
      "step": 180
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.00010319999999999999,
      "loss": 4.812,
      "step": 181
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.00010379999999999999,
      "loss": 4.6821,
      "step": 182
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.00010439999999999999,
      "loss": 4.8236,
      "step": 183
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.00010499999999999999,
      "loss": 4.7645,
      "step": 184
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.00010559999999999998,
      "loss": 4.7679,
      "step": 185
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00010619999999999998,
      "loss": 4.7112,
      "step": 186
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00010679999999999998,
      "loss": 4.7586,
      "step": 187
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00010739999999999998,
      "loss": 4.9391,
      "step": 188
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00010799999999999998,
      "loss": 4.7763,
      "step": 189
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00010859999999999998,
      "loss": 4.7955,
      "step": 190
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00010919999999999998,
      "loss": 4.7554,
      "step": 191
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00010979999999999999,
      "loss": 4.5645,
      "step": 192
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00011039999999999999,
      "loss": 4.6836,
      "step": 193
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00011099999999999999,
      "loss": 4.6808,
      "step": 194
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.00011159999999999999,
      "loss": 4.7833,
      "step": 195
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.00011219999999999999,
      "loss": 4.7403,
      "step": 196
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.00011279999999999999,
      "loss": 4.5388,
      "step": 197
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.00011339999999999999,
      "loss": 4.715,
      "step": 198
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.00011399999999999999,
      "loss": 4.7899,
      "step": 199
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.0001146,
      "loss": 4.7792,
      "step": 200
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.0001152,
      "loss": 4.6869,
      "step": 201
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.0001158,
      "loss": 4.6582,
      "step": 202
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.0001164,
      "loss": 4.6555,
      "step": 203
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.000117,
      "loss": 4.8133,
      "step": 204
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.0001176,
      "loss": 4.7527,
      "step": 205
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.0001182,
      "loss": 4.6774,
      "step": 206
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.0001188,
      "loss": 4.6489,
      "step": 207
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.0001194,
      "loss": 4.7002,
      "step": 208
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.00011999999999999999,
      "loss": 4.6374,
      "step": 209
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.00012059999999999999,
      "loss": 4.7362,
      "step": 210
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.00012119999999999999,
      "loss": 4.7831,
      "step": 211
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00012179999999999999,
      "loss": 4.7987,
      "step": 212
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.0001224,
      "loss": 4.895,
      "step": 213
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00012299999999999998,
      "loss": 4.659,
      "step": 214
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.0001236,
      "loss": 4.689,
      "step": 215
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00012419999999999998,
      "loss": 4.7106,
      "step": 216
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.00012479999999999997,
      "loss": 4.8498,
      "step": 217
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.00012539999999999999,
      "loss": 4.6768,
      "step": 218
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.00012599999999999997,
      "loss": 4.7751,
      "step": 219
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.0001266,
      "loss": 4.749,
      "step": 220
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00012719999999999997,
      "loss": 4.6704,
      "step": 221
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.0001278,
      "loss": 4.8314,
      "step": 222
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00012839999999999998,
      "loss": 4.6156,
      "step": 223
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.000129,
      "loss": 4.7307,
      "step": 224
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00012959999999999998,
      "loss": 4.683,
      "step": 225
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.0001302,
      "loss": 4.6377,
      "step": 226
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.00013079999999999998,
      "loss": 4.6931,
      "step": 227
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.0001314,
      "loss": 4.7502,
      "step": 228
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.00013199999999999998,
      "loss": 4.5353,
      "step": 229
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.0001326,
      "loss": 4.6527,
      "step": 230
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.00013319999999999999,
      "loss": 4.4937,
      "step": 231
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.0001338,
      "loss": 4.5865,
      "step": 232
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.0001344,
      "loss": 4.5845,
      "step": 233
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.000135,
      "loss": 4.63,
      "step": 234
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.0001356,
      "loss": 4.6471,
      "step": 235
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.0001362,
      "loss": 4.4935,
      "step": 236
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.0001368,
      "loss": 4.6369,
      "step": 237
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.0001374,
      "loss": 4.6642,
      "step": 238
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.000138,
      "loss": 4.5825,
      "step": 239
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.0001386,
      "loss": 4.6848,
      "step": 240
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.0001392,
      "loss": 4.6064,
      "step": 241
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.00013979999999999998,
      "loss": 4.7473,
      "step": 242
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.0001404,
      "loss": 4.5383,
      "step": 243
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.00014099999999999998,
      "loss": 4.7213,
      "step": 244
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.00014159999999999997,
      "loss": 4.6893,
      "step": 245
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.0001422,
      "loss": 4.6338,
      "step": 246
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.00014279999999999997,
      "loss": 4.6192,
      "step": 247
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.0001434,
      "loss": 4.6615,
      "step": 248
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.00014399999999999998,
      "loss": 4.6148,
      "step": 249
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.0001446,
      "loss": 4.7811,
      "step": 250
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.00014519999999999998,
      "loss": 4.6101,
      "step": 251
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.0001458,
      "loss": 4.7264,
      "step": 252
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.00014639999999999998,
      "loss": 4.4822,
      "step": 253
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.000147,
      "loss": 4.6788,
      "step": 254
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.00014759999999999998,
      "loss": 4.6348,
      "step": 255
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.0001482,
      "loss": 4.5415,
      "step": 256
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.00014879999999999998,
      "loss": 4.7111,
      "step": 257
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.0001494,
      "loss": 4.7448,
      "step": 258
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.00015,
      "loss": 4.712,
      "step": 259
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.00015059999999999997,
      "loss": 4.6759,
      "step": 260
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.0001512,
      "loss": 4.5578,
      "step": 261
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.00015179999999999998,
      "loss": 4.6506,
      "step": 262
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.0001524,
      "loss": 4.6107,
      "step": 263
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.00015299999999999998,
      "loss": 4.563,
      "step": 264
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.0001536,
      "loss": 4.7158,
      "step": 265
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.00015419999999999998,
      "loss": 4.719,
      "step": 266
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.0001548,
      "loss": 4.6998,
      "step": 267
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.00015539999999999998,
      "loss": 4.5441,
      "step": 268
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.000156,
      "loss": 4.6558,
      "step": 269
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.00015659999999999998,
      "loss": 4.5673,
      "step": 270
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.0001572,
      "loss": 4.541,
      "step": 271
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.0001578,
      "loss": 4.5867,
      "step": 272
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.0001584,
      "loss": 4.7094,
      "step": 273
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.000159,
      "loss": 4.5704,
      "step": 274
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.0001596,
      "loss": 4.551,
      "step": 275
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.0001602,
      "loss": 4.657,
      "step": 276
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.0001608,
      "loss": 4.5553,
      "step": 277
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.0001614,
      "loss": 4.7104,
      "step": 278
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.000162,
      "loss": 4.6263,
      "step": 279
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.0001626,
      "loss": 4.5731,
      "step": 280
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.0001632,
      "loss": 4.5898,
      "step": 281
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.0001638,
      "loss": 4.6458,
      "step": 282
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.0001644,
      "loss": 4.5407,
      "step": 283
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.000165,
      "loss": 4.598,
      "step": 284
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.0001656,
      "loss": 4.7309,
      "step": 285
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.0001662,
      "loss": 4.5592,
      "step": 286
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.0001668,
      "loss": 4.8032,
      "step": 287
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.0001674,
      "loss": 4.5494,
      "step": 288
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.000168,
      "loss": 4.5496,
      "step": 289
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.0001686,
      "loss": 4.6866,
      "step": 290
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.00016919999999999997,
      "loss": 4.6359,
      "step": 291
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.00016979999999999998,
      "loss": 4.6596,
      "step": 292
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.00017039999999999997,
      "loss": 4.5546,
      "step": 293
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.00017099999999999998,
      "loss": 4.6861,
      "step": 294
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.00017159999999999997,
      "loss": 4.5227,
      "step": 295
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.00017219999999999998,
      "loss": 4.478,
      "step": 296
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.00017279999999999997,
      "loss": 4.5898,
      "step": 297
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.00017339999999999996,
      "loss": 4.7316,
      "step": 298
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.00017399999999999997,
      "loss": 4.6082,
      "step": 299
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.00017459999999999996,
      "loss": 4.7225,
      "step": 300
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.00017519999999999998,
      "loss": 4.6562,
      "step": 301
    },
    {
      "epoch": 0.68,
      "learning_rate": 0.00017579999999999996,
      "loss": 4.638,
      "step": 302
    },
    {
      "epoch": 0.68,
      "learning_rate": 0.00017639999999999998,
      "loss": 4.5919,
      "step": 303
    },
    {
      "epoch": 0.68,
      "learning_rate": 0.00017699999999999997,
      "loss": 4.4868,
      "step": 304
    },
    {
      "epoch": 0.68,
      "learning_rate": 0.00017759999999999998,
      "loss": 4.6701,
      "step": 305
    },
    {
      "epoch": 0.69,
      "learning_rate": 0.00017819999999999997,
      "loss": 4.6447,
      "step": 306
    },
    {
      "epoch": 0.69,
      "learning_rate": 0.00017879999999999998,
      "loss": 4.5421,
      "step": 307
    },
    {
      "epoch": 0.69,
      "learning_rate": 0.00017939999999999997,
      "loss": 4.5936,
      "step": 308
    },
    {
      "epoch": 0.69,
      "learning_rate": 0.00017999999999999998,
      "loss": 4.3826,
      "step": 309
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.00018059999999999997,
      "loss": 4.6613,
      "step": 310
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.00018119999999999999,
      "loss": 4.5657,
      "step": 311
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.00018179999999999997,
      "loss": 4.5333,
      "step": 312
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.0001824,
      "loss": 4.5485,
      "step": 313
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.00018299999999999998,
      "loss": 4.5868,
      "step": 314
    },
    {
      "epoch": 0.71,
      "learning_rate": 0.0001836,
      "loss": 4.5932,
      "step": 315
    },
    {
      "epoch": 0.71,
      "learning_rate": 0.00018419999999999998,
      "loss": 4.572,
      "step": 316
    },
    {
      "epoch": 0.71,
      "learning_rate": 0.0001848,
      "loss": 4.4496,
      "step": 317
    },
    {
      "epoch": 0.71,
      "learning_rate": 0.00018539999999999998,
      "loss": 4.5901,
      "step": 318
    },
    {
      "epoch": 0.72,
      "learning_rate": 0.000186,
      "loss": 4.529,
      "step": 319
    },
    {
      "epoch": 0.72,
      "learning_rate": 0.00018659999999999998,
      "loss": 4.4892,
      "step": 320
    },
    {
      "epoch": 0.72,
      "learning_rate": 0.0001872,
      "loss": 4.4649,
      "step": 321
    },
    {
      "epoch": 0.72,
      "learning_rate": 0.00018779999999999998,
      "loss": 4.5311,
      "step": 322
    },
    {
      "epoch": 0.72,
      "learning_rate": 0.00018839999999999997,
      "loss": 4.6743,
      "step": 323
    },
    {
      "epoch": 0.73,
      "learning_rate": 0.00018899999999999999,
      "loss": 4.5358,
      "step": 324
    },
    {
      "epoch": 0.73,
      "learning_rate": 0.00018959999999999997,
      "loss": 4.6968,
      "step": 325
    },
    {
      "epoch": 0.73,
      "learning_rate": 0.0001902,
      "loss": 4.5919,
      "step": 326
    },
    {
      "epoch": 0.73,
      "learning_rate": 0.00019079999999999998,
      "loss": 4.6008,
      "step": 327
    },
    {
      "epoch": 0.74,
      "learning_rate": 0.0001914,
      "loss": 4.4298,
      "step": 328
    },
    {
      "epoch": 0.74,
      "learning_rate": 0.00019199999999999998,
      "loss": 4.5898,
      "step": 329
    },
    {
      "epoch": 0.74,
      "learning_rate": 0.0001926,
      "loss": 4.6175,
      "step": 330
    },
    {
      "epoch": 0.74,
      "learning_rate": 0.00019319999999999998,
      "loss": 4.8276,
      "step": 331
    },
    {
      "epoch": 0.74,
      "learning_rate": 0.0001938,
      "loss": 4.5257,
      "step": 332
    },
    {
      "epoch": 0.75,
      "learning_rate": 0.00019439999999999998,
      "loss": 4.5392,
      "step": 333
    },
    {
      "epoch": 0.75,
      "learning_rate": 0.000195,
      "loss": 4.4774,
      "step": 334
    },
    {
      "epoch": 0.75,
      "learning_rate": 0.00019559999999999998,
      "loss": 4.5318,
      "step": 335
    },
    {
      "epoch": 0.75,
      "learning_rate": 0.0001962,
      "loss": 4.4261,
      "step": 336
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.00019679999999999999,
      "loss": 4.5244,
      "step": 337
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.0001974,
      "loss": 4.7178,
      "step": 338
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.000198,
      "loss": 4.3889,
      "step": 339
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.0001986,
      "loss": 4.6082,
      "step": 340
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.0001992,
      "loss": 4.3951,
      "step": 341
    },
    {
      "epoch": 0.77,
      "learning_rate": 0.0001998,
      "loss": 4.5304,
      "step": 342
    },
    {
      "epoch": 0.77,
      "learning_rate": 0.0002004,
      "loss": 4.5191,
      "step": 343
    },
    {
      "epoch": 0.77,
      "learning_rate": 0.000201,
      "loss": 4.4645,
      "step": 344
    },
    {
      "epoch": 0.77,
      "learning_rate": 0.0002016,
      "loss": 4.5721,
      "step": 345
    },
    {
      "epoch": 0.78,
      "learning_rate": 0.0002022,
      "loss": 4.5999,
      "step": 346
    },
    {
      "epoch": 0.78,
      "learning_rate": 0.0002028,
      "loss": 4.5558,
      "step": 347
    },
    {
      "epoch": 0.78,
      "learning_rate": 0.00020339999999999998,
      "loss": 4.5103,
      "step": 348
    },
    {
      "epoch": 0.78,
      "learning_rate": 0.000204,
      "loss": 4.3491,
      "step": 349
    },
    {
      "epoch": 0.78,
      "learning_rate": 0.00020459999999999999,
      "loss": 4.5434,
      "step": 350
    },
    {
      "epoch": 0.79,
      "learning_rate": 0.0002052,
      "loss": 4.5285,
      "step": 351
    },
    {
      "epoch": 0.79,
      "learning_rate": 0.0002058,
      "loss": 4.6069,
      "step": 352
    },
    {
      "epoch": 0.79,
      "learning_rate": 0.00020639999999999998,
      "loss": 4.4765,
      "step": 353
    },
    {
      "epoch": 0.79,
      "learning_rate": 0.00020699999999999996,
      "loss": 4.4035,
      "step": 354
    },
    {
      "epoch": 0.8,
      "learning_rate": 0.00020759999999999998,
      "loss": 4.5182,
      "step": 355
    },
    {
      "epoch": 0.8,
      "learning_rate": 0.00020819999999999996,
      "loss": 4.4212,
      "step": 356
    },
    {
      "epoch": 0.8,
      "learning_rate": 0.00020879999999999998,
      "loss": 4.4384,
      "step": 357
    },
    {
      "epoch": 0.8,
      "learning_rate": 0.00020939999999999997,
      "loss": 4.6651,
      "step": 358
    },
    {
      "epoch": 0.8,
      "learning_rate": 0.00020999999999999998,
      "loss": 4.4562,
      "step": 359
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.00021059999999999997,
      "loss": 4.5427,
      "step": 360
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.00021119999999999996,
      "loss": 4.5367,
      "step": 361
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.00021179999999999997,
      "loss": 4.3515,
      "step": 362
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.00021239999999999996,
      "loss": 4.2992,
      "step": 363
    },
    {
      "epoch": 0.82,
      "learning_rate": 0.00021299999999999997,
      "loss": 4.5807,
      "step": 364
    },
    {
      "epoch": 0.82,
      "learning_rate": 0.00021359999999999996,
      "loss": 4.4651,
      "step": 365
    },
    {
      "epoch": 0.82,
      "learning_rate": 0.00021419999999999998,
      "loss": 4.4487,
      "step": 366
    },
    {
      "epoch": 0.82,
      "learning_rate": 0.00021479999999999996,
      "loss": 4.6004,
      "step": 367
    },
    {
      "epoch": 0.83,
      "learning_rate": 0.00021539999999999998,
      "loss": 4.4618,
      "step": 368
    },
    {
      "epoch": 0.83,
      "learning_rate": 0.00021599999999999996,
      "loss": 4.3591,
      "step": 369
    },
    {
      "epoch": 0.83,
      "learning_rate": 0.00021659999999999998,
      "loss": 4.62,
      "step": 370
    },
    {
      "epoch": 0.83,
      "learning_rate": 0.00021719999999999997,
      "loss": 4.4279,
      "step": 371
    },
    {
      "epoch": 0.83,
      "learning_rate": 0.00021779999999999998,
      "loss": 4.57,
      "step": 372
    },
    {
      "epoch": 0.84,
      "learning_rate": 0.00021839999999999997,
      "loss": 4.4838,
      "step": 373
    },
    {
      "epoch": 0.84,
      "learning_rate": 0.00021899999999999998,
      "loss": 4.4313,
      "step": 374
    },
    {
      "epoch": 0.84,
      "learning_rate": 0.00021959999999999997,
      "loss": 4.3267,
      "step": 375
    },
    {
      "epoch": 0.84,
      "learning_rate": 0.00022019999999999999,
      "loss": 4.4464,
      "step": 376
    },
    {
      "epoch": 0.85,
      "learning_rate": 0.00022079999999999997,
      "loss": 4.4367,
      "step": 377
    },
    {
      "epoch": 0.85,
      "learning_rate": 0.0002214,
      "loss": 4.4783,
      "step": 378
    },
    {
      "epoch": 0.85,
      "learning_rate": 0.00022199999999999998,
      "loss": 4.4356,
      "step": 379
    },
    {
      "epoch": 0.85,
      "learning_rate": 0.0002226,
      "loss": 4.5422,
      "step": 380
    },
    {
      "epoch": 0.85,
      "learning_rate": 0.00022319999999999998,
      "loss": 4.4684,
      "step": 381
    },
    {
      "epoch": 0.86,
      "learning_rate": 0.0002238,
      "loss": 4.5642,
      "step": 382
    },
    {
      "epoch": 0.86,
      "learning_rate": 0.00022439999999999998,
      "loss": 4.5447,
      "step": 383
    },
    {
      "epoch": 0.86,
      "learning_rate": 0.000225,
      "loss": 4.5311,
      "step": 384
    },
    {
      "epoch": 0.86,
      "learning_rate": 0.00022559999999999998,
      "loss": 4.4227,
      "step": 385
    },
    {
      "epoch": 0.87,
      "learning_rate": 0.00022619999999999997,
      "loss": 4.4713,
      "step": 386
    },
    {
      "epoch": 0.87,
      "learning_rate": 0.00022679999999999998,
      "loss": 4.3636,
      "step": 387
    },
    {
      "epoch": 0.87,
      "learning_rate": 0.00022739999999999997,
      "loss": 4.4665,
      "step": 388
    },
    {
      "epoch": 0.87,
      "learning_rate": 0.00022799999999999999,
      "loss": 4.5124,
      "step": 389
    },
    {
      "epoch": 0.87,
      "learning_rate": 0.00022859999999999997,
      "loss": 4.6566,
      "step": 390
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.0002292,
      "loss": 4.5193,
      "step": 391
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.00022979999999999997,
      "loss": 4.4518,
      "step": 392
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.0002304,
      "loss": 4.3638,
      "step": 393
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.00023099999999999998,
      "loss": 4.3771,
      "step": 394
    },
    {
      "epoch": 0.89,
      "learning_rate": 0.0002316,
      "loss": 4.6337,
      "step": 395
    },
    {
      "epoch": 0.89,
      "learning_rate": 0.00023219999999999998,
      "loss": 4.3424,
      "step": 396
    },
    {
      "epoch": 0.89,
      "learning_rate": 0.0002328,
      "loss": 4.3622,
      "step": 397
    },
    {
      "epoch": 0.89,
      "learning_rate": 0.00023339999999999998,
      "loss": 4.5323,
      "step": 398
    },
    {
      "epoch": 0.89,
      "learning_rate": 0.000234,
      "loss": 4.4386,
      "step": 399
    },
    {
      "epoch": 0.9,
      "learning_rate": 0.00023459999999999998,
      "loss": 4.3732,
      "step": 400
    },
    {
      "epoch": 0.9,
      "learning_rate": 0.0002352,
      "loss": 4.5086,
      "step": 401
    },
    {
      "epoch": 0.9,
      "learning_rate": 0.00023579999999999999,
      "loss": 4.4829,
      "step": 402
    },
    {
      "epoch": 0.9,
      "learning_rate": 0.0002364,
      "loss": 4.2462,
      "step": 403
    },
    {
      "epoch": 0.91,
      "learning_rate": 0.000237,
      "loss": 4.5021,
      "step": 404
    },
    {
      "epoch": 0.91,
      "learning_rate": 0.0002376,
      "loss": 4.4215,
      "step": 405
    },
    {
      "epoch": 0.91,
      "learning_rate": 0.0002382,
      "loss": 4.4807,
      "step": 406
    },
    {
      "epoch": 0.91,
      "learning_rate": 0.0002388,
      "loss": 4.3731,
      "step": 407
    },
    {
      "epoch": 0.91,
      "learning_rate": 0.0002394,
      "loss": 4.471,
      "step": 408
    },
    {
      "epoch": 0.92,
      "learning_rate": 0.00023999999999999998,
      "loss": 4.5482,
      "step": 409
    },
    {
      "epoch": 0.92,
      "learning_rate": 0.0002406,
      "loss": 4.4718,
      "step": 410
    },
    {
      "epoch": 0.92,
      "learning_rate": 0.00024119999999999998,
      "loss": 4.3973,
      "step": 411
    },
    {
      "epoch": 0.92,
      "learning_rate": 0.0002418,
      "loss": 4.4493,
      "step": 412
    },
    {
      "epoch": 0.93,
      "learning_rate": 0.00024239999999999998,
      "loss": 4.4752,
      "step": 413
    },
    {
      "epoch": 0.93,
      "learning_rate": 0.000243,
      "loss": 4.4941,
      "step": 414
    },
    {
      "epoch": 0.93,
      "learning_rate": 0.00024359999999999999,
      "loss": 4.3308,
      "step": 415
    },
    {
      "epoch": 0.93,
      "learning_rate": 0.00024419999999999997,
      "loss": 4.3659,
      "step": 416
    },
    {
      "epoch": 0.93,
      "learning_rate": 0.0002448,
      "loss": 4.2834,
      "step": 417
    },
    {
      "epoch": 0.94,
      "learning_rate": 0.00024539999999999995,
      "loss": 4.3965,
      "step": 418
    },
    {
      "epoch": 0.94,
      "learning_rate": 0.00024599999999999996,
      "loss": 4.4057,
      "step": 419
    },
    {
      "epoch": 0.94,
      "learning_rate": 0.0002466,
      "loss": 4.4674,
      "step": 420
    },
    {
      "epoch": 0.94,
      "learning_rate": 0.0002472,
      "loss": 4.5238,
      "step": 421
    },
    {
      "epoch": 0.95,
      "learning_rate": 0.00024779999999999995,
      "loss": 4.3888,
      "step": 422
    },
    {
      "epoch": 0.95,
      "learning_rate": 0.00024839999999999997,
      "loss": 4.4525,
      "step": 423
    },
    {
      "epoch": 0.95,
      "learning_rate": 0.000249,
      "loss": 4.3695,
      "step": 424
    },
    {
      "epoch": 0.95,
      "learning_rate": 0.00024959999999999994,
      "loss": 4.1539,
      "step": 425
    },
    {
      "epoch": 0.96,
      "learning_rate": 0.00025019999999999996,
      "loss": 4.4721,
      "step": 426
    },
    {
      "epoch": 0.96,
      "learning_rate": 0.00025079999999999997,
      "loss": 4.3794,
      "step": 427
    },
    {
      "epoch": 0.96,
      "learning_rate": 0.0002514,
      "loss": 4.5437,
      "step": 428
    },
    {
      "epoch": 0.96,
      "learning_rate": 0.00025199999999999995,
      "loss": 4.5417,
      "step": 429
    },
    {
      "epoch": 0.96,
      "learning_rate": 0.00025259999999999996,
      "loss": 4.4576,
      "step": 430
    },
    {
      "epoch": 0.97,
      "learning_rate": 0.0002532,
      "loss": 4.3823,
      "step": 431
    },
    {
      "epoch": 0.97,
      "learning_rate": 0.0002538,
      "loss": 4.484,
      "step": 432
    },
    {
      "epoch": 0.97,
      "learning_rate": 0.00025439999999999995,
      "loss": 4.5069,
      "step": 433
    },
    {
      "epoch": 0.97,
      "learning_rate": 0.00025499999999999996,
      "loss": 4.4847,
      "step": 434
    },
    {
      "epoch": 0.98,
      "learning_rate": 0.0002556,
      "loss": 4.4307,
      "step": 435
    },
    {
      "epoch": 0.98,
      "learning_rate": 0.0002562,
      "loss": 4.512,
      "step": 436
    },
    {
      "epoch": 0.98,
      "learning_rate": 0.00025679999999999995,
      "loss": 4.546,
      "step": 437
    },
    {
      "epoch": 0.98,
      "learning_rate": 0.00025739999999999997,
      "loss": 4.4074,
      "step": 438
    },
    {
      "epoch": 0.98,
      "learning_rate": 0.000258,
      "loss": 4.4586,
      "step": 439
    },
    {
      "epoch": 0.99,
      "learning_rate": 0.0002586,
      "loss": 4.454,
      "step": 440
    },
    {
      "epoch": 0.99,
      "learning_rate": 0.00025919999999999996,
      "loss": 4.2716,
      "step": 441
    },
    {
      "epoch": 0.99,
      "learning_rate": 0.00025979999999999997,
      "loss": 4.3644,
      "step": 442
    },
    {
      "epoch": 0.99,
      "learning_rate": 0.0002604,
      "loss": 4.4493,
      "step": 443
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.000261,
      "loss": 4.34,
      "step": 444
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.00026159999999999996,
      "loss": 4.3848,
      "step": 445
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.0002622,
      "loss": 4.3646,
      "step": 446
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.0002628,
      "loss": 4.1848,
      "step": 447
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.00026339999999999995,
      "loss": 4.2035,
      "step": 448
    },
    {
      "epoch": 1.01,
      "learning_rate": 0.00026399999999999997,
      "loss": 4.2421,
      "step": 449
    },
    {
      "epoch": 1.01,
      "learning_rate": 0.0002646,
      "loss": 4.0831,
      "step": 450
    },
    {
      "epoch": 1.01,
      "learning_rate": 0.0002652,
      "loss": 4.2555,
      "step": 451
    },
    {
      "epoch": 1.01,
      "learning_rate": 0.00026579999999999996,
      "loss": 3.9589,
      "step": 452
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.00026639999999999997,
      "loss": 4.2033,
      "step": 453
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.000267,
      "loss": 4.1657,
      "step": 454
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.0002676,
      "loss": 4.039,
      "step": 455
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.00026819999999999996,
      "loss": 3.9653,
      "step": 456
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.0002688,
      "loss": 4.2257,
      "step": 457
    },
    {
      "epoch": 1.03,
      "learning_rate": 0.0002694,
      "loss": 4.0318,
      "step": 458
    },
    {
      "epoch": 1.03,
      "learning_rate": 0.00027,
      "loss": 4.1317,
      "step": 459
    },
    {
      "epoch": 1.03,
      "learning_rate": 0.00027059999999999996,
      "loss": 4.2431,
      "step": 460
    },
    {
      "epoch": 1.03,
      "learning_rate": 0.0002712,
      "loss": 4.1387,
      "step": 461
    },
    {
      "epoch": 1.04,
      "learning_rate": 0.0002718,
      "loss": 4.1351,
      "step": 462
    },
    {
      "epoch": 1.04,
      "learning_rate": 0.0002724,
      "loss": 4.2222,
      "step": 463
    },
    {
      "epoch": 1.04,
      "learning_rate": 0.00027299999999999997,
      "loss": 4.1966,
      "step": 464
    },
    {
      "epoch": 1.04,
      "learning_rate": 0.0002736,
      "loss": 4.1039,
      "step": 465
    },
    {
      "epoch": 1.04,
      "learning_rate": 0.0002742,
      "loss": 4.2265,
      "step": 466
    },
    {
      "epoch": 1.05,
      "learning_rate": 0.0002748,
      "loss": 4.1175,
      "step": 467
    },
    {
      "epoch": 1.05,
      "learning_rate": 0.00027539999999999997,
      "loss": 3.8776,
      "step": 468
    },
    {
      "epoch": 1.05,
      "learning_rate": 0.000276,
      "loss": 4.211,
      "step": 469
    },
    {
      "epoch": 1.05,
      "learning_rate": 0.0002766,
      "loss": 4.103,
      "step": 470
    },
    {
      "epoch": 1.06,
      "learning_rate": 0.0002772,
      "loss": 3.8837,
      "step": 471
    },
    {
      "epoch": 1.06,
      "learning_rate": 0.0002778,
      "loss": 4.3084,
      "step": 472
    },
    {
      "epoch": 1.06,
      "learning_rate": 0.0002784,
      "loss": 4.1486,
      "step": 473
    },
    {
      "epoch": 1.06,
      "learning_rate": 0.000279,
      "loss": 4.0692,
      "step": 474
    },
    {
      "epoch": 1.07,
      "learning_rate": 0.00027959999999999997,
      "loss": 4.1777,
      "step": 475
    },
    {
      "epoch": 1.07,
      "learning_rate": 0.0002802,
      "loss": 4.3202,
      "step": 476
    },
    {
      "epoch": 1.07,
      "learning_rate": 0.0002808,
      "loss": 4.0694,
      "step": 477
    },
    {
      "epoch": 1.07,
      "learning_rate": 0.00028139999999999996,
      "loss": 4.0776,
      "step": 478
    },
    {
      "epoch": 1.07,
      "learning_rate": 0.00028199999999999997,
      "loss": 4.114,
      "step": 479
    },
    {
      "epoch": 1.08,
      "learning_rate": 0.0002826,
      "loss": 4.0373,
      "step": 480
    },
    {
      "epoch": 1.08,
      "learning_rate": 0.00028319999999999994,
      "loss": 4.0654,
      "step": 481
    },
    {
      "epoch": 1.08,
      "learning_rate": 0.00028379999999999996,
      "loss": 4.2251,
      "step": 482
    },
    {
      "epoch": 1.08,
      "learning_rate": 0.0002844,
      "loss": 4.0701,
      "step": 483
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.000285,
      "loss": 4.0162,
      "step": 484
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.00028559999999999995,
      "loss": 4.2459,
      "step": 485
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.00028619999999999996,
      "loss": 4.228,
      "step": 486
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.0002868,
      "loss": 4.0956,
      "step": 487
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.00028739999999999994,
      "loss": 4.3432,
      "step": 488
    },
    {
      "epoch": 1.1,
      "learning_rate": 0.00028799999999999995,
      "loss": 4.411,
      "step": 489
    },
    {
      "epoch": 1.1,
      "learning_rate": 0.00028859999999999997,
      "loss": 4.0846,
      "step": 490
    },
    {
      "epoch": 1.1,
      "learning_rate": 0.0002892,
      "loss": 4.1448,
      "step": 491
    },
    {
      "epoch": 1.1,
      "learning_rate": 0.00028979999999999994,
      "loss": 4.1575,
      "step": 492
    },
    {
      "epoch": 1.11,
      "learning_rate": 0.00029039999999999996,
      "loss": 4.0333,
      "step": 493
    },
    {
      "epoch": 1.11,
      "learning_rate": 0.00029099999999999997,
      "loss": 4.2394,
      "step": 494
    },
    {
      "epoch": 1.11,
      "learning_rate": 0.0002916,
      "loss": 3.9672,
      "step": 495
    },
    {
      "epoch": 1.11,
      "learning_rate": 0.00029219999999999995,
      "loss": 3.8441,
      "step": 496
    },
    {
      "epoch": 1.11,
      "learning_rate": 0.00029279999999999996,
      "loss": 4.2714,
      "step": 497
    },
    {
      "epoch": 1.12,
      "learning_rate": 0.0002934,
      "loss": 4.2313,
      "step": 498
    },
    {
      "epoch": 1.12,
      "learning_rate": 0.000294,
      "loss": 3.9759,
      "step": 499
    },
    {
      "epoch": 1.12,
      "learning_rate": 0.00029459999999999995,
      "loss": 4.1983,
      "step": 500
    },
    {
      "epoch": 1.12,
      "eval_loss": 4.117344856262207,
      "eval_runtime": 108.5789,
      "eval_samples_per_second": 24.333,
      "eval_steps_per_second": 0.764,
      "eval_wer": 0.8989488298294328,
      "step": 500
    },
    {
      "epoch": 1.12,
      "learning_rate": 0.00029519999999999997,
      "loss": 4.1306,
      "step": 501
    },
    {
      "epoch": 1.13,
      "learning_rate": 0.0002958,
      "loss": 4.0293,
      "step": 502
    },
    {
      "epoch": 1.13,
      "learning_rate": 0.0002964,
      "loss": 4.0274,
      "step": 503
    },
    {
      "epoch": 1.13,
      "learning_rate": 0.00029699999999999996,
      "loss": 4.1381,
      "step": 504
    },
    {
      "epoch": 1.13,
      "learning_rate": 0.00029759999999999997,
      "loss": 4.1041,
      "step": 505
    },
    {
      "epoch": 1.13,
      "learning_rate": 0.0002982,
      "loss": 3.946,
      "step": 506
    },
    {
      "epoch": 1.14,
      "learning_rate": 0.0002988,
      "loss": 4.2828,
      "step": 507
    },
    {
      "epoch": 1.14,
      "learning_rate": 0.00029939999999999996,
      "loss": 3.9268,
      "step": 508
    },
    {
      "epoch": 1.14,
      "learning_rate": 0.0003,
      "loss": 4.1165,
      "step": 509
    },
    {
      "epoch": 1.14,
      "learning_rate": 0.00029982658959537567,
      "loss": 4.2132,
      "step": 510
    },
    {
      "epoch": 1.15,
      "learning_rate": 0.0002996531791907514,
      "loss": 4.1032,
      "step": 511
    },
    {
      "epoch": 1.15,
      "learning_rate": 0.00029947976878612716,
      "loss": 4.1322,
      "step": 512
    },
    {
      "epoch": 1.15,
      "learning_rate": 0.00029930635838150286,
      "loss": 4.2382,
      "step": 513
    },
    {
      "epoch": 1.15,
      "learning_rate": 0.0002991329479768786,
      "loss": 4.0271,
      "step": 514
    },
    {
      "epoch": 1.15,
      "learning_rate": 0.0002989595375722543,
      "loss": 4.1216,
      "step": 515
    },
    {
      "epoch": 1.16,
      "learning_rate": 0.00029878612716763005,
      "loss": 4.2394,
      "step": 516
    },
    {
      "epoch": 1.16,
      "learning_rate": 0.00029861271676300574,
      "loss": 4.2119,
      "step": 517
    },
    {
      "epoch": 1.16,
      "learning_rate": 0.0002984393063583815,
      "loss": 4.0242,
      "step": 518
    },
    {
      "epoch": 1.16,
      "learning_rate": 0.0002982658959537572,
      "loss": 4.1542,
      "step": 519
    },
    {
      "epoch": 1.17,
      "learning_rate": 0.00029809248554913293,
      "loss": 4.0748,
      "step": 520
    },
    {
      "epoch": 1.17,
      "learning_rate": 0.0002979190751445086,
      "loss": 4.0789,
      "step": 521
    },
    {
      "epoch": 1.17,
      "learning_rate": 0.00029774566473988437,
      "loss": 4.3327,
      "step": 522
    },
    {
      "epoch": 1.17,
      "learning_rate": 0.00029757225433526006,
      "loss": 4.1015,
      "step": 523
    },
    {
      "epoch": 1.17,
      "learning_rate": 0.0002973988439306358,
      "loss": 4.059,
      "step": 524
    },
    {
      "epoch": 1.18,
      "learning_rate": 0.00029722543352601156,
      "loss": 4.0931,
      "step": 525
    },
    {
      "epoch": 1.18,
      "learning_rate": 0.00029705202312138725,
      "loss": 4.0885,
      "step": 526
    },
    {
      "epoch": 1.18,
      "learning_rate": 0.00029687861271676295,
      "loss": 4.0645,
      "step": 527
    },
    {
      "epoch": 1.18,
      "learning_rate": 0.0002967052023121387,
      "loss": 4.2272,
      "step": 528
    },
    {
      "epoch": 1.19,
      "learning_rate": 0.00029653179190751444,
      "loss": 4.1368,
      "step": 529
    },
    {
      "epoch": 1.19,
      "learning_rate": 0.00029635838150289014,
      "loss": 4.1602,
      "step": 530
    },
    {
      "epoch": 1.19,
      "learning_rate": 0.00029618497109826583,
      "loss": 4.2267,
      "step": 531
    },
    {
      "epoch": 1.19,
      "learning_rate": 0.0002960115606936416,
      "loss": 4.157,
      "step": 532
    },
    {
      "epoch": 1.2,
      "learning_rate": 0.0002958381502890173,
      "loss": 4.0771,
      "step": 533
    },
    {
      "epoch": 1.2,
      "learning_rate": 0.000295664739884393,
      "loss": 4.1109,
      "step": 534
    },
    {
      "epoch": 1.2,
      "learning_rate": 0.00029549132947976877,
      "loss": 4.2845,
      "step": 535
    },
    {
      "epoch": 1.2,
      "learning_rate": 0.0002953179190751445,
      "loss": 4.1589,
      "step": 536
    },
    {
      "epoch": 1.2,
      "learning_rate": 0.0002951445086705202,
      "loss": 4.0772,
      "step": 537
    },
    {
      "epoch": 1.21,
      "learning_rate": 0.0002949710982658959,
      "loss": 4.129,
      "step": 538
    },
    {
      "epoch": 1.21,
      "learning_rate": 0.00029479768786127165,
      "loss": 4.2688,
      "step": 539
    },
    {
      "epoch": 1.21,
      "learning_rate": 0.0002946242774566474,
      "loss": 4.1127,
      "step": 540
    },
    {
      "epoch": 1.21,
      "learning_rate": 0.0002944508670520231,
      "loss": 4.2202,
      "step": 541
    },
    {
      "epoch": 1.22,
      "learning_rate": 0.00029427745664739884,
      "loss": 4.0726,
      "step": 542
    },
    {
      "epoch": 1.22,
      "learning_rate": 0.00029410404624277453,
      "loss": 3.9405,
      "step": 543
    },
    {
      "epoch": 1.22,
      "learning_rate": 0.0002939306358381503,
      "loss": 4.1047,
      "step": 544
    },
    {
      "epoch": 1.22,
      "learning_rate": 0.00029375722543352597,
      "loss": 4.198,
      "step": 545
    },
    {
      "epoch": 1.22,
      "learning_rate": 0.0002935838150289017,
      "loss": 4.1176,
      "step": 546
    },
    {
      "epoch": 1.23,
      "learning_rate": 0.0002934104046242774,
      "loss": 4.255,
      "step": 547
    },
    {
      "epoch": 1.23,
      "learning_rate": 0.00029323699421965316,
      "loss": 4.1196,
      "step": 548
    },
    {
      "epoch": 1.23,
      "learning_rate": 0.00029306358381502886,
      "loss": 3.9692,
      "step": 549
    },
    {
      "epoch": 1.23,
      "learning_rate": 0.0002928901734104046,
      "loss": 4.1252,
      "step": 550
    },
    {
      "epoch": 1.24,
      "learning_rate": 0.0002927167630057803,
      "loss": 4.1366,
      "step": 551
    },
    {
      "epoch": 1.24,
      "learning_rate": 0.00029254335260115604,
      "loss": 4.0874,
      "step": 552
    },
    {
      "epoch": 1.24,
      "learning_rate": 0.0002923699421965318,
      "loss": 4.045,
      "step": 553
    },
    {
      "epoch": 1.24,
      "learning_rate": 0.0002921965317919075,
      "loss": 4.0562,
      "step": 554
    },
    {
      "epoch": 1.24,
      "learning_rate": 0.0002920231213872832,
      "loss": 3.9972,
      "step": 555
    },
    {
      "epoch": 1.25,
      "learning_rate": 0.0002918497109826589,
      "loss": 3.9458,
      "step": 556
    },
    {
      "epoch": 1.25,
      "learning_rate": 0.0002916763005780347,
      "loss": 4.0199,
      "step": 557
    },
    {
      "epoch": 1.25,
      "learning_rate": 0.00029150289017341037,
      "loss": 4.0637,
      "step": 558
    },
    {
      "epoch": 1.25,
      "learning_rate": 0.0002913294797687861,
      "loss": 4.1677,
      "step": 559
    },
    {
      "epoch": 1.26,
      "learning_rate": 0.00029115606936416186,
      "loss": 4.1027,
      "step": 560
    },
    {
      "epoch": 1.26,
      "learning_rate": 0.00029098265895953756,
      "loss": 4.0931,
      "step": 561
    },
    {
      "epoch": 1.26,
      "learning_rate": 0.00029080924855491325,
      "loss": 3.9447,
      "step": 562
    },
    {
      "epoch": 1.26,
      "learning_rate": 0.000290635838150289,
      "loss": 4.1879,
      "step": 563
    },
| { |
| "epoch": 1.26, |
| "learning_rate": 0.00029046242774566475, |
| "loss": 4.1418, |
| "step": 564 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 0.00029028901734104044, |
| "loss": 3.9882, |
| "step": 565 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 0.00029011560693641613, |
| "loss": 4.1112, |
| "step": 566 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 0.0002899421965317919, |
| "loss": 4.0749, |
| "step": 567 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 0.00028976878612716763, |
| "loss": 4.0196, |
| "step": 568 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 0.0002895953757225433, |
| "loss": 4.1057, |
| "step": 569 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 0.00028942196531791907, |
| "loss": 4.0591, |
| "step": 570 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 0.00028924855491329476, |
| "loss": 3.892, |
| "step": 571 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 0.0002890751445086705, |
| "loss": 4.2725, |
| "step": 572 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 0.0002889017341040462, |
| "loss": 4.1078, |
| "step": 573 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 0.00028872832369942195, |
| "loss": 4.1539, |
| "step": 574 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 0.00028855491329479765, |
| "loss": 4.204, |
| "step": 575 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 0.0002883815028901734, |
| "loss": 4.161, |
| "step": 576 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 0.0002882080924855491, |
| "loss": 3.9632, |
| "step": 577 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 0.00028803468208092484, |
| "loss": 4.1234, |
| "step": 578 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 0.00028786127167630053, |
| "loss": 4.1015, |
| "step": 579 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 0.0002876878612716763, |
| "loss": 3.9423, |
| "step": 580 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 0.000287514450867052, |
| "loss": 3.9992, |
| "step": 581 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 0.0002873410404624277, |
| "loss": 4.0312, |
| "step": 582 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 0.0002871676300578034, |
| "loss": 3.9948, |
| "step": 583 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 0.00028699421965317916, |
| "loss": 4.0072, |
| "step": 584 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 0.0002868208092485549, |
| "loss": 4.1821, |
| "step": 585 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 0.0002866473988439306, |
| "loss": 4.1234, |
| "step": 586 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 0.00028647398843930635, |
| "loss": 4.0079, |
| "step": 587 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 0.0002863005780346821, |
| "loss": 4.0491, |
| "step": 588 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 0.0002861271676300578, |
| "loss": 4.1257, |
| "step": 589 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 0.0002859537572254335, |
| "loss": 3.94, |
| "step": 590 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 0.00028578034682080923, |
| "loss": 4.0918, |
| "step": 591 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 0.000285606936416185, |
| "loss": 4.0307, |
| "step": 592 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 0.0002854335260115607, |
| "loss": 3.8526, |
| "step": 593 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 0.00028526011560693637, |
| "loss": 4.1233, |
| "step": 594 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 0.0002850867052023121, |
| "loss": 4.1581, |
| "step": 595 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 0.00028491329479768786, |
| "loss": 3.886, |
| "step": 596 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 0.00028473988439306356, |
| "loss": 4.1113, |
| "step": 597 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 0.0002845664739884393, |
| "loss": 4.1737, |
| "step": 598 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 0.000284393063583815, |
| "loss": 3.8796, |
| "step": 599 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 0.00028421965317919075, |
| "loss": 4.0817, |
| "step": 600 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 0.00028404624277456644, |
| "loss": 4.1763, |
| "step": 601 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 0.0002838728323699422, |
| "loss": 4.0214, |
| "step": 602 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 0.0002836994219653179, |
| "loss": 4.0455, |
| "step": 603 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 0.00028352601156069363, |
| "loss": 3.9542, |
| "step": 604 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 0.0002833526011560693, |
| "loss": 3.815, |
| "step": 605 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 0.00028317919075144507, |
| "loss": 4.032, |
| "step": 606 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 0.00028300578034682076, |
| "loss": 4.0578, |
| "step": 607 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 0.0002828323699421965, |
| "loss": 3.9579, |
| "step": 608 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 0.00028265895953757226, |
| "loss": 3.7659, |
| "step": 609 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 0.00028248554913294795, |
| "loss": 4.0211, |
| "step": 610 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 0.00028231213872832365, |
| "loss": 3.8637, |
| "step": 611 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 0.0002821387283236994, |
| "loss": 3.8087, |
| "step": 612 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 0.00028196531791907514, |
| "loss": 3.9564, |
| "step": 613 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 0.00028179190751445083, |
| "loss": 3.9331, |
| "step": 614 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 0.0002816184971098266, |
| "loss": 3.7604, |
| "step": 615 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 0.0002814450867052023, |
| "loss": 4.0589, |
| "step": 616 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 0.000281271676300578, |
| "loss": 3.9125, |
| "step": 617 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 0.0002810982658959537, |
| "loss": 3.7168, |
| "step": 618 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 0.00028092485549132947, |
| "loss": 4.0136, |
| "step": 619 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 0.00028075144508670516, |
| "loss": 3.9194, |
| "step": 620 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 0.0002805780346820809, |
| "loss": 3.7356, |
| "step": 621 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 0.0002804046242774566, |
| "loss": 3.982, |
| "step": 622 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 0.00028023121387283235, |
| "loss": 4.031, |
| "step": 623 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 0.00028005780346820804, |
| "loss": 3.8451, |
| "step": 624 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 0.0002798843930635838, |
| "loss": 4.1838, |
| "step": 625 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 0.00027971098265895954, |
| "loss": 4.0628, |
| "step": 626 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 0.00027953757225433523, |
| "loss": 3.8254, |
| "step": 627 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 0.0002793641618497109, |
| "loss": 4.1763, |
| "step": 628 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 0.00027919075144508667, |
| "loss": 4.0991, |
| "step": 629 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 0.0002790173410404624, |
| "loss": 4.0244, |
| "step": 630 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 0.0002788439306358381, |
| "loss": 3.8255, |
| "step": 631 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 0.00027867052023121386, |
| "loss": 4.0771, |
| "step": 632 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 0.00027849710982658955, |
| "loss": 3.9336, |
| "step": 633 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 0.0002783236994219653, |
| "loss": 3.7912, |
| "step": 634 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 0.000278150289017341, |
| "loss": 4.0004, |
| "step": 635 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 0.00027797687861271674, |
| "loss": 3.9155, |
| "step": 636 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 0.0002778034682080925, |
| "loss": 3.7318, |
| "step": 637 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 0.0002776300578034682, |
| "loss": 3.8857, |
| "step": 638 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 0.0002774566473988439, |
| "loss": 3.903, |
| "step": 639 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 0.0002772832369942196, |
| "loss": 3.8255, |
| "step": 640 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 0.0002771098265895954, |
| "loss": 3.9039, |
| "step": 641 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 0.00027693641618497107, |
| "loss": 3.9036, |
| "step": 642 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 0.0002767630057803468, |
| "loss": 3.8427, |
| "step": 643 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 0.0002765895953757225, |
| "loss": 3.8774, |
| "step": 644 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 0.00027641618497109826, |
| "loss": 3.8732, |
| "step": 645 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 0.00027624277456647395, |
| "loss": 3.4868, |
| "step": 646 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 0.0002760693641618497, |
| "loss": 3.9736, |
| "step": 647 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 0.0002758959537572254, |
| "loss": 3.8157, |
| "step": 648 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 0.00027572254335260114, |
| "loss": 3.4958, |
| "step": 649 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 0.00027554913294797683, |
| "loss": 3.8825, |
| "step": 650 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 0.0002753757225433526, |
| "loss": 3.7931, |
| "step": 651 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 0.0002752023121387283, |
| "loss": 3.6236, |
| "step": 652 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 0.000275028901734104, |
| "loss": 3.8066, |
| "step": 653 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 0.00027485549132947977, |
| "loss": 3.884, |
| "step": 654 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 0.00027468208092485546, |
| "loss": 3.6747, |
| "step": 655 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 0.00027450867052023116, |
| "loss": 3.8094, |
| "step": 656 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 0.0002743352601156069, |
| "loss": 3.8162, |
| "step": 657 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 0.00027416184971098265, |
| "loss": 3.8007, |
| "step": 658 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 0.00027398843930635835, |
| "loss": 3.5526, |
| "step": 659 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 0.0002738150289017341, |
| "loss": 3.8138, |
| "step": 660 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 0.00027364161849710984, |
| "loss": 3.7085, |
| "step": 661 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 0.00027346820809248554, |
| "loss": 3.5649, |
| "step": 662 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 0.00027329479768786123, |
| "loss": 3.9117, |
| "step": 663 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 0.000273121387283237, |
| "loss": 3.6392, |
| "step": 664 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 0.0002729479768786127, |
| "loss": 3.4557, |
| "step": 665 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 0.0002727745664739884, |
| "loss": 3.8354, |
| "step": 666 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 0.0002726011560693641, |
| "loss": 3.7573, |
| "step": 667 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 0.00027242774566473986, |
| "loss": 3.4499, |
| "step": 668 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 0.0002722543352601156, |
| "loss": 3.8155, |
| "step": 669 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 0.0002720809248554913, |
| "loss": 3.8723, |
| "step": 670 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 0.00027190751445086705, |
| "loss": 3.4205, |
| "step": 671 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 0.00027173410404624274, |
| "loss": 3.7678, |
| "step": 672 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 0.0002715606936416185, |
| "loss": 3.6017, |
| "step": 673 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 0.0002713872832369942, |
| "loss": 3.5447, |
| "step": 674 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 0.00027121387283236993, |
| "loss": 3.7579, |
| "step": 675 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 0.0002710404624277456, |
| "loss": 3.7863, |
| "step": 676 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 0.0002708670520231214, |
| "loss": 3.5526, |
| "step": 677 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 0.00027069364161849707, |
| "loss": 3.7598, |
| "step": 678 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 0.0002705202312138728, |
| "loss": 3.7686, |
| "step": 679 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 0.0002703468208092485, |
| "loss": 3.4443, |
| "step": 680 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 0.00027017341040462426, |
| "loss": 3.6148, |
| "step": 681 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 0.00027, |
| "loss": 3.5095, |
| "step": 682 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 0.0002698265895953757, |
| "loss": 3.4507, |
| "step": 683 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 0.0002696531791907514, |
| "loss": 3.3469, |
| "step": 684 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 0.00026947976878612714, |
| "loss": 3.7119, |
| "step": 685 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 0.0002693063583815029, |
| "loss": 3.479, |
| "step": 686 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 0.0002691329479768786, |
| "loss": 3.4112, |
| "step": 687 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 0.00026895953757225433, |
| "loss": 3.543, |
| "step": 688 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 0.0002687861271676301, |
| "loss": 3.3952, |
| "step": 689 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 0.00026861271676300577, |
| "loss": 3.1037, |
| "step": 690 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 0.00026843930635838146, |
| "loss": 3.5882, |
| "step": 691 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 0.0002682658959537572, |
| "loss": 3.4422, |
| "step": 692 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 0.00026809248554913296, |
| "loss": 3.1499, |
| "step": 693 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 0.00026791907514450865, |
| "loss": 3.54, |
| "step": 694 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 0.00026774566473988435, |
| "loss": 3.4218, |
| "step": 695 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 0.0002675722543352601, |
| "loss": 3.0188, |
| "step": 696 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 0.00026739884393063584, |
| "loss": 3.4146, |
| "step": 697 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 0.00026722543352601153, |
| "loss": 3.2012, |
| "step": 698 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 0.0002670520231213873, |
| "loss": 3.1274, |
| "step": 699 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 0.000266878612716763, |
| "loss": 3.3585, |
| "step": 700 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 0.0002667052023121387, |
| "loss": 3.2497, |
| "step": 701 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 0.0002665317919075144, |
| "loss": 3.0877, |
| "step": 702 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 0.00026635838150289016, |
| "loss": 3.3097, |
| "step": 703 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 0.00026618497109826586, |
| "loss": 3.388, |
| "step": 704 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 0.00026601156069364155, |
| "loss": 3.1284, |
| "step": 705 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 0.0002658381502890173, |
| "loss": 3.2168, |
| "step": 706 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 0.00026566473988439305, |
| "loss": 3.1948, |
| "step": 707 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 0.00026549132947976874, |
| "loss": 3.1013, |
| "step": 708 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 0.0002653179190751445, |
| "loss": 2.9465, |
| "step": 709 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 0.00026514450867052024, |
| "loss": 3.2192, |
| "step": 710 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 0.00026497109826589593, |
| "loss": 3.0746, |
| "step": 711 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 0.0002647976878612716, |
| "loss": 2.7205, |
| "step": 712 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 0.00026462427745664737, |
| "loss": 3.616, |
| "step": 713 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 0.0002644508670520231, |
| "loss": 3.6917, |
| "step": 714 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 0.0002642774566473988, |
| "loss": 3.1044, |
| "step": 715 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 0.00026410404624277456, |
| "loss": 3.5462, |
| "step": 716 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 0.00026393063583815025, |
| "loss": 3.8199, |
| "step": 717 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 0.000263757225433526, |
| "loss": 2.928, |
| "step": 718 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 0.0002635838150289017, |
| "loss": 3.2372, |
| "step": 719 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 0.00026341040462427744, |
| "loss": 3.085, |
| "step": 720 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 0.00026323699421965314, |
| "loss": 3.0018, |
| "step": 721 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 0.0002630635838150289, |
| "loss": 3.2051, |
| "step": 722 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 0.0002628901734104046, |
| "loss": 3.1454, |
| "step": 723 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 0.0002627167630057803, |
| "loss": 2.7251, |
| "step": 724 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 0.000262543352601156, |
| "loss": 3.0659, |
| "step": 725 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 0.00026236994219653177, |
| "loss": 2.9709, |
| "step": 726 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 0.0002621965317919075, |
| "loss": 2.6604, |
| "step": 727 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 0.0002620231213872832, |
| "loss": 3.0403, |
| "step": 728 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 0.0002618497109826589, |
| "loss": 2.8739, |
| "step": 729 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 0.00026167630057803465, |
| "loss": 2.707, |
| "step": 730 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 0.0002615028901734104, |
| "loss": 2.8407, |
| "step": 731 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 0.0002613294797687861, |
| "loss": 2.9271, |
| "step": 732 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 0.00026115606936416184, |
| "loss": 2.8803, |
| "step": 733 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 0.0002609826589595376, |
| "loss": 2.4734, |
| "step": 734 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 0.0002608092485549133, |
| "loss": 2.8294, |
| "step": 735 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 0.000260635838150289, |
| "loss": 2.6224, |
| "step": 736 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 0.0002604624277456647, |
| "loss": 2.72, |
| "step": 737 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 0.00026028901734104047, |
| "loss": 2.7036, |
| "step": 738 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 0.00026011560693641616, |
| "loss": 2.5602, |
| "step": 739 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 0.00025994219653179186, |
| "loss": 2.4948, |
| "step": 740 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 0.0002597687861271676, |
| "loss": 2.8221, |
| "step": 741 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 0.00025959537572254335, |
| "loss": 2.5999, |
| "step": 742 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 0.00025942196531791905, |
| "loss": 2.2671, |
| "step": 743 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 0.0002592485549132948, |
| "loss": 2.5269, |
| "step": 744 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 0.0002590751445086705, |
| "loss": 2.4878, |
| "step": 745 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 0.00025890173410404624, |
| "loss": 2.2018, |
| "step": 746 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 0.00025872832369942193, |
| "loss": 2.4875, |
| "step": 747 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 0.0002585549132947977, |
| "loss": 2.4556, |
| "step": 748 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 0.00025838150289017337, |
| "loss": 2.0188, |
| "step": 749 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 0.0002582080924855491, |
| "loss": 2.4847, |
| "step": 750 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 0.0002580346820809248, |
| "loss": 2.1376, |
| "step": 751 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 0.00025786127167630056, |
| "loss": 2.3571, |
| "step": 752 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 0.00025768786127167625, |
| "loss": 2.6405, |
| "step": 753 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 0.000257514450867052, |
| "loss": 2.2147, |
| "step": 754 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 0.00025734104046242775, |
| "loss": 2.241, |
| "step": 755 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 0.00025716763005780344, |
| "loss": 2.4081, |
| "step": 756 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 0.00025699421965317914, |
| "loss": 2.3612, |
| "step": 757 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 0.0002568208092485549, |
| "loss": 2.2445, |
| "step": 758 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 0.00025664739884393063, |
| "loss": 2.0746, |
| "step": 759 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 0.0002564739884393063, |
| "loss": 2.2209, |
| "step": 760 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 0.00025630057803468207, |
| "loss": 2.2674, |
| "step": 761 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 0.0002561271676300578, |
| "loss": 1.8645, |
| "step": 762 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 0.0002559537572254335, |
| "loss": 2.0392, |
| "step": 763 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 0.0002557803468208092, |
| "loss": 1.8161, |
| "step": 764 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 0.00025560693641618496, |
| "loss": 1.9057, |
| "step": 765 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 0.0002554335260115607, |
| "loss": 2.016, |
| "step": 766 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 0.0002552601156069364, |
| "loss": 2.1905, |
| "step": 767 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 0.0002550867052023121, |
| "loss": 1.9702, |
| "step": 768 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 0.00025491329479768784, |
| "loss": 2.4542, |
| "step": 769 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 0.0002547398843930636, |
| "loss": 2.3284, |
| "step": 770 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 0.0002545664739884393, |
| "loss": 1.7485, |
| "step": 771 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 0.00025439306358381503, |
| "loss": 2.0999, |
| "step": 772 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 0.0002542196531791907, |
| "loss": 1.8097, |
| "step": 773 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 0.00025404624277456647, |
| "loss": 1.8122, |
| "step": 774 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 0.00025387283236994216, |
| "loss": 2.0159, |
| "step": 775 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 0.0002536994219653179, |
| "loss": 1.6755, |
| "step": 776 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 0.0002535260115606936, |
| "loss": 1.5611, |
| "step": 777 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 0.00025335260115606935, |
| "loss": 1.9187, |
| "step": 778 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 0.00025317919075144504, |
| "loss": 1.8663, |
| "step": 779 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 0.0002530057803468208, |
| "loss": 1.6627, |
| "step": 780 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 0.0002528323699421965, |
| "loss": 1.7399, |
| "step": 781 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 0.00025265895953757223, |
| "loss": 1.5797, |
| "step": 782 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 0.000252485549132948, |
| "loss": 1.4683, |
| "step": 783 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 0.0002523121387283237, |
| "loss": 1.4242, |
| "step": 784 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 0.00025213872832369937, |
| "loss": 1.6209, |
| "step": 785 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 0.0002519653179190751, |
| "loss": 1.408, |
| "step": 786 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 0.00025179190751445086, |
| "loss": 1.3823, |
| "step": 787 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 0.00025161849710982656, |
| "loss": 1.5972, |
| "step": 788 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 0.0002514450867052023, |
| "loss": 1.3938, |
| "step": 789 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 0.00025127167630057805, |
| "loss": 1.434, |
| "step": 790 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 0.00025109826589595375, |
| "loss": 1.5528, |
| "step": 791 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 0.00025092485549132944, |
| "loss": 1.2635, |
| "step": 792 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 0.0002507514450867052, |
| "loss": 1.2637, |
| "step": 793 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 0.00025057803468208094, |
| "loss": 1.4849, |
| "step": 794 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 0.00025040462427745663, |
| "loss": 1.445, |
| "step": 795 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 0.0002502312138728323, |
| "loss": 1.6101, |
| "step": 796 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 0.00025005780346820807, |
| "loss": 1.4999, |
| "step": 797 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 0.00024988439306358376, |
| "loss": 1.3681, |
| "step": 798 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 0.0002497109826589595, |
| "loss": 1.3321, |
| "step": 799 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 0.00024953757225433526, |
| "loss": 1.4236, |
| "step": 800 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 0.00024936416184971095, |
| "loss": 1.3819, |
| "step": 801 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 0.00024919075144508665, |
| "loss": 1.1145, |
| "step": 802 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 0.0002490173410404624, |
| "loss": 1.4309, |
| "step": 803 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 0.00024884393063583814, |
| "loss": 1.2536, |
| "step": 804 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 0.00024867052023121384, |
| "loss": 1.1945, |
| "step": 805 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 0.0002484971098265896, |
| "loss": 1.4259, |
| "step": 806 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 0.00024832369942196533, |
| "loss": 1.2401, |
| "step": 807 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 0.000248150289017341, |
| "loss": 1.0895, |
| "step": 808 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 0.0002479768786127167, |
| "loss": 1.1988, |
| "step": 809 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 0.00024780346820809247, |
| "loss": 1.2257, |
| "step": 810 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 0.0002476300578034682, |
| "loss": 1.26, |
| "step": 811 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 0.0002474566473988439, |
| "loss": 1.2901, |
| "step": 812 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 0.0002472832369942196, |
| "loss": 1.2058, |
| "step": 813 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 0.00024710982658959535, |
| "loss": 1.0282, |
| "step": 814 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 0.0002469364161849711, |
| "loss": 1.0176, |
| "step": 815 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 0.0002467630057803468, |
| "loss": 1.2517, |
| "step": 816 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 0.00024658959537572254, |
| "loss": 1.0255, |
| "step": 817 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 0.00024641618497109823, |
| "loss": 1.0586, |
| "step": 818 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 0.000246242774566474, |
| "loss": 1.1981, |
| "step": 819 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 0.0002460693641618497, |
| "loss": 1.0358, |
| "step": 820 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 0.0002458959537572254, |
| "loss": 1.1112, |
| "step": 821 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 0.0002457225433526011, |
| "loss": 1.116, |
| "step": 822 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 0.00024554913294797686, |
| "loss": 0.9539, |
| "step": 823 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 0.00024537572254335256, |
| "loss": 0.9223, |
| "step": 824 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 0.0002452023121387283, |
| "loss": 1.1896, |
| "step": 825 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 0.000245028901734104, |
| "loss": 1.1, |
| "step": 826 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 0.00024485549132947975, |
| "loss": 1.0543, |
| "step": 827 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 0.0002446820809248555, |
| "loss": 1.1655, |
| "step": 828 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 0.0002445086705202312, |
| "loss": 1.1417, |
| "step": 829 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 0.0002443352601156069, |
| "loss": 1.1439, |
| "step": 830 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 0.00024416184971098263, |
| "loss": 1.0787, |
| "step": 831 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 0.00024398843930635838, |
| "loss": 1.1379, |
| "step": 832 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 0.00024381502890173407, |
| "loss": 0.8677, |
| "step": 833 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 0.0002436416184971098, |
| "loss": 0.973, |
| "step": 834 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 0.00024346820809248554, |
| "loss": 0.9722, |
| "step": 835 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 0.00024329479768786126, |
| "loss": 0.9171, |
| "step": 836 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 0.00024312138728323698, |
| "loss": 0.8595, |
| "step": 837 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 0.00024294797687861267, |
| "loss": 0.8969, |
| "step": 838 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 0.00024277456647398842, |
| "loss": 0.8405, |
| "step": 839 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 0.00024260115606936414, |
| "loss": 0.8156, |
| "step": 840 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 0.00024242774566473986, |
| "loss": 0.9272, |
| "step": 841 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 0.00024225433526011558, |
| "loss": 0.9049, |
| "step": 842 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 0.00024208092485549133, |
| "loss": 0.696, |
| "step": 843 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 0.00024190751445086702, |
| "loss": 0.8948, |
| "step": 844 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 0.00024173410404624275, |
| "loss": 0.9013, |
| "step": 845 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 0.00024156069364161847, |
| "loss": 0.811, |
| "step": 846 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 0.0002413872832369942, |
| "loss": 0.8489, |
| "step": 847 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 0.00024121387283236993, |
| "loss": 0.896, |
| "step": 848 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 0.00024104046242774563, |
| "loss": 0.9425, |
| "step": 849 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 0.00024086705202312135, |
| "loss": 0.8923, |
| "step": 850 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 0.0002406936416184971, |
| "loss": 0.9176, |
| "step": 851 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 0.00024052023121387282, |
| "loss": 0.891, |
| "step": 852 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 0.00024034682080924854, |
| "loss": 0.85, |
| "step": 853 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 0.00024017341040462423, |
| "loss": 0.8904, |
| "step": 854 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 0.00023999999999999998, |
| "loss": 1.0049, |
| "step": 855 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 0.0002398265895953757, |
| "loss": 0.8203, |
| "step": 856 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 0.00023965317919075142, |
| "loss": 0.7922, |
| "step": 857 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 0.00023947976878612714, |
| "loss": 0.9131, |
| "step": 858 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 0.0002393063583815029, |
| "loss": 0.7321, |
| "step": 859 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 0.0002391329479768786, |
| "loss": 0.8948, |
| "step": 860 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 0.0002389595375722543, |
| "loss": 0.8581, |
| "step": 861 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 0.00023878612716763002, |
| "loss": 1.013, |
| "step": 862 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 0.00023861271676300577, |
| "loss": 1.0727, |
| "step": 863 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 0.0002384393063583815, |
| "loss": 0.8099, |
| "step": 864 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 0.0002382658959537572, |
| "loss": 0.8571, |
| "step": 865 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 0.0002380924855491329, |
| "loss": 0.7723, |
| "step": 866 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 0.00023791907514450865, |
| "loss": 0.9566, |
| "step": 867 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 0.00023774566473988437, |
| "loss": 0.8759, |
| "step": 868 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 0.0002375722543352601, |
| "loss": 0.8936, |
| "step": 869 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 0.00023739884393063582, |
| "loss": 0.7426, |
| "step": 870 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 0.00023722543352601156, |
| "loss": 0.6689, |
| "step": 871 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 0.00023705202312138726, |
| "loss": 0.774, |
| "step": 872 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 0.00023687861271676298, |
| "loss": 0.6769, |
| "step": 873 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 0.0002367052023121387, |
| "loss": 0.829, |
| "step": 874 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 0.00023653179190751445, |
| "loss": 0.7491, |
| "step": 875 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 0.00023635838150289017, |
| "loss": 0.7658, |
| "step": 876 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 0.00023618497109826586, |
| "loss": 0.7367, |
| "step": 877 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 0.00023601156069364158, |
| "loss": 0.7534, |
| "step": 878 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 0.00023583815028901733, |
| "loss": 0.762, |
| "step": 879 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 0.00023566473988439305, |
| "loss": 0.6712, |
| "step": 880 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 0.00023549132947976877, |
| "loss": 0.6542, |
| "step": 881 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 0.0002353179190751445, |
| "loss": 0.7995, |
| "step": 882 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 0.00023514450867052024, |
| "loss": 0.7966, |
| "step": 883 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 0.00023497109826589593, |
| "loss": 0.8174, |
| "step": 884 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 0.00023479768786127165, |
| "loss": 0.7297, |
| "step": 885 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 0.00023462427745664737, |
| "loss": 0.8345, |
| "step": 886 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 0.00023445086705202312, |
| "loss": 0.7886, |
| "step": 887 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 0.00023427745664739884, |
| "loss": 0.7566, |
| "step": 888 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 0.00023410404624277454, |
| "loss": 0.6875, |
| "step": 889 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 0.00023393063583815026, |
| "loss": 0.7308, |
| "step": 890 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 0.000233757225433526, |
| "loss": 0.6679, |
| "step": 891 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 0.00023358381502890173, |
| "loss": 0.7435, |
| "step": 892 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 0.00023341040462427745, |
| "loss": 0.6684, |
| "step": 893 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 0.00023323699421965314, |
| "loss": 0.5609, |
| "step": 894 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 0.00023306358381502886, |
| "loss": 0.523, |
| "step": 895 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 0.0002328901734104046, |
| "loss": 0.5643, |
| "step": 896 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 0.00023271676300578033, |
| "loss": 0.4925, |
| "step": 897 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 0.00023254335260115605, |
| "loss": 0.6966, |
| "step": 898 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 0.00023236994219653174, |
| "loss": 0.5729, |
| "step": 899 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 0.0002321965317919075, |
| "loss": 0.613, |
| "step": 900 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 0.0002320231213872832, |
| "loss": 0.4447, |
| "step": 901 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 0.00023184971098265893, |
| "loss": 0.4807, |
| "step": 902 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 0.00023167630057803465, |
| "loss": 0.6497, |
| "step": 903 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 0.0002315028901734104, |
| "loss": 0.7836, |
| "step": 904 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 0.00023132947976878612, |
| "loss": 0.5778, |
| "step": 905 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 0.00023115606936416181, |
| "loss": 0.564, |
| "step": 906 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 0.00023098265895953754, |
| "loss": 0.5799, |
| "step": 907 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 0.00023080924855491328, |
| "loss": 0.544, |
| "step": 908 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 0.000230635838150289, |
| "loss": 0.5315, |
| "step": 909 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 0.00023046242774566472, |
| "loss": 0.4631, |
| "step": 910 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 0.00023028901734104042, |
| "loss": 0.4721, |
| "step": 911 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 0.00023011560693641617, |
| "loss": 0.548, |
| "step": 912 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 0.0002299421965317919, |
| "loss": 0.6804, |
| "step": 913 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 0.0002297687861271676, |
| "loss": 0.5421, |
| "step": 914 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 0.00022959537572254333, |
| "loss": 0.518, |
| "step": 915 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 0.00022942196531791908, |
| "loss": 0.6228, |
| "step": 916 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 0.00022924855491329477, |
| "loss": 0.5709, |
| "step": 917 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 0.0002290751445086705, |
| "loss": 0.6619, |
| "step": 918 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 0.0002289017341040462, |
| "loss": 0.7196, |
| "step": 919 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 0.00022872832369942196, |
| "loss": 0.5508, |
| "step": 920 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 0.00022855491329479768, |
| "loss": 0.6211, |
| "step": 921 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 0.00022838150289017337, |
| "loss": 0.5181, |
| "step": 922 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 0.0002282080924855491, |
| "loss": 0.5331, |
| "step": 923 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 0.00022803468208092484, |
| "loss": 0.6462, |
| "step": 924 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 0.00022786127167630056, |
| "loss": 0.5711, |
| "step": 925 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 0.00022768786127167628, |
| "loss": 0.4471, |
| "step": 926 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 0.00022751445086705198, |
| "loss": 0.4737, |
| "step": 927 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 0.00022734104046242772, |
| "loss": 0.5854, |
| "step": 928 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 0.00022716763005780344, |
| "loss": 0.5274, |
| "step": 929 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 0.00022699421965317917, |
| "loss": 0.6685, |
| "step": 930 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 0.00022682080924855489, |
| "loss": 0.7175, |
| "step": 931 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 0.00022664739884393063, |
| "loss": 0.543, |
| "step": 932 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 0.00022647398843930635, |
| "loss": 0.5324, |
| "step": 933 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 0.00022630057803468205, |
| "loss": 0.5556, |
| "step": 934 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 0.00022612716763005777, |
| "loss": 0.6539, |
| "step": 935 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 0.00022595375722543352, |
| "loss": 0.5233, |
| "step": 936 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 0.00022578034682080924, |
| "loss": 0.5143, |
| "step": 937 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 0.00022560693641618496, |
| "loss": 0.4524, |
| "step": 938 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 0.00022543352601156065, |
| "loss": 0.4665, |
| "step": 939 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 0.0002252601156069364, |
| "loss": 0.4841, |
| "step": 940 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 0.00022508670520231212, |
| "loss": 0.6232, |
| "step": 941 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 0.00022491329479768784, |
| "loss": 0.4048, |
| "step": 942 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 0.00022473988439306356, |
| "loss": 0.4909, |
| "step": 943 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 0.0002245664739884393, |
| "loss": 0.4686, |
| "step": 944 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 0.000224393063583815, |
| "loss": 0.3291, |
| "step": 945 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 0.00022421965317919072, |
| "loss": 0.5089, |
| "step": 946 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 0.00022404624277456644, |
| "loss": 0.4787, |
| "step": 947 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 0.0002238728323699422, |
| "loss": 0.4379, |
| "step": 948 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 0.0002236994219653179, |
| "loss": 0.4958, |
| "step": 949 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 0.0002235260115606936, |
| "loss": 0.4476, |
| "step": 950 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 0.00022335260115606933, |
| "loss": 0.487, |
| "step": 951 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 0.00022317919075144507, |
| "loss": 0.5853, |
| "step": 952 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 0.0002230057803468208, |
| "loss": 0.505, |
| "step": 953 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 0.00022283236994219652, |
| "loss": 0.5515, |
| "step": 954 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 0.00022265895953757224, |
| "loss": 0.501, |
| "step": 955 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 0.00022248554913294798, |
| "loss": 0.5601, |
| "step": 956 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 0.00022231213872832368, |
| "loss": 0.3959, |
| "step": 957 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 0.0002221387283236994, |
| "loss": 0.4508, |
| "step": 958 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 0.00022196531791907512, |
| "loss": 0.4251, |
| "step": 959 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 0.00022179190751445087, |
| "loss": 0.4383, |
| "step": 960 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 0.0002216184971098266, |
| "loss": 0.4972, |
| "step": 961 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 0.00022144508670520228, |
| "loss": 0.5455, |
| "step": 962 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 0.000221271676300578, |
| "loss": 0.4428, |
| "step": 963 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 0.00022109826589595375, |
| "loss": 0.4096, |
| "step": 964 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 0.00022092485549132947, |
| "loss": 0.5214, |
| "step": 965 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 0.0002207514450867052, |
| "loss": 0.4911, |
| "step": 966 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 0.00022057803468208088, |
| "loss": 0.4292, |
| "step": 967 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 0.00022040462427745663, |
| "loss": 0.6358, |
| "step": 968 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 0.00022023121387283235, |
| "loss": 0.4455, |
| "step": 969 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 0.00022005780346820807, |
| "loss": 0.4576, |
| "step": 970 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 0.0002198843930635838, |
| "loss": 0.4652, |
| "step": 971 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 0.00021971098265895954, |
| "loss": 0.3858, |
| "step": 972 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 0.00021953757225433524, |
| "loss": 0.3973, |
| "step": 973 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 0.00021936416184971096, |
| "loss": 0.4022, |
| "step": 974 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 0.00021919075144508668, |
| "loss": 0.4554, |
| "step": 975 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 0.00021901734104046243, |
| "loss": 0.444, |
| "step": 976 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 0.00021884393063583815, |
| "loss": 0.5793, |
| "step": 977 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 0.00021867052023121384, |
| "loss": 0.4642, |
| "step": 978 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 0.00021849710982658956, |
| "loss": 0.4042, |
| "step": 979 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 0.0002183236994219653, |
| "loss": 0.4064, |
| "step": 980 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 0.00021815028901734103, |
| "loss": 0.6725, |
| "step": 981 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 0.00021797687861271675, |
| "loss": 0.4594, |
| "step": 982 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 0.00021780346820809247, |
| "loss": 0.5236, |
| "step": 983 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 0.00021763005780346822, |
| "loss": 0.5213, |
| "step": 984 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 0.0002174566473988439, |
| "loss": 0.4491, |
| "step": 985 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 0.00021728323699421963, |
| "loss": 0.4024, |
| "step": 986 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 0.00021710982658959535, |
| "loss": 0.3839, |
| "step": 987 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 0.0002169364161849711, |
| "loss": 0.5189, |
| "step": 988 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 0.00021676300578034682, |
| "loss": 0.397, |
| "step": 989 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 0.00021658959537572251, |
| "loss": 0.4426, |
| "step": 990 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 0.00021641618497109824, |
| "loss": 0.5494, |
| "step": 991 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 0.00021624277456647396, |
| "loss": 0.4565, |
| "step": 992 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 0.0002160693641618497, |
| "loss": 0.5105, |
| "step": 993 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 0.00021589595375722542, |
| "loss": 0.4438, |
| "step": 994 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 0.00021572254335260112, |
| "loss": 0.4674, |
| "step": 995 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 0.00021554913294797684, |
| "loss": 0.4639, |
| "step": 996 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 0.00021537572254335259, |
| "loss": 0.4709, |
| "step": 997 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 0.0002152023121387283, |
| "loss": 0.4408, |
| "step": 998 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 0.00021502890173410403, |
| "loss": 0.4162, |
| "step": 999 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 0.00021485549132947972, |
| "loss": 0.5548, |
| "step": 1000 |
| }, |
| { |
| "epoch": 2.24, |
| "eval_loss": 0.5270602703094482, |
| "eval_runtime": 118.9392, |
| "eval_samples_per_second": 22.213, |
| "eval_steps_per_second": 0.698, |
| "eval_wer": 0.14593415311384372, |
| "step": 1000 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 0.00021468208092485547, |
| "loss": 0.4707, |
| "step": 1001 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 0.0002145086705202312, |
| "loss": 0.5127, |
| "step": 1002 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 0.0002143352601156069, |
| "loss": 0.4853, |
| "step": 1003 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 0.00021416184971098263, |
| "loss": 0.4646, |
| "step": 1004 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 0.00021398843930635838, |
| "loss": 0.4634, |
| "step": 1005 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 0.0002138150289017341, |
| "loss": 0.4358, |
| "step": 1006 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 0.0002136416184971098, |
| "loss": 0.3871, |
| "step": 1007 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 0.00021346820809248551, |
| "loss": 0.4333, |
| "step": 1008 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 0.00021329479768786126, |
| "loss": 0.4355, |
| "step": 1009 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 0.00021312138728323698, |
| "loss": 0.3735, |
| "step": 1010 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 0.0002129479768786127, |
| "loss": 0.3619, |
| "step": 1011 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 0.0002127745664739884, |
| "loss": 0.4014, |
| "step": 1012 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 0.00021260115606936414, |
| "loss": 0.4061, |
| "step": 1013 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 0.00021242774566473987, |
| "loss": 0.4948, |
| "step": 1014 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 0.00021225433526011559, |
| "loss": 0.4899, |
| "step": 1015 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 0.0002120809248554913, |
| "loss": 0.4048, |
| "step": 1016 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 0.00021190751445086705, |
| "loss": 0.4533, |
| "step": 1017 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 0.00021173410404624275, |
| "loss": 0.4909, |
| "step": 1018 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 0.00021156069364161847, |
| "loss": 0.4013, |
| "step": 1019 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 0.0002113872832369942, |
| "loss": 0.4059, |
| "step": 1020 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 0.00021121387283236994, |
| "loss": 0.4134, |
| "step": 1021 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 0.00021104046242774566, |
| "loss": 0.4133, |
| "step": 1022 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 0.00021086705202312135, |
| "loss": 0.4246, |
| "step": 1023 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 0.00021069364161849707, |
| "loss": 0.4937, |
| "step": 1024 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 0.00021052023121387282, |
| "loss": 0.3916, |
| "step": 1025 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 0.00021034682080924854, |
| "loss": 0.4211, |
| "step": 1026 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 0.00021017341040462426, |
| "loss": 0.4359, |
| "step": 1027 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 0.00020999999999999998, |
| "loss": 0.4315, |
| "step": 1028 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 0.00020982658959537573, |
| "loss": 0.3939, |
| "step": 1029 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 0.00020965317919075142, |
| "loss": 0.4799, |
| "step": 1030 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 0.00020947976878612714, |
| "loss": 0.4378, |
| "step": 1031 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 0.00020930635838150286, |
| "loss": 0.3595, |
| "step": 1032 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 0.0002091329479768786, |
| "loss": 0.3942, |
| "step": 1033 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 0.00020895953757225433, |
| "loss": 0.4238, |
| "step": 1034 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 0.00020878612716763003, |
| "loss": 0.4965, |
| "step": 1035 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 0.00020861271676300575, |
| "loss": 0.4925, |
| "step": 1036 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 0.0002084393063583815, |
| "loss": 0.3804, |
| "step": 1037 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 0.00020826589595375722, |
| "loss": 0.4052, |
| "step": 1038 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 0.00020809248554913294, |
| "loss": 0.4838, |
| "step": 1039 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 0.00020791907514450863, |
| "loss": 0.5659, |
| "step": 1040 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 0.00020774566473988438, |
| "loss": 0.4035, |
| "step": 1041 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 0.0002075722543352601, |
| "loss": 0.4541, |
| "step": 1042 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 0.00020739884393063582, |
| "loss": 0.4914, |
| "step": 1043 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 0.00020722543352601154, |
| "loss": 0.5366, |
| "step": 1044 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 0.0002070520231213873, |
| "loss": 0.3606, |
| "step": 1045 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 0.00020687861271676298, |
| "loss": 0.5669, |
| "step": 1046 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 0.0002067052023121387, |
| "loss": 0.4359, |
| "step": 1047 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 0.00020653179190751442, |
| "loss": 0.3518, |
| "step": 1048 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 0.00020635838150289017, |
| "loss": 0.3817, |
| "step": 1049 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 0.0002061849710982659, |
| "loss": 0.381, |
| "step": 1050 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 0.00020601156069364158, |
| "loss": 0.3448, |
| "step": 1051 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 0.0002058381502890173, |
| "loss": 0.3824, |
| "step": 1052 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 0.00020566473988439305, |
| "loss": 0.3985, |
| "step": 1053 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 0.00020549132947976877, |
| "loss": 0.4028, |
| "step": 1054 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 0.0002053179190751445, |
| "loss": 0.4693, |
| "step": 1055 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 0.00020514450867052021, |
| "loss": 0.4034, |
| "step": 1056 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 0.00020497109826589596, |
| "loss": 0.4666, |
| "step": 1057 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 0.00020479768786127166, |
| "loss": 0.4672, |
| "step": 1058 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 0.00020462427745664738, |
| "loss": 0.3359, |
| "step": 1059 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 0.0002044508670520231, |
| "loss": 0.3192, |
| "step": 1060 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 0.00020427745664739885, |
| "loss": 0.416, |
| "step": 1061 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 0.00020410404624277457, |
| "loss": 0.435, |
| "step": 1062 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 0.00020393063583815026, |
| "loss": 0.4046, |
| "step": 1063 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 0.00020375722543352598, |
| "loss": 0.4171, |
| "step": 1064 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 0.00020358381502890173, |
| "loss": 0.3897, |
| "step": 1065 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 0.00020341040462427745, |
| "loss": 0.3527, |
| "step": 1066 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 0.00020323699421965317, |
| "loss": 0.3379, |
| "step": 1067 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 0.00020306358381502886, |
| "loss": 0.4694, |
| "step": 1068 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 0.0002028901734104046, |
| "loss": 0.3206, |
| "step": 1069 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 0.00020271676300578033, |
| "loss": 0.3575, |
| "step": 1070 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 0.00020254335260115605, |
| "loss": 0.4137, |
| "step": 1071 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 0.00020236994219653177, |
| "loss": 0.3825, |
| "step": 1072 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 0.00020219653179190752, |
| "loss": 0.37, |
| "step": 1073 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 0.00020202312138728321, |
| "loss": 0.3727, |
| "step": 1074 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 0.00020184971098265893, |
| "loss": 0.3525, |
| "step": 1075 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 0.00020167630057803466, |
| "loss": 0.3301, |
| "step": 1076 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 0.0002015028901734104, |
| "loss": 0.3617, |
| "step": 1077 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 0.00020132947976878612, |
| "loss": 0.3489, |
| "step": 1078 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 0.00020115606936416184, |
| "loss": 0.4305, |
| "step": 1079 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 0.00020098265895953754, |
| "loss": 0.5362, |
| "step": 1080 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 0.00020080924855491329, |
| "loss": 0.4995, |
| "step": 1081 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 0.000200635838150289, |
| "loss": 0.3763, |
| "step": 1082 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 0.00020046242774566473, |
| "loss": 0.3276, |
| "step": 1083 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 0.00020028901734104045, |
| "loss": 0.3887, |
| "step": 1084 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 0.0002001156069364162, |
| "loss": 0.3147, |
| "step": 1085 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 0.0001999421965317919, |
| "loss": 0.4487, |
| "step": 1086 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 0.0001997687861271676, |
| "loss": 0.3799, |
| "step": 1087 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 0.00019959537572254333, |
| "loss": 0.4059, |
| "step": 1088 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 0.00019942196531791905, |
| "loss": 0.4773, |
| "step": 1089 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 0.0001992485549132948, |
| "loss": 0.4415, |
| "step": 1090 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 0.0001990751445086705, |
| "loss": 0.3739, |
| "step": 1091 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 0.0001989017341040462, |
| "loss": 0.5133, |
| "step": 1092 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 0.00019872832369942193, |
| "loss": 0.5235, |
| "step": 1093 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 0.00019855491329479768, |
| "loss": 0.3454, |
| "step": 1094 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 0.0001983815028901734, |
| "loss": 0.5169, |
| "step": 1095 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 0.0001982080924855491, |
| "loss": 0.3759, |
| "step": 1096 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 0.00019803468208092482, |
| "loss": 0.3667, |
| "step": 1097 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 0.00019786127167630056, |
| "loss": 0.4812, |
| "step": 1098 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 0.00019768786127167629, |
| "loss": 0.342, |
| "step": 1099 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 0.000197514450867052, |
| "loss": 0.4435, |
| "step": 1100 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 0.0001973410404624277, |
| "loss": 0.3145, |
| "step": 1101 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 0.00019716763005780345, |
| "loss": 0.363, |
| "step": 1102 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 0.00019699421965317917, |
| "loss": 0.4689, |
| "step": 1103 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 0.0001968208092485549, |
| "loss": 0.3947, |
| "step": 1104 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 0.0001966473988439306, |
| "loss": 0.5535, |
| "step": 1105 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 0.00019647398843930636, |
| "loss": 0.4287, |
| "step": 1106 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 0.00019630057803468208, |
| "loss": 0.3676, |
| "step": 1107 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 0.00019612716763005777, |
| "loss": 0.3291, |
| "step": 1108 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 0.0001959537572254335, |
| "loss": 0.4332, |
| "step": 1109 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 0.00019578034682080924, |
| "loss": 0.3734, |
| "step": 1110 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 0.00019560693641618496, |
| "loss": 0.3242, |
| "step": 1111 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 0.00019543352601156068, |
| "loss": 0.3738, |
| "step": 1112 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 0.00019526011560693637, |
| "loss": 0.3526, |
| "step": 1113 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 0.00019508670520231212, |
| "loss": 0.4022, |
| "step": 1114 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 0.00019491329479768784, |
| "loss": 0.4671, |
| "step": 1115 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 0.00019473988439306356, |
| "loss": 0.4165, |
| "step": 1116 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 0.00019456647398843928, |
| "loss": 0.8639, |
| "step": 1117 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 0.00019439306358381503, |
| "loss": 0.6203, |
| "step": 1118 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 0.00019421965317919073, |
| "loss": 0.3654, |
| "step": 1119 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 0.00019404624277456645, |
| "loss": 0.716, |
| "step": 1120 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 0.00019387283236994217, |
| "loss": 0.475, |
| "step": 1121 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 0.00019369942196531792, |
| "loss": 0.6724, |
| "step": 1122 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 0.00019352601156069364, |
| "loss": 0.4456, |
| "step": 1123 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 0.00019335260115606933, |
| "loss": 0.5936, |
| "step": 1124 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 0.00019317919075144505, |
| "loss": 0.3774, |
| "step": 1125 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 0.0001930057803468208, |
| "loss": 0.3277, |
| "step": 1126 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 0.00019283236994219652, |
| "loss": 0.3743, |
| "step": 1127 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 0.00019265895953757224, |
| "loss": 0.4933, |
| "step": 1128 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 0.00019248554913294796, |
| "loss": 0.4362, |
| "step": 1129 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 0.0001923121387283237, |
| "loss": 0.4059, |
| "step": 1130 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 0.0001921387283236994, |
| "loss": 0.6497, |
| "step": 1131 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 0.00019196531791907512, |
| "loss": 0.5414, |
| "step": 1132 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 0.00019179190751445084, |
| "loss": 0.4282, |
| "step": 1133 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 0.0001916184971098266, |
| "loss": 0.3485, |
| "step": 1134 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 0.0001914450867052023, |
| "loss": 0.5445, |
| "step": 1135 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 0.000191271676300578, |
| "loss": 0.3332, |
| "step": 1136 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 0.000191271676300578, |
| "loss": 0.4662, |
| "step": 1137 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 0.00019109826589595373, |
| "loss": 0.6327, |
| "step": 1138 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 0.00019092485549132947, |
| "loss": 0.467, |
| "step": 1139 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 0.0001907514450867052, |
| "loss": 0.4963, |
| "step": 1140 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 0.00019057803468208091, |
| "loss": 0.4271, |
| "step": 1141 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 0.0001904046242774566, |
| "loss": 0.5581, |
| "step": 1142 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 0.00019023121387283236, |
| "loss": 0.4352, |
| "step": 1143 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 0.00019005780346820808, |
| "loss": 0.5164, |
| "step": 1144 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 0.0001898843930635838, |
| "loss": 0.3327, |
| "step": 1145 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 0.00018971098265895952, |
| "loss": 0.3323, |
| "step": 1146 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 0.00018953757225433527, |
| "loss": 0.3687, |
| "step": 1147 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 0.00018936416184971096, |
| "loss": 0.3628, |
| "step": 1148 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 0.00018919075144508668, |
| "loss": 0.364, |
| "step": 1149 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 0.0001890173410404624, |
| "loss": 0.3263, |
| "step": 1150 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 0.00018884393063583815, |
| "loss": 0.3732, |
| "step": 1151 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 0.00018867052023121387, |
| "loss": 0.313, |
| "step": 1152 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 0.0001884971098265896, |
| "loss": 0.6869, |
| "step": 1153 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 0.00018832369942196528, |
| "loss": 0.3929, |
| "step": 1154 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 0.00018815028901734103, |
| "loss": 0.4072, |
| "step": 1155 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 0.00018797687861271675, |
| "loss": 0.3608, |
| "step": 1156 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 0.00018780346820809247, |
| "loss": 0.5853, |
| "step": 1157 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 0.0001876300578034682, |
| "loss": 0.3369, |
| "step": 1158 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 0.00018745664739884394, |
| "loss": 0.393, |
| "step": 1159 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 0.00018728323699421963, |
| "loss": 0.367, |
| "step": 1160 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 0.00018710982658959536, |
| "loss": 0.3639, |
| "step": 1161 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 0.00018693641618497108, |
| "loss": 0.3241, |
| "step": 1162 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 0.00018676300578034682, |
| "loss": 0.4866, |
| "step": 1163 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 0.00018658959537572254, |
| "loss": 0.4412, |
| "step": 1164 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 0.00018641618497109824, |
| "loss": 0.6046, |
| "step": 1165 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 0.00018624277456647396, |
| "loss": 0.3704, |
| "step": 1166 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 0.0001860693641618497, |
| "loss": 0.4062, |
| "step": 1167 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 0.00018589595375722543, |
| "loss": 0.494, |
| "step": 1168 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 0.00018572254335260115, |
| "loss": 0.3205, |
| "step": 1169 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 0.00018554913294797684, |
| "loss": 0.4347, |
| "step": 1170 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 0.0001853757225433526, |
| "loss": 0.3697, |
| "step": 1171 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 0.0001852023121387283, |
| "loss": 0.3323, |
| "step": 1172 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 0.00018502890173410403, |
| "loss": 0.2362, |
| "step": 1173 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 0.00018485549132947975, |
| "loss": 0.3694, |
| "step": 1174 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 0.0001846820809248555, |
| "loss": 0.3117, |
| "step": 1175 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 0.0001845086705202312, |
| "loss": 0.4406, |
| "step": 1176 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 0.0001843352601156069, |
| "loss": 0.3276, |
| "step": 1177 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 0.00018416184971098263, |
| "loss": 0.486, |
| "step": 1178 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 0.00018398843930635838, |
| "loss": 0.4367, |
| "step": 1179 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 0.0001838150289017341, |
| "loss": 0.5246, |
| "step": 1180 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 0.00018364161849710982, |
| "loss": 0.2472, |
| "step": 1181 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 0.00018346820809248552, |
| "loss": 0.6065, |
| "step": 1182 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 0.00018329479768786124, |
| "loss": 0.4468, |
| "step": 1183 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 0.00018312138728323698, |
| "loss": 0.6998, |
| "step": 1184 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 0.0001829479768786127, |
| "loss": 0.6184, |
| "step": 1185 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 0.00018277456647398843, |
| "loss": 0.9225, |
| "step": 1186 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 0.00018260115606936412, |
| "loss": 1.4052, |
| "step": 1187 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 0.00018242774566473987, |
| "loss": 1.6673, |
| "step": 1188 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 0.0001822543352601156, |
| "loss": 1.2018, |
| "step": 1189 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 0.0001820809248554913, |
| "loss": 0.8561, |
| "step": 1190 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 0.00018190751445086703, |
| "loss": 0.5433, |
| "step": 1191 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 0.00018173410404624278, |
| "loss": 0.7035, |
| "step": 1192 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 0.00018156069364161847, |
| "loss": 0.5432, |
| "step": 1193 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 0.0001813872832369942, |
| "loss": 0.4427, |
| "step": 1194 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 0.0001812138728323699, |
| "loss": 0.6715, |
| "step": 1195 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 0.00018104046242774566, |
| "loss": 0.6216, |
| "step": 1196 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 0.00018086705202312138, |
| "loss": 0.4459, |
| "step": 1197 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 0.00018069364161849707, |
| "loss": 0.2628, |
| "step": 1198 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 0.0001805202312138728, |
| "loss": 0.4558, |
| "step": 1199 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 0.00018034682080924854, |
| "loss": 0.654, |
| "step": 1200 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 0.00018017341040462426, |
| "loss": 0.3886, |
| "step": 1201 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 0.00017999999999999998, |
| "loss": 0.3788, |
| "step": 1202 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 0.0001798265895953757, |
| "loss": 0.363, |
| "step": 1203 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 0.00017965317919075145, |
| "loss": 0.5902, |
| "step": 1204 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 0.00017947976878612715, |
| "loss": 0.6474, |
| "step": 1205 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 0.00017930635838150287, |
| "loss": 0.4237, |
| "step": 1206 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 0.0001791329479768786, |
| "loss": 0.3768, |
| "step": 1207 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 0.00017895953757225434, |
| "loss": 0.4415, |
| "step": 1208 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 0.00017878612716763006, |
| "loss": 0.3807, |
| "step": 1209 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 0.00017861271676300575, |
| "loss": 0.3183, |
| "step": 1210 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 0.00017843930635838147, |
| "loss": 0.3533, |
| "step": 1211 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 0.00017826589595375722, |
| "loss": 0.2891, |
| "step": 1212 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 0.00017809248554913294, |
| "loss": 0.5209, |
| "step": 1213 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 0.00017791907514450866, |
| "loss": 0.4162, |
| "step": 1214 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 0.00017774566473988435, |
| "loss": 0.4814, |
| "step": 1215 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 0.0001775722543352601, |
| "loss": 0.492, |
| "step": 1216 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 0.00017739884393063582, |
| "loss": 0.3531, |
| "step": 1217 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 0.00017722543352601154, |
| "loss": 0.5345, |
| "step": 1218 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 0.00017705202312138726, |
| "loss": 0.3931, |
| "step": 1219 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 0.000176878612716763, |
| "loss": 0.3551, |
| "step": 1220 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 0.0001767052023121387, |
| "loss": 0.3931, |
| "step": 1221 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 0.00017653179190751442, |
| "loss": 0.2904, |
| "step": 1222 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 0.00017635838150289015, |
| "loss": 0.3068, |
| "step": 1223 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 0.0001761849710982659, |
| "loss": 0.3169, |
| "step": 1224 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 0.00017601156069364161, |
| "loss": 0.2912, |
| "step": 1225 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 0.0001758381502890173, |
| "loss": 0.4069, |
| "step": 1226 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 0.00017566473988439303, |
| "loss": 0.3083, |
| "step": 1227 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 0.00017549132947976878, |
| "loss": 0.2627, |
| "step": 1228 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 0.0001753179190751445, |
| "loss": 0.2926, |
| "step": 1229 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 0.00017514450867052022, |
| "loss": 0.3185, |
| "step": 1230 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 0.00017497109826589594, |
| "loss": 0.3032, |
| "step": 1231 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 0.00017479768786127169, |
| "loss": 0.2638, |
| "step": 1232 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 0.00017462427745664738, |
| "loss": 0.2871, |
| "step": 1233 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 0.0001744508670520231, |
| "loss": 0.2996, |
| "step": 1234 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 0.00017427745664739882, |
| "loss": 0.294, |
| "step": 1235 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 0.00017410404624277457, |
| "loss": 0.3181, |
| "step": 1236 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 0.0001739306358381503, |
| "loss": 0.4262, |
| "step": 1237 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 0.00017375722543352598, |
| "loss": 0.3019, |
| "step": 1238 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 0.0001735838150289017, |
| "loss": 0.2952, |
| "step": 1239 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 0.00017341040462427745, |
| "loss": 0.2982, |
| "step": 1240 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 0.00017323699421965317, |
| "loss": 0.3166, |
| "step": 1241 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 0.0001730635838150289, |
| "loss": 0.3866, |
| "step": 1242 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 0.00017289017341040459, |
| "loss": 0.351, |
| "step": 1243 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 0.00017271676300578033, |
| "loss": 0.2785, |
| "step": 1244 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 0.00017254335260115605, |
| "loss": 0.3543, |
| "step": 1245 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 0.00017236994219653178, |
| "loss": 0.2908, |
| "step": 1246 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 0.0001721965317919075, |
| "loss": 0.3357, |
| "step": 1247 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 0.00017202312138728324, |
| "loss": 0.2826, |
| "step": 1248 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 0.00017184971098265894, |
| "loss": 0.3079, |
| "step": 1249 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 0.00017167630057803466, |
| "loss": 0.3205, |
| "step": 1250 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 0.00017150289017341038, |
| "loss": 0.2911, |
| "step": 1251 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 0.00017132947976878613, |
| "loss": 0.3796, |
| "step": 1252 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 0.00017115606936416185, |
| "loss": 0.2993, |
| "step": 1253 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 0.00017098265895953757, |
| "loss": 0.2476, |
| "step": 1254 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 0.00017080924855491326, |
| "loss": 0.2763, |
| "step": 1255 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 0.000170635838150289, |
| "loss": 0.3013, |
| "step": 1256 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 0.00017046242774566473, |
| "loss": 0.3339, |
| "step": 1257 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 0.00017028901734104045, |
| "loss": 0.2878, |
| "step": 1258 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 0.00017011560693641617, |
| "loss": 0.3425, |
| "step": 1259 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 0.00016994219653179192, |
| "loss": 0.2596, |
| "step": 1260 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 0.0001697687861271676, |
| "loss": 0.2953, |
| "step": 1261 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 0.00016959537572254333, |
| "loss": 0.2909, |
| "step": 1262 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 0.00016942196531791905, |
| "loss": 0.2896, |
| "step": 1263 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 0.0001692485549132948, |
| "loss": 0.2979, |
| "step": 1264 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 0.00016907514450867052, |
| "loss": 0.282, |
| "step": 1265 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 0.00016890173410404622, |
| "loss": 0.29, |
| "step": 1266 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 0.00016872832369942194, |
| "loss": 0.3307, |
| "step": 1267 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 0.00016855491329479768, |
| "loss": 0.263, |
| "step": 1268 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 0.0001683815028901734, |
| "loss": 0.2849, |
| "step": 1269 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 0.00016820809248554913, |
| "loss": 0.3126, |
| "step": 1270 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 0.00016803468208092482, |
| "loss": 0.3519, |
| "step": 1271 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 0.00016786127167630057, |
| "loss": 0.282, |
| "step": 1272 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 0.0001676878612716763, |
| "loss": 0.4052, |
| "step": 1273 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 0.000167514450867052, |
| "loss": 0.3026, |
| "step": 1274 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 0.00016734104046242773, |
| "loss": 0.253, |
| "step": 1275 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 0.00016716763005780348, |
| "loss": 0.2237, |
| "step": 1276 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 0.0001669942196531792, |
| "loss": 0.2948, |
| "step": 1277 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 0.0001668208092485549, |
| "loss": 0.2899, |
| "step": 1278 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 0.0001666473988439306, |
| "loss": 0.2538, |
| "step": 1279 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 0.00016647398843930633, |
| "loss": 0.2898, |
| "step": 1280 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 0.00016630057803468208, |
| "loss": 0.2751, |
| "step": 1281 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 0.0001661271676300578, |
| "loss": 0.3052, |
| "step": 1282 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 0.0001659537572254335, |
| "loss": 0.4211, |
| "step": 1283 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 0.00016578034682080922, |
| "loss": 0.2663, |
| "step": 1284 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 0.00016560693641618496, |
| "loss": 0.2965, |
| "step": 1285 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 0.00016543352601156068, |
| "loss": 0.3267, |
| "step": 1286 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 0.0001652601156069364, |
| "loss": 0.3255, |
| "step": 1287 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 0.0001650867052023121, |
| "loss": 0.326, |
| "step": 1288 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 0.00016491329479768785, |
| "loss": 0.3655, |
| "step": 1289 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 0.00016473988439306357, |
| "loss": 0.3054, |
| "step": 1290 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 0.0001645664739884393, |
| "loss": 0.2307, |
| "step": 1291 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 0.000164393063583815, |
| "loss": 0.2537, |
| "step": 1292 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 0.00016421965317919076, |
| "loss": 0.3364, |
| "step": 1293 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 0.00016404624277456645, |
| "loss": 0.2798, |
| "step": 1294 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 0.00016387283236994217, |
| "loss": 0.389, |
| "step": 1295 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 0.0001636994219653179, |
| "loss": 0.2872, |
| "step": 1296 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 0.00016352601156069364, |
| "loss": 0.2433, |
| "step": 1297 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 0.00016335260115606936, |
| "loss": 0.3039, |
| "step": 1298 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 0.00016317919075144505, |
| "loss": 0.2641, |
| "step": 1299 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 0.00016300578034682077, |
| "loss": 0.2713, |
| "step": 1300 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 0.00016283236994219652, |
| "loss": 0.2484, |
| "step": 1301 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 0.00016265895953757224, |
| "loss": 0.3162, |
| "step": 1302 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 0.00016248554913294796, |
| "loss": 0.2975, |
| "step": 1303 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 0.00016231213872832368, |
| "loss": 0.2791, |
| "step": 1304 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 0.00016213872832369943, |
| "loss": 0.4289, |
| "step": 1305 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 0.00016196531791907512, |
| "loss": 0.2557, |
| "step": 1306 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 0.00016179190751445085, |
| "loss": 0.2643, |
| "step": 1307 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 0.00016161849710982657, |
| "loss": 0.342, |
| "step": 1308 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 0.00016144508670520231, |
| "loss": 0.289, |
| "step": 1309 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 0.00016127167630057803, |
| "loss": 0.2532, |
| "step": 1310 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 0.00016109826589595373, |
| "loss": 0.3021, |
| "step": 1311 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 0.00016092485549132945, |
| "loss": 0.2713, |
| "step": 1312 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 0.0001607514450867052, |
| "loss": 0.2929, |
| "step": 1313 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 0.00016057803468208092, |
| "loss": 0.2949, |
| "step": 1314 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 0.00016040462427745664, |
| "loss": 0.2371, |
| "step": 1315 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 0.00016023121387283233, |
| "loss": 0.2751, |
| "step": 1316 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 0.00016005780346820808, |
| "loss": 0.1453, |
| "step": 1317 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 0.0001598843930635838, |
| "loss": 0.2809, |
| "step": 1318 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 0.00015971098265895952, |
| "loss": 0.3211, |
| "step": 1319 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 0.00015953757225433524, |
| "loss": 0.2433, |
| "step": 1320 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 0.000159364161849711, |
| "loss": 0.3673, |
| "step": 1321 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 0.00015919075144508668, |
| "loss": 0.2716, |
| "step": 1322 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 0.0001590173410404624, |
| "loss": 0.3018, |
| "step": 1323 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 0.00015884393063583812, |
| "loss": 0.2872, |
| "step": 1324 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 0.00015867052023121387, |
| "loss": 0.275, |
| "step": 1325 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 0.0001584971098265896, |
| "loss": 0.2788, |
| "step": 1326 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 0.0001583236994219653, |
| "loss": 0.3164, |
| "step": 1327 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 0.000158150289017341, |
| "loss": 0.2567, |
| "step": 1328 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 0.00015797687861271675, |
| "loss": 0.2428, |
| "step": 1329 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 0.00015780346820809248, |
| "loss": 0.2842, |
| "step": 1330 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 0.0001576300578034682, |
| "loss": 0.2928, |
| "step": 1331 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 0.00015745664739884392, |
| "loss": 0.2571, |
| "step": 1332 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 0.00015728323699421966, |
| "loss": 0.2763, |
| "step": 1333 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 0.00015710982658959536, |
| "loss": 0.2756, |
| "step": 1334 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 0.00015693641618497108, |
| "loss": 0.2868, |
| "step": 1335 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 0.0001567630057803468, |
| "loss": 0.4741, |
| "step": 1336 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 0.00015658959537572255, |
| "loss": 0.2944, |
| "step": 1337 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 0.00015641618497109827, |
| "loss": 0.2631, |
| "step": 1338 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 0.00015624277456647396, |
| "loss": 0.1996, |
| "step": 1339 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 0.00015606936416184968, |
| "loss": 0.1874, |
| "step": 1340 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 0.00015589595375722543, |
| "loss": 0.1377, |
| "step": 1341 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 0.00015572254335260115, |
| "loss": 0.2273, |
| "step": 1342 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 0.00015554913294797687, |
| "loss": 0.2258, |
| "step": 1343 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 0.00015537572254335256, |
| "loss": 0.2377, |
| "step": 1344 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 0.0001552023121387283, |
| "loss": 0.2182, |
| "step": 1345 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 0.00015502890173410403, |
| "loss": 0.1965, |
| "step": 1346 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 0.00015485549132947975, |
| "loss": 0.3015, |
| "step": 1347 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 0.00015468208092485547, |
| "loss": 0.2984, |
| "step": 1348 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 0.00015450867052023122, |
| "loss": 0.2581, |
| "step": 1349 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 0.00015433526011560692, |
| "loss": 0.2413, |
| "step": 1350 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 0.00015416184971098264, |
| "loss": 0.2837, |
| "step": 1351 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 0.00015398843930635836, |
| "loss": 0.2595, |
| "step": 1352 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 0.0001538150289017341, |
| "loss": 0.4308, |
| "step": 1353 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 0.00015364161849710983, |
| "loss": 0.4388, |
| "step": 1354 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 0.00015346820809248555, |
| "loss": 0.385, |
| "step": 1355 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 0.00015329479768786124, |
| "loss": 0.3827, |
| "step": 1356 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 0.000153121387283237, |
| "loss": 0.3636, |
| "step": 1357 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 0.0001529479768786127, |
| "loss": 0.2955, |
| "step": 1358 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 0.00015277456647398843, |
| "loss": 0.2628, |
| "step": 1359 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 0.00015260115606936415, |
| "loss": 0.3776, |
| "step": 1360 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 0.0001524277456647399, |
| "loss": 0.2972, |
| "step": 1361 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 0.0001522543352601156, |
| "loss": 0.3507, |
| "step": 1362 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 0.0001520809248554913, |
| "loss": 0.2963, |
| "step": 1363 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 0.00015190751445086703, |
| "loss": 0.2439, |
| "step": 1364 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 0.00015173410404624278, |
| "loss": 0.2085, |
| "step": 1365 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 0.0001515606936416185, |
| "loss": 0.1758, |
| "step": 1366 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 0.0001513872832369942, |
| "loss": 0.1667, |
| "step": 1367 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 0.00015121387283236992, |
| "loss": 0.2066, |
| "step": 1368 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 0.00015104046242774566, |
| "loss": 0.1509, |
| "step": 1369 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 0.00015086705202312138, |
| "loss": 0.1903, |
| "step": 1370 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 0.0001506936416184971, |
| "loss": 0.1822, |
| "step": 1371 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 0.0001505202312138728, |
| "loss": 0.1901, |
| "step": 1372 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 0.00015034682080924855, |
| "loss": 0.2128, |
| "step": 1373 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 0.00015017341040462427, |
| "loss": 0.1537, |
| "step": 1374 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 0.00015, |
| "loss": 0.14, |
| "step": 1375 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 0.0001498265895953757, |
| "loss": 0.1598, |
| "step": 1376 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 0.00014965317919075143, |
| "loss": 0.1753, |
| "step": 1377 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 0.00014947976878612715, |
| "loss": 0.1595, |
| "step": 1378 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 0.00014930635838150287, |
| "loss": 0.2426, |
| "step": 1379 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 0.0001491329479768786, |
| "loss": 0.1954, |
| "step": 1380 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 0.0001489595375722543, |
| "loss": 0.1562, |
| "step": 1381 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 0.00014878612716763003, |
| "loss": 0.1535, |
| "step": 1382 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 0.00014861271676300578, |
| "loss": 0.1878, |
| "step": 1383 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 0.00014843930635838147, |
| "loss": 0.18, |
| "step": 1384 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 0.00014826589595375722, |
| "loss": 0.2594, |
| "step": 1385 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 0.00014809248554913291, |
| "loss": 0.1945, |
| "step": 1386 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 0.00014791907514450866, |
| "loss": 0.1456, |
| "step": 1387 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 0.00014774566473988438, |
| "loss": 0.185, |
| "step": 1388 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 0.0001475722543352601, |
| "loss": 0.2666, |
| "step": 1389 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 0.00014739884393063582, |
| "loss": 0.168, |
| "step": 1390 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 0.00014722543352601154, |
| "loss": 0.2095, |
| "step": 1391 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 0.00014705202312138727, |
| "loss": 0.1323, |
| "step": 1392 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 0.00014687861271676299, |
| "loss": 0.1846, |
| "step": 1393 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 0.0001467052023121387, |
| "loss": 0.1856, |
| "step": 1394 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 0.00014653179190751443, |
| "loss": 0.2443, |
| "step": 1395 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 0.00014635838150289015, |
| "loss": 0.1861, |
| "step": 1396 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 0.0001461849710982659, |
| "loss": 0.1837, |
| "step": 1397 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 0.0001460115606936416, |
| "loss": 0.1941, |
| "step": 1398 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 0.00014583815028901734, |
| "loss": 0.1246, |
| "step": 1399 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 0.00014566473988439306, |
| "loss": 0.2797, |
| "step": 1400 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 0.00014549132947976878, |
| "loss": 0.1976, |
| "step": 1401 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 0.0001453179190751445, |
| "loss": 0.71, |
| "step": 1402 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 0.00014514450867052022, |
| "loss": 0.1882, |
| "step": 1403 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 0.00014497109826589594, |
| "loss": 0.1466, |
| "step": 1404 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 0.00014479768786127166, |
| "loss": 0.2475, |
| "step": 1405 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 0.00014462427745664738, |
| "loss": 0.5029, |
| "step": 1406 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 0.0001444508670520231, |
| "loss": 0.3222, |
| "step": 1407 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 0.00014427745664739882, |
| "loss": 0.7426, |
| "step": 1408 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 0.00014410404624277454, |
| "loss": 0.2922, |
| "step": 1409 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 0.00014393063583815026, |
| "loss": 0.4414, |
| "step": 1410 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 0.000143757225433526, |
| "loss": 0.6412, |
| "step": 1411 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 0.0001435838150289017, |
| "loss": 0.4429, |
| "step": 1412 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 0.00014341040462427745, |
| "loss": 0.4897, |
| "step": 1413 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 0.00014323699421965317, |
| "loss": 0.533, |
| "step": 1414 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 0.0001430635838150289, |
| "loss": 0.2692, |
| "step": 1415 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 0.00014289017341040462, |
| "loss": 0.2146, |
| "step": 1416 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 0.00014271676300578034, |
| "loss": 0.4122, |
| "step": 1417 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 0.00014254335260115606, |
| "loss": 0.2113, |
| "step": 1418 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 0.00014236994219653178, |
| "loss": 0.2685, |
| "step": 1419 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 0.0001421965317919075, |
| "loss": 0.2451, |
| "step": 1420 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 0.00014202312138728322, |
| "loss": 0.2165, |
| "step": 1421 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 0.00014184971098265894, |
| "loss": 0.1565, |
| "step": 1422 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 0.00014167630057803466, |
| "loss": 0.2684, |
| "step": 1423 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 0.00014150289017341038, |
| "loss": 0.1941, |
| "step": 1424 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 0.00014132947976878613, |
| "loss": 0.3006, |
| "step": 1425 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 0.00014115606936416182, |
| "loss": 0.1705, |
| "step": 1426 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 0.00014098265895953757, |
| "loss": 0.2131, |
| "step": 1427 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 0.0001408092485549133, |
| "loss": 0.1889, |
| "step": 1428 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 0.000140635838150289, |
| "loss": 0.1561, |
| "step": 1429 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 0.00014046242774566473, |
| "loss": 0.2063, |
| "step": 1430 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 0.00014028901734104045, |
| "loss": 0.2067, |
| "step": 1431 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 0.00014011560693641617, |
| "loss": 0.1743, |
| "step": 1432 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 0.0001399421965317919, |
| "loss": 0.1503, |
| "step": 1433 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 0.00013976878612716762, |
| "loss": 0.1646, |
| "step": 1434 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 0.00013959537572254334, |
| "loss": 0.2729, |
| "step": 1435 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 0.00013942196531791906, |
| "loss": 0.174, |
| "step": 1436 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 0.00013924855491329478, |
| "loss": 0.2032, |
| "step": 1437 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 0.0001390751445086705, |
| "loss": 0.1512, |
| "step": 1438 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 0.00013890173410404625, |
| "loss": 0.1666, |
| "step": 1439 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 0.00013872832369942194, |
| "loss": 0.1734, |
| "step": 1440 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 0.0001385549132947977, |
| "loss": 0.203, |
| "step": 1441 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 0.0001383815028901734, |
| "loss": 0.1985, |
| "step": 1442 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 0.00013820809248554913, |
| "loss": 0.1538, |
| "step": 1443 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 0.00013803468208092485, |
| "loss": 0.1772, |
| "step": 1444 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 0.00013786127167630057, |
| "loss": 0.1438, |
| "step": 1445 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 0.0001376878612716763, |
| "loss": 0.1327, |
| "step": 1446 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 0.000137514450867052, |
| "loss": 0.2023, |
| "step": 1447 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 0.00013734104046242773, |
| "loss": 0.2226, |
| "step": 1448 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 0.00013716763005780345, |
| "loss": 0.183, |
| "step": 1449 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 0.00013699421965317917, |
| "loss": 0.15, |
| "step": 1450 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 0.00013682080924855492, |
| "loss": 0.2031, |
| "step": 1451 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 0.00013664739884393061, |
| "loss": 0.2016, |
| "step": 1452 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 0.00013647398843930636, |
| "loss": 0.1787, |
| "step": 1453 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 0.00013630057803468206, |
| "loss": 0.2116, |
| "step": 1454 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 0.0001361271676300578, |
| "loss": 0.1641, |
| "step": 1455 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 0.00013595375722543352, |
| "loss": 0.266, |
| "step": 1456 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 0.00013578034682080925, |
| "loss": 0.2049, |
| "step": 1457 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 0.00013560693641618497, |
| "loss": 0.1715, |
| "step": 1458 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 0.0001354335260115607, |
| "loss": 0.1945, |
| "step": 1459 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 0.0001352601156069364, |
| "loss": 0.1659, |
| "step": 1460 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 0.00013508670520231213, |
| "loss": 0.2319, |
| "step": 1461 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 0.00013491329479768785, |
| "loss": 0.2238, |
| "step": 1462 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 0.00013473988439306357, |
| "loss": 0.1964, |
| "step": 1463 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 0.0001345664739884393, |
| "loss": 0.1946, |
| "step": 1464 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 0.00013439306358381504, |
| "loss": 0.2195, |
| "step": 1465 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 0.00013421965317919073, |
| "loss": 0.2402, |
| "step": 1466 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 0.00013404624277456648, |
| "loss": 0.1901, |
| "step": 1467 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 0.00013387283236994217, |
| "loss": 0.2038, |
| "step": 1468 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 0.00013369942196531792, |
| "loss": 0.2147, |
| "step": 1469 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 0.00013352601156069364, |
| "loss": 0.1965, |
| "step": 1470 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 0.00013335260115606936, |
| "loss": 0.1742, |
| "step": 1471 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 0.00013317919075144508, |
| "loss": 0.142, |
| "step": 1472 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 0.00013300578034682078, |
| "loss": 0.2054, |
| "step": 1473 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 0.00013283236994219652, |
| "loss": 0.1783, |
| "step": 1474 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 0.00013265895953757224, |
| "loss": 0.161, |
| "step": 1475 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 0.00013248554913294797, |
| "loss": 0.2465, |
| "step": 1476 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 0.00013231213872832369, |
| "loss": 0.2841, |
| "step": 1477 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 0.0001321387283236994, |
| "loss": 0.314, |
| "step": 1478 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 0.00013196531791907513, |
| "loss": 0.2673, |
| "step": 1479 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 0.00013179190751445085, |
| "loss": 0.2832, |
| "step": 1480 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 0.00013161849710982657, |
| "loss": 0.1815, |
| "step": 1481 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 0.0001314450867052023, |
| "loss": 0.2394, |
| "step": 1482 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 0.000131271676300578, |
| "loss": 0.2533, |
| "step": 1483 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 0.00013109826589595376, |
| "loss": 0.1837, |
| "step": 1484 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 0.00013092485549132945, |
| "loss": 0.2855, |
| "step": 1485 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 0.0001307514450867052, |
| "loss": 0.1662, |
| "step": 1486 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 0.00013057803468208092, |
| "loss": 0.1815, |
| "step": 1487 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 0.00013040462427745664, |
| "loss": 0.1733, |
| "step": 1488 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 0.00013023121387283236, |
| "loss": 0.1708, |
| "step": 1489 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 0.00013005780346820808, |
| "loss": 0.3695, |
| "step": 1490 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 0.0001298843930635838, |
| "loss": 0.1956, |
| "step": 1491 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 0.00012971098265895952, |
| "loss": 0.1617, |
| "step": 1492 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 0.00012953757225433524, |
| "loss": 0.1645, |
| "step": 1493 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 0.00012936416184971096, |
| "loss": 0.1368, |
| "step": 1494 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 0.00012919075144508669, |
| "loss": 0.1788, |
| "step": 1495 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 0.0001290173410404624, |
| "loss": 0.1806, |
| "step": 1496 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 0.00012884393063583813, |
| "loss": 0.2489, |
| "step": 1497 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 0.00012867052023121387, |
| "loss": 0.1555, |
| "step": 1498 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 0.00012849710982658957, |
| "loss": 0.185, |
| "step": 1499 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 0.00012832369942196532, |
| "loss": 0.15, |
| "step": 1500 |
| }, |
| { |
| "epoch": 3.36, |
| "eval_loss": 0.3500145673751831, |
| "eval_runtime": 119.1623, |
| "eval_samples_per_second": 22.171, |
| "eval_steps_per_second": 0.697, |
| "eval_wer": 0.09924633082110274, |
| "step": 1500 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 0.00012815028901734104, |
| "loss": 0.18, |
| "step": 1501 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 0.00012797687861271676, |
| "loss": 0.1619, |
| "step": 1502 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 0.00012780346820809248, |
| "loss": 0.1341, |
| "step": 1503 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 0.0001276300578034682, |
| "loss": 0.1504, |
| "step": 1504 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 0.00012745664739884392, |
| "loss": 0.312, |
| "step": 1505 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 0.00012728323699421964, |
| "loss": 0.1561, |
| "step": 1506 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 0.00012710982658959536, |
| "loss": 0.1403, |
| "step": 1507 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 0.00012693641618497108, |
| "loss": 0.1682, |
| "step": 1508 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 0.0001267630057803468, |
| "loss": 0.1957, |
| "step": 1509 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 0.00012658959537572252, |
| "loss": 0.1462, |
| "step": 1510 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 0.00012641618497109824, |
| "loss": 0.164, |
| "step": 1511 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 0.000126242774566474, |
| "loss": 0.1641, |
| "step": 1512 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 0.00012606936416184968, |
| "loss": 0.2032, |
| "step": 1513 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 0.00012589595375722543, |
| "loss": 0.1307, |
| "step": 1514 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 0.00012572254335260115, |
| "loss": 0.1494, |
| "step": 1515 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 0.00012554913294797687, |
| "loss": 0.2691, |
| "step": 1516 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 0.0001253757225433526, |
| "loss": 0.1967, |
| "step": 1517 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 0.00012520231213872831, |
| "loss": 0.1436, |
| "step": 1518 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 0.00012502890173410404, |
| "loss": 0.1726, |
| "step": 1519 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 0.00012485549132947976, |
| "loss": 0.1431, |
| "step": 1520 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 0.00012468208092485548, |
| "loss": 0.2163, |
| "step": 1521 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 0.0001245086705202312, |
| "loss": 0.181, |
| "step": 1522 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 0.00012433526011560692, |
| "loss": 0.1716, |
| "step": 1523 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 0.00012416184971098267, |
| "loss": 0.1766, |
| "step": 1524 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 0.00012398843930635836, |
| "loss": 0.1528, |
| "step": 1525 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 0.0001238150289017341, |
| "loss": 0.1977, |
| "step": 1526 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 0.0001236416184971098, |
| "loss": 0.1476, |
| "step": 1527 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 0.00012346820809248555, |
| "loss": 0.1232, |
| "step": 1528 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 0.00012329479768786127, |
| "loss": 0.2041, |
| "step": 1529 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 0.000123121387283237, |
| "loss": 0.183, |
| "step": 1530 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 0.0001229479768786127, |
| "loss": 0.1954, |
| "step": 1531 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 0.00012277456647398843, |
| "loss": 0.1887, |
| "step": 1532 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 0.00012260115606936415, |
| "loss": 0.1225, |
| "step": 1533 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 0.00012242774566473987, |
| "loss": 0.1696, |
| "step": 1534 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 0.0001222543352601156, |
| "loss": 0.1423, |
| "step": 1535 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 0.00012208092485549131, |
| "loss": 0.2061, |
| "step": 1536 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 0.00012190751445086703, |
| "loss": 0.1454, |
| "step": 1537 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 0.00012173410404624277, |
| "loss": 0.1243, |
| "step": 1538 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 0.00012156069364161849, |
| "loss": 0.1679, |
| "step": 1539 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 0.00012138728323699421, |
| "loss": 0.1726, |
| "step": 1540 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 0.00012121387283236993, |
| "loss": 0.1884, |
| "step": 1541 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 0.00012104046242774567, |
| "loss": 0.1513, |
| "step": 1542 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 0.00012086705202312137, |
| "loss": 0.1646, |
| "step": 1543 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 0.0001206936416184971, |
| "loss": 0.163, |
| "step": 1544 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 0.00012052023121387281, |
| "loss": 0.2059, |
| "step": 1545 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 0.00012034682080924855, |
| "loss": 0.1516, |
| "step": 1546 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 0.00012017341040462427, |
| "loss": 0.1409, |
| "step": 1547 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 0.00011999999999999999, |
| "loss": 0.1668, |
| "step": 1548 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 0.00011982658959537571, |
| "loss": 0.2127, |
| "step": 1549 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 0.00011965317919075144, |
| "loss": 0.132, |
| "step": 1550 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 0.00011947976878612715, |
| "loss": 0.1861, |
| "step": 1551 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 0.00011930635838150289, |
| "loss": 0.1282, |
| "step": 1552 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 0.0001191329479768786, |
| "loss": 0.1375, |
| "step": 1553 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 0.00011895953757225433, |
| "loss": 0.1762, |
| "step": 1554 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 0.00011878612716763005, |
| "loss": 0.1831, |
| "step": 1555 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 0.00011861271676300578, |
| "loss": 0.2032, |
| "step": 1556 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 0.00011843930635838149, |
| "loss": 0.1788, |
| "step": 1557 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 0.00011826589595375722, |
| "loss": 0.1373, |
| "step": 1558 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 0.00011809248554913293, |
| "loss": 0.1996, |
| "step": 1559 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 0.00011791907514450866, |
| "loss": 0.1721, |
| "step": 1560 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 0.00011774566473988439, |
| "loss": 0.1604, |
| "step": 1561 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 0.00011757225433526012, |
| "loss": 0.204, |
| "step": 1562 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 0.00011739884393063583, |
| "loss": 0.1565, |
| "step": 1563 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 0.00011722543352601156, |
| "loss": 0.2582, |
| "step": 1564 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 0.00011705202312138727, |
| "loss": 0.145, |
| "step": 1565 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 0.000116878612716763, |
| "loss": 0.1838, |
| "step": 1566 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 0.00011670520231213872, |
| "loss": 0.1598, |
| "step": 1567 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 0.00011653179190751443, |
| "loss": 0.1285, |
| "step": 1568 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 0.00011635838150289016, |
| "loss": 0.1481, |
| "step": 1569 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 0.00011618497109826587, |
| "loss": 0.1971, |
| "step": 1570 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 0.0001160115606936416, |
| "loss": 0.16, |
| "step": 1571 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 0.00011583815028901733, |
| "loss": 0.119, |
| "step": 1572 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 0.00011566473988439306, |
| "loss": 0.1403, |
| "step": 1573 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 0.00011549132947976877, |
| "loss": 0.1325, |
| "step": 1574 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 0.0001153179190751445, |
| "loss": 0.2049, |
| "step": 1575 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 0.00011514450867052021, |
| "loss": 0.1349, |
| "step": 1576 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 0.00011497109826589594, |
| "loss": 0.177, |
| "step": 1577 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 0.00011479768786127166, |
| "loss": 0.1341, |
| "step": 1578 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 0.00011462427745664738, |
| "loss": 0.2149, |
| "step": 1579 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 0.0001144508670520231, |
| "loss": 0.1602, |
| "step": 1580 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 0.00011427745664739884, |
| "loss": 0.1543, |
| "step": 1581 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 0.00011410404624277455, |
| "loss": 0.1955, |
| "step": 1582 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 0.00011393063583815028, |
| "loss": 0.1457, |
| "step": 1583 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 0.00011375722543352599, |
| "loss": 0.1612, |
| "step": 1584 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 0.00011358381502890172, |
| "loss": 0.14, |
| "step": 1585 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 0.00011341040462427744, |
| "loss": 0.1862, |
| "step": 1586 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 0.00011323699421965318, |
| "loss": 0.1929, |
| "step": 1587 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 0.00011306358381502888, |
| "loss": 0.2027, |
| "step": 1588 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 0.00011289017341040462, |
| "loss": 0.192, |
| "step": 1589 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 0.00011271676300578033, |
| "loss": 0.1719, |
| "step": 1590 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 0.00011254335260115606, |
| "loss": 0.1499, |
| "step": 1591 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 0.00011236994219653178, |
| "loss": 0.1966, |
| "step": 1592 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 0.0001121965317919075, |
| "loss": 0.1358, |
| "step": 1593 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 0.00011202312138728322, |
| "loss": 0.154, |
| "step": 1594 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 0.00011184971098265896, |
| "loss": 0.1589, |
| "step": 1595 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 0.00011167630057803466, |
| "loss": 0.1438, |
| "step": 1596 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 0.0001115028901734104, |
| "loss": 0.1447, |
| "step": 1597 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 0.00011132947976878612, |
| "loss": 0.1544, |
| "step": 1598 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 0.00011115606936416184, |
| "loss": 0.1711, |
| "step": 1599 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 0.00011098265895953756, |
| "loss": 0.1228, |
| "step": 1600 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 0.0001108092485549133, |
| "loss": 0.1774, |
| "step": 1601 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 0.000110635838150289, |
| "loss": 0.1767, |
| "step": 1602 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 0.00011046242774566474, |
| "loss": 0.1241, |
| "step": 1603 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 0.00011028901734104044, |
| "loss": 0.1156, |
| "step": 1604 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 0.00011011560693641618, |
| "loss": 0.1443, |
| "step": 1605 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 0.0001099421965317919, |
| "loss": 0.1338, |
| "step": 1606 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 0.00010976878612716762, |
| "loss": 0.1528, |
| "step": 1607 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 0.00010959537572254334, |
| "loss": 0.206, |
| "step": 1608 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 0.00010942196531791907, |
| "loss": 0.1535, |
| "step": 1609 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 0.00010924855491329478, |
| "loss": 0.1599, |
| "step": 1610 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 0.00010907514450867051, |
| "loss": 0.1776, |
| "step": 1611 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 0.00010890173410404623, |
| "loss": 0.216, |
| "step": 1612 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 0.00010872832369942196, |
| "loss": 0.2176, |
| "step": 1613 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 0.00010855491329479768, |
| "loss": 0.2054, |
| "step": 1614 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 0.00010838150289017341, |
| "loss": 0.1925, |
| "step": 1615 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 0.00010820809248554912, |
| "loss": 0.2008, |
| "step": 1616 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 0.00010803468208092485, |
| "loss": 0.1605, |
| "step": 1617 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 0.00010786127167630056, |
| "loss": 0.1692, |
| "step": 1618 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 0.00010768786127167629, |
| "loss": 0.153, |
| "step": 1619 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 0.00010751445086705201, |
| "loss": 0.1447, |
| "step": 1620 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 0.00010734104046242773, |
| "loss": 0.1496, |
| "step": 1621 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 0.00010716763005780346, |
| "loss": 0.1427, |
| "step": 1622 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 0.00010699421965317919, |
| "loss": 0.1399, |
| "step": 1623 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 0.0001068208092485549, |
| "loss": 0.1643, |
| "step": 1624 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 0.00010664739884393063, |
| "loss": 0.1372, |
| "step": 1625 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 0.00010647398843930635, |
| "loss": 0.1735, |
| "step": 1626 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 0.00010630057803468207, |
| "loss": 0.1346, |
| "step": 1627 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 0.00010612716763005779, |
| "loss": 0.1478, |
| "step": 1628 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 0.00010595375722543353, |
| "loss": 0.1135, |
| "step": 1629 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 0.00010578034682080923, |
| "loss": 0.1815, |
| "step": 1630 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 0.00010560693641618497, |
| "loss": 0.1768, |
| "step": 1631 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 0.00010543352601156068, |
| "loss": 0.144, |
| "step": 1632 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 0.00010526011560693641, |
| "loss": 0.1809, |
| "step": 1633 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 0.00010508670520231213, |
| "loss": 0.133, |
| "step": 1634 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 0.00010491329479768786, |
| "loss": 0.2283, |
| "step": 1635 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 0.00010473988439306357, |
| "loss": 0.1784, |
| "step": 1636 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 0.0001045664739884393, |
| "loss": 0.1348, |
| "step": 1637 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 0.00010439306358381501, |
| "loss": 0.156, |
| "step": 1638 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 0.00010421965317919075, |
| "loss": 0.1543, |
| "step": 1639 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 0.00010404624277456647, |
| "loss": 0.1533, |
| "step": 1640 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 0.00010387283236994219, |
| "loss": 0.1464, |
| "step": 1641 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 0.00010369942196531791, |
| "loss": 0.1688, |
| "step": 1642 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 0.00010352601156069364, |
| "loss": 0.208, |
| "step": 1643 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 0.00010335260115606935, |
| "loss": 0.1418, |
| "step": 1644 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 0.00010317919075144509, |
| "loss": 0.1537, |
| "step": 1645 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 0.00010300578034682079, |
| "loss": 0.1438, |
| "step": 1646 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 0.00010283236994219653, |
| "loss": 0.1513, |
| "step": 1647 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 0.00010265895953757225, |
| "loss": 0.1724, |
| "step": 1648 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 0.00010248554913294798, |
| "loss": 0.138, |
| "step": 1649 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 0.00010231213872832369, |
| "loss": 0.163, |
| "step": 1650 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 0.00010213872832369942, |
| "loss": 0.1652, |
| "step": 1651 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 0.00010196531791907513, |
| "loss": 0.1394, |
| "step": 1652 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 0.00010179190751445086, |
| "loss": 0.1468, |
| "step": 1653 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 0.00010161849710982658, |
| "loss": 0.1937, |
| "step": 1654 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 0.0001014450867052023, |
| "loss": 0.1515, |
| "step": 1655 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 0.00010127167630057803, |
| "loss": 0.118, |
| "step": 1656 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 0.00010109826589595376, |
| "loss": 0.1322, |
| "step": 1657 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 0.00010092485549132947, |
| "loss": 0.2358, |
| "step": 1658 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 0.0001007514450867052, |
| "loss": 0.1622, |
| "step": 1659 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 0.00010057803468208092, |
| "loss": 0.1568, |
| "step": 1660 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 0.00010040462427745664, |
| "loss": 0.1878, |
| "step": 1661 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 0.00010023121387283236, |
| "loss": 0.1759, |
| "step": 1662 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 0.0001000578034682081, |
| "loss": 0.1645, |
| "step": 1663 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 9.98843930635838e-05, |
| "loss": 0.2082, |
| "step": 1664 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 9.971098265895953e-05, |
| "loss": 0.2411, |
| "step": 1665 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 9.953757225433525e-05, |
| "loss": 0.1479, |
| "step": 1666 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 9.936416184971097e-05, |
| "loss": 0.1409, |
| "step": 1667 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 9.91907514450867e-05, |
| "loss": 0.1622, |
| "step": 1668 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 9.901734104046241e-05, |
| "loss": 0.1231, |
| "step": 1669 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 9.884393063583814e-05, |
| "loss": 0.1741, |
| "step": 1670 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 9.867052023121385e-05, |
| "loss": 0.1403, |
| "step": 1671 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 9.849710982658958e-05, |
| "loss": 0.1578, |
| "step": 1672 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 9.83236994219653e-05, |
| "loss": 0.1121, |
| "step": 1673 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 9.815028901734104e-05, |
| "loss": 0.153, |
| "step": 1674 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 9.797687861271675e-05, |
| "loss": 0.1868, |
| "step": 1675 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 9.780346820809248e-05, |
| "loss": 0.1246, |
| "step": 1676 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 9.763005780346819e-05, |
| "loss": 0.1514, |
| "step": 1677 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 9.745664739884392e-05, |
| "loss": 0.1693, |
| "step": 1678 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 9.728323699421964e-05, |
| "loss": 0.1596, |
| "step": 1679 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 9.710982658959536e-05, |
| "loss": 0.1452, |
| "step": 1680 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 9.693641618497108e-05, |
| "loss": 0.1416, |
| "step": 1681 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 9.676300578034682e-05, |
| "loss": 0.1543, |
| "step": 1682 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 9.658959537572253e-05, |
| "loss": 0.1621, |
| "step": 1683 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 9.641618497109826e-05, |
| "loss": 0.1325, |
| "step": 1684 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 9.624277456647398e-05, |
| "loss": 0.1154, |
| "step": 1685 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 9.60693641618497e-05, |
| "loss": 0.1519, |
| "step": 1686 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 9.589595375722542e-05, |
| "loss": 0.1167, |
| "step": 1687 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 9.572254335260116e-05, |
| "loss": 0.1214, |
| "step": 1688 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 9.554913294797686e-05, |
| "loss": 0.1248, |
| "step": 1689 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 9.53757225433526e-05, |
| "loss": 0.1069, |
| "step": 1690 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 9.52023121387283e-05, |
| "loss": 0.1852, |
| "step": 1691 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 9.502890173410404e-05, |
| "loss": 0.1947, |
| "step": 1692 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 9.485549132947976e-05, |
| "loss": 0.1323, |
| "step": 1693 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 9.468208092485548e-05, |
| "loss": 0.1219, |
| "step": 1694 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 9.45086705202312e-05, |
| "loss": 0.1638, |
| "step": 1695 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 9.433526011560693e-05, |
| "loss": 0.1515, |
| "step": 1696 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 9.416184971098264e-05, |
| "loss": 0.0967, |
| "step": 1697 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 9.398843930635838e-05, |
| "loss": 0.1253, |
| "step": 1698 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 9.38150289017341e-05, |
| "loss": 0.1614, |
| "step": 1699 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 9.364161849710982e-05, |
| "loss": 0.133, |
| "step": 1700 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 9.346820809248554e-05, |
| "loss": 0.1673, |
| "step": 1701 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 9.329479768786127e-05, |
| "loss": 0.1344, |
| "step": 1702 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 9.312138728323698e-05, |
| "loss": 0.151, |
| "step": 1703 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 9.294797687861271e-05, |
| "loss": 0.1569, |
| "step": 1704 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 9.277456647398842e-05, |
| "loss": 0.1722, |
| "step": 1705 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 9.260115606936415e-05, |
| "loss": 0.1114, |
| "step": 1706 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 9.242774566473988e-05, |
| "loss": 0.1562, |
| "step": 1707 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 9.22543352601156e-05, |
| "loss": 0.108, |
| "step": 1708 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 9.208092485549132e-05, |
| "loss": 0.1579, |
| "step": 1709 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 9.190751445086705e-05, |
| "loss": 0.1275, |
| "step": 1710 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 9.173410404624276e-05, |
| "loss": 0.164, |
| "step": 1711 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 9.156069364161849e-05, |
| "loss": 0.1032, |
| "step": 1712 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 9.138728323699421e-05, |
| "loss": 0.1304, |
| "step": 1713 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 9.121387283236993e-05, |
| "loss": 0.1411, |
| "step": 1714 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 9.104046242774565e-05, |
| "loss": 0.1246, |
| "step": 1715 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 9.086705202312139e-05, |
| "loss": 0.1199, |
| "step": 1716 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 9.06936416184971e-05, |
| "loss": 0.1587, |
| "step": 1717 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 9.052023121387283e-05, |
| "loss": 0.1549, |
| "step": 1718 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 9.034682080924854e-05, |
| "loss": 0.1169, |
| "step": 1719 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 9.017341040462427e-05, |
| "loss": 0.1163, |
| "step": 1720 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 8.999999999999999e-05, |
| "loss": 0.1194, |
| "step": 1721 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 8.982658959537573e-05, |
| "loss": 0.1438, |
| "step": 1722 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 8.965317919075143e-05, |
| "loss": 0.1597, |
| "step": 1723 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 8.947976878612717e-05, |
| "loss": 0.1318, |
| "step": 1724 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 8.930635838150287e-05, |
| "loss": 0.1993, |
| "step": 1725 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 8.913294797687861e-05, |
| "loss": 0.1452, |
| "step": 1726 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 8.895953757225433e-05, |
| "loss": 0.1918, |
| "step": 1727 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 8.878612716763005e-05, |
| "loss": 0.1616, |
| "step": 1728 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 8.861271676300577e-05, |
| "loss": 0.1179, |
| "step": 1729 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 8.84393063583815e-05, |
| "loss": 0.1418, |
| "step": 1730 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 8.826589595375721e-05, |
| "loss": 0.1427, |
| "step": 1731 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 8.809248554913295e-05, |
| "loss": 0.1248, |
| "step": 1732 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 8.791907514450865e-05, |
| "loss": 0.1295, |
| "step": 1733 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 8.774566473988439e-05, |
| "loss": 0.137, |
| "step": 1734 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 8.757225433526011e-05, |
| "loss": 0.1276, |
| "step": 1735 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 8.739884393063584e-05, |
| "loss": 0.1562, |
| "step": 1736 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 8.722543352601155e-05, |
| "loss": 0.1575, |
| "step": 1737 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 8.705202312138728e-05, |
| "loss": 0.1435, |
| "step": 1738 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 8.687861271676299e-05, |
| "loss": 0.1174, |
| "step": 1739 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 8.670520231213873e-05, |
| "loss": 0.1329, |
| "step": 1740 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 8.653179190751445e-05, |
| "loss": 0.1766, |
| "step": 1741 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 8.635838150289017e-05, |
| "loss": 0.1786, |
| "step": 1742 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 8.618497109826589e-05, |
| "loss": 0.1174, |
| "step": 1743 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 8.601156069364162e-05, |
| "loss": 0.1559, |
| "step": 1744 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 8.583815028901733e-05, |
| "loss": 0.1336, |
| "step": 1745 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 8.566473988439306e-05, |
| "loss": 0.1574, |
| "step": 1746 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 8.549132947976878e-05, |
| "loss": 0.1187, |
| "step": 1747 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 8.53179190751445e-05, |
| "loss": 0.1231, |
| "step": 1748 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 8.514450867052023e-05, |
| "loss": 0.1459, |
| "step": 1749 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 8.497109826589596e-05, |
| "loss": 0.1394, |
| "step": 1750 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 8.479768786127167e-05, |
| "loss": 0.1883, |
| "step": 1751 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 8.46242774566474e-05, |
| "loss": 0.132, |
| "step": 1752 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 8.445086705202311e-05, |
| "loss": 0.1308, |
| "step": 1753 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 8.427745664739884e-05, |
| "loss": 0.1356, |
| "step": 1754 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 8.410404624277456e-05, |
| "loss": 0.1436, |
| "step": 1755 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 8.393063583815028e-05, |
| "loss": 0.1147, |
| "step": 1756 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 8.3757225433526e-05, |
| "loss": 0.1269, |
| "step": 1757 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 8.358381502890174e-05, |
| "loss": 0.1558, |
| "step": 1758 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 8.341040462427745e-05, |
| "loss": 0.1215, |
| "step": 1759 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 8.323699421965317e-05, |
| "loss": 0.0895, |
| "step": 1760 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 8.30635838150289e-05, |
| "loss": 0.1421, |
| "step": 1761 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 8.289017341040461e-05, |
| "loss": 0.175, |
| "step": 1762 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 8.271676300578034e-05, |
| "loss": 0.1192, |
| "step": 1763 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 8.254335260115605e-05, |
| "loss": 0.1383, |
| "step": 1764 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 8.236994219653178e-05, |
| "loss": 0.1617, |
| "step": 1765 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 8.21965317919075e-05, |
| "loss": 0.1177, |
| "step": 1766 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 8.202312138728322e-05, |
| "loss": 0.148, |
| "step": 1767 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 8.184971098265895e-05, |
| "loss": 0.1671, |
| "step": 1768 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 8.167630057803468e-05, |
| "loss": 0.0987, |
| "step": 1769 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 8.150289017341039e-05, |
| "loss": 0.122, |
| "step": 1770 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 8.132947976878612e-05, |
| "loss": 0.3126, |
| "step": 1771 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 8.115606936416184e-05, |
| "loss": 0.1462, |
| "step": 1772 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 8.098265895953756e-05, |
| "loss": 0.1206, |
| "step": 1773 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 8.080924855491328e-05, |
| "loss": 0.1572, |
| "step": 1774 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 8.063583815028902e-05, |
| "loss": 0.1255, |
| "step": 1775 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 8.046242774566472e-05, |
| "loss": 0.1281, |
| "step": 1776 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 8.028901734104046e-05, |
| "loss": 0.142, |
| "step": 1777 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 8.011560693641617e-05, |
| "loss": 0.1518, |
| "step": 1778 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 7.99421965317919e-05, |
| "loss": 0.1478, |
| "step": 1779 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 7.976878612716762e-05, |
| "loss": 0.1355, |
| "step": 1780 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 7.959537572254334e-05, |
| "loss": 0.1491, |
| "step": 1781 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 7.942196531791906e-05, |
| "loss": 0.1098, |
| "step": 1782 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 7.92485549132948e-05, |
| "loss": 0.124, |
| "step": 1783 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 7.90751445086705e-05, |
| "loss": 0.1309, |
| "step": 1784 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 7.890173410404624e-05, |
| "loss": 0.0982, |
| "step": 1785 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 7.872832369942196e-05, |
| "loss": 0.1216, |
| "step": 1786 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 7.855491329479768e-05, |
| "loss": 0.0917, |
| "step": 1787 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 7.83815028901734e-05, |
| "loss": 0.0959, |
| "step": 1788 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 7.820809248554913e-05, |
| "loss": 0.0865, |
| "step": 1789 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 7.803468208092484e-05, |
| "loss": 0.0964, |
| "step": 1790 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 7.786127167630058e-05, |
| "loss": 0.0859, |
| "step": 1791 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 7.768786127167628e-05, |
| "loss": 0.0801, |
| "step": 1792 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 7.751445086705202e-05, |
| "loss": 0.0803, |
| "step": 1793 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 7.734104046242774e-05, |
| "loss": 0.1071, |
| "step": 1794 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 7.716763005780346e-05, |
| "loss": 0.0979, |
| "step": 1795 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 7.699421965317918e-05, |
| "loss": 0.0981, |
| "step": 1796 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 7.682080924855491e-05, |
| "loss": 0.0873, |
| "step": 1797 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 7.664739884393062e-05, |
| "loss": 0.2591, |
| "step": 1798 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 7.647398843930635e-05, |
| "loss": 0.0941, |
| "step": 1799 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 7.630057803468207e-05, |
| "loss": 0.0807, |
| "step": 1800 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 7.61271676300578e-05, |
| "loss": 0.1066, |
| "step": 1801 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 7.595375722543352e-05, |
| "loss": 0.0583, |
| "step": 1802 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 7.578034682080925e-05, |
| "loss": 0.0889, |
| "step": 1803 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 7.560693641618496e-05, |
| "loss": 0.1116, |
| "step": 1804 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 7.543352601156069e-05, |
| "loss": 0.0883, |
| "step": 1805 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 7.52601156069364e-05, |
| "loss": 0.1055, |
| "step": 1806 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 7.508670520231213e-05, |
| "loss": 0.1226, |
| "step": 1807 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 7.491329479768785e-05, |
| "loss": 0.1014, |
| "step": 1808 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 7.473988439306357e-05, |
| "loss": 0.0666, |
| "step": 1809 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 7.45664739884393e-05, |
| "loss": 0.0956, |
| "step": 1810 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 7.439306358381502e-05, |
| "loss": 0.1139, |
| "step": 1811 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 7.421965317919074e-05, |
| "loss": 0.0729, |
| "step": 1812 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 7.404624277456646e-05, |
| "loss": 0.0888, |
| "step": 1813 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 7.387283236994219e-05, |
| "loss": 0.0972, |
| "step": 1814 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 7.369942196531791e-05, |
| "loss": 0.1059, |
| "step": 1815 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 7.352601156069363e-05, |
| "loss": 0.1385, |
| "step": 1816 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 7.335260115606935e-05, |
| "loss": 0.0712, |
| "step": 1817 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 7.317919075144507e-05, |
| "loss": 0.0821, |
| "step": 1818 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 7.30057803468208e-05, |
| "loss": 0.2666, |
| "step": 1819 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 7.283236994219653e-05, |
| "loss": 0.0846, |
| "step": 1820 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 7.265895953757225e-05, |
| "loss": 0.0695, |
| "step": 1821 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 7.248554913294797e-05, |
| "loss": 0.0771, |
| "step": 1822 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 7.231213872832369e-05, |
| "loss": 0.0943, |
| "step": 1823 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 7.213872832369941e-05, |
| "loss": 0.0891, |
| "step": 1824 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 7.196531791907513e-05, |
| "loss": 0.0667, |
| "step": 1825 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 7.179190751445085e-05, |
| "loss": 0.0962, |
| "step": 1826 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 7.161849710982659e-05, |
| "loss": 0.0968, |
| "step": 1827 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 7.144508670520231e-05, |
| "loss": 0.0893, |
| "step": 1828 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 7.127167630057803e-05, |
| "loss": 0.0961, |
| "step": 1829 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 7.109826589595375e-05, |
| "loss": 0.1052, |
| "step": 1830 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 7.092485549132947e-05, |
| "loss": 0.0931, |
| "step": 1831 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 7.075144508670519e-05, |
| "loss": 0.0923, |
| "step": 1832 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 7.057803468208091e-05, |
| "loss": 0.1023, |
| "step": 1833 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 7.040462427745665e-05, |
| "loss": 0.0849, |
| "step": 1834 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 7.023121387283237e-05, |
| "loss": 0.1163, |
| "step": 1835 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 7.005780346820809e-05, |
| "loss": 0.0883, |
| "step": 1836 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 6.988439306358381e-05, |
| "loss": 0.0769, |
| "step": 1837 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 6.971098265895953e-05, |
| "loss": 0.115, |
| "step": 1838 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 6.953757225433525e-05, |
| "loss": 0.0903, |
| "step": 1839 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 6.936416184971097e-05, |
| "loss": 0.0673, |
| "step": 1840 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 6.91907514450867e-05, |
| "loss": 0.08, |
| "step": 1841 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 6.901734104046242e-05, |
| "loss": 0.1066, |
| "step": 1842 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 6.884393063583815e-05, |
| "loss": 0.0958, |
| "step": 1843 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 6.867052023121387e-05, |
| "loss": 0.1081, |
| "step": 1844 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 6.849710982658959e-05, |
| "loss": 0.117, |
| "step": 1845 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 6.832369942196531e-05, |
| "loss": 0.0961, |
| "step": 1846 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 6.815028901734103e-05, |
| "loss": 0.0801, |
| "step": 1847 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 6.797687861271676e-05, |
| "loss": 0.0996, |
| "step": 1848 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 6.780346820809248e-05, |
| "loss": 0.1191, |
| "step": 1849 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 6.76300578034682e-05, |
| "loss": 0.0823, |
| "step": 1850 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 6.745664739884392e-05, |
| "loss": 0.1133, |
| "step": 1851 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 6.728323699421964e-05, |
| "loss": 0.0762, |
| "step": 1852 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 6.710982658959537e-05, |
| "loss": 0.0897, |
| "step": 1853 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 6.693641618497109e-05, |
| "loss": 0.1158, |
| "step": 1854 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 6.676300578034682e-05, |
| "loss": 0.0903, |
| "step": 1855 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 6.658959537572254e-05, |
| "loss": 0.1022, |
| "step": 1856 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 6.641618497109826e-05, |
| "loss": 0.0955, |
| "step": 1857 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 6.624277456647398e-05, |
| "loss": 0.0839, |
| "step": 1858 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 6.60693641618497e-05, |
| "loss": 0.1227, |
| "step": 1859 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 6.589595375722542e-05, |
| "loss": 0.1244, |
| "step": 1860 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 6.572254335260114e-05, |
| "loss": 0.0772, |
| "step": 1861 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 6.554913294797688e-05, |
| "loss": 0.0653, |
| "step": 1862 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 6.53757225433526e-05, |
| "loss": 0.0966, |
| "step": 1863 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 6.520231213872832e-05, |
| "loss": 0.1069, |
| "step": 1864 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 6.502890173410404e-05, |
| "loss": 0.0962, |
| "step": 1865 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 6.485549132947976e-05, |
| "loss": 0.1655, |
| "step": 1866 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 6.468208092485548e-05, |
| "loss": 0.1218, |
| "step": 1867 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 6.45086705202312e-05, |
| "loss": 0.0774, |
| "step": 1868 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 6.433526011560694e-05, |
| "loss": 0.0858, |
| "step": 1869 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 6.416184971098266e-05, |
| "loss": 0.0925, |
| "step": 1870 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 6.398843930635838e-05, |
| "loss": 0.0844, |
| "step": 1871 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 6.38150289017341e-05, |
| "loss": 0.1023, |
| "step": 1872 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 6.364161849710982e-05, |
| "loss": 0.0882, |
| "step": 1873 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 6.346820809248554e-05, |
| "loss": 0.0706, |
| "step": 1874 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 6.329479768786126e-05, |
| "loss": 0.0809, |
| "step": 1875 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 6.3121387283237e-05, |
| "loss": 0.1, |
| "step": 1876 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 6.294797687861272e-05, |
| "loss": 0.0893, |
| "step": 1877 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 6.277456647398844e-05, |
| "loss": 0.0745, |
| "step": 1878 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 6.260115606936416e-05, |
| "loss": 0.088, |
| "step": 1879 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 6.242774566473988e-05, |
| "loss": 0.1111, |
| "step": 1880 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 6.22543352601156e-05, |
| "loss": 0.0581, |
| "step": 1881 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 6.208092485549133e-05, |
| "loss": 0.0726, |
| "step": 1882 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 6.190751445086705e-05, |
| "loss": 0.0644, |
| "step": 1883 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 6.173410404624277e-05, |
| "loss": 0.1387, |
| "step": 1884 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 6.15606936416185e-05, |
| "loss": 0.1132, |
| "step": 1885 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 6.138728323699422e-05, |
| "loss": 0.0643, |
| "step": 1886 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 6.121387283236994e-05, |
| "loss": 0.1063, |
| "step": 1887 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 6.104046242774566e-05, |
| "loss": 0.0921, |
| "step": 1888 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 6.0867052023121385e-05, |
| "loss": 0.0744, |
| "step": 1889 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 6.0693641618497105e-05, |
| "loss": 0.0676, |
| "step": 1890 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 6.052023121387283e-05, |
| "loss": 0.092, |
| "step": 1891 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 6.034682080924855e-05, |
| "loss": 0.0908, |
| "step": 1892 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 6.0173410404624274e-05, |
| "loss": 0.0844, |
| "step": 1893 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 5.9999999999999995e-05, |
| "loss": 0.0992, |
| "step": 1894 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 5.982658959537572e-05, |
| "loss": 0.2986, |
| "step": 1895 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 5.965317919075144e-05, |
| "loss": 0.0784, |
| "step": 1896 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 5.9479768786127164e-05, |
| "loss": 0.0835, |
| "step": 1897 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 5.930635838150289e-05, |
| "loss": 0.0896, |
| "step": 1898 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 5.913294797687861e-05, |
| "loss": 0.0864, |
| "step": 1899 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 5.895953757225433e-05, |
| "loss": 0.0807, |
| "step": 1900 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 5.878612716763006e-05, |
| "loss": 0.113, |
| "step": 1901 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 5.861271676300578e-05, |
| "loss": 0.0791, |
| "step": 1902 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 5.84393063583815e-05, |
| "loss": 0.1119, |
| "step": 1903 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 5.8265895953757215e-05, |
| "loss": 0.0941, |
| "step": 1904 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 5.8092485549132936e-05, |
| "loss": 0.0866, |
| "step": 1905 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 5.791907514450866e-05, |
| "loss": 0.0874, |
| "step": 1906 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 5.7745664739884384e-05, |
| "loss": 0.1118, |
| "step": 1907 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 5.7572254335260105e-05, |
| "loss": 0.1185, |
| "step": 1908 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 5.739884393063583e-05, |
| "loss": 0.0736, |
| "step": 1909 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 5.722543352601155e-05, |
| "loss": 0.2191, |
| "step": 1910 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 5.705202312138727e-05, |
| "loss": 0.097, |
| "step": 1911 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 5.6878612716762994e-05, |
| "loss": 0.0946, |
| "step": 1912 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 5.670520231213872e-05, |
| "loss": 0.0992, |
| "step": 1913 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 5.653179190751444e-05, |
| "loss": 0.095, |
| "step": 1914 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 5.635838150289016e-05, |
| "loss": 0.0838, |
| "step": 1915 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 5.618497109826589e-05, |
| "loss": 0.0887, |
| "step": 1916 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 5.601156069364161e-05, |
| "loss": 0.0876, |
| "step": 1917 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 5.583815028901733e-05, |
| "loss": 0.0955, |
| "step": 1918 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 5.566473988439306e-05, |
| "loss": 0.1013, |
| "step": 1919 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 5.549132947976878e-05, |
| "loss": 0.0903, |
| "step": 1920 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 5.53179190751445e-05, |
| "loss": 0.0788, |
| "step": 1921 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 5.514450867052022e-05, |
| "loss": 0.1073, |
| "step": 1922 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 5.497109826589595e-05, |
| "loss": 0.0729, |
| "step": 1923 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 5.479768786127167e-05, |
| "loss": 0.0862, |
| "step": 1924 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 5.462427745664739e-05, |
| "loss": 0.0748, |
| "step": 1925 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 5.445086705202312e-05, |
| "loss": 0.0993, |
| "step": 1926 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 5.427745664739884e-05, |
| "loss": 0.1051, |
| "step": 1927 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 5.410404624277456e-05, |
| "loss": 0.1236, |
| "step": 1928 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 5.393063583815028e-05, |
| "loss": 0.1253, |
| "step": 1929 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 5.375722543352601e-05, |
| "loss": 0.091, |
| "step": 1930 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 5.358381502890173e-05, |
| "loss": 0.0842, |
| "step": 1931 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 5.341040462427745e-05, |
| "loss": 0.0822, |
| "step": 1932 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 5.3236994219653176e-05, |
| "loss": 0.0972, |
| "step": 1933 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 5.3063583815028896e-05, |
| "loss": 0.0922, |
| "step": 1934 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 5.289017341040462e-05, |
| "loss": 0.1471, |
| "step": 1935 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 5.271676300578034e-05, |
| "loss": 0.0732, |
| "step": 1936 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 5.2543352601156065e-05, |
| "loss": 0.0856, |
| "step": 1937 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 5.2369942196531786e-05, |
| "loss": 0.0998, |
| "step": 1938 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 5.2196531791907507e-05, |
| "loss": 0.079, |
| "step": 1939 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 5.2023121387283234e-05, |
| "loss": 0.0804, |
| "step": 1940 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 5.1849710982658955e-05, |
| "loss": 0.0793, |
| "step": 1941 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 5.1676300578034675e-05, |
| "loss": 0.0853, |
| "step": 1942 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 5.1502890173410396e-05, |
| "loss": 0.0558, |
| "step": 1943 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 5.1329479768786124e-05, |
| "loss": 0.0735, |
| "step": 1944 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 5.1156069364161844e-05, |
| "loss": 0.0744, |
| "step": 1945 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 5.0982658959537565e-05, |
| "loss": 0.0722, |
| "step": 1946 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 5.080924855491329e-05, |
| "loss": 0.1048, |
| "step": 1947 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 5.063583815028901e-05, |
| "loss": 0.07, |
| "step": 1948 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 5.0462427745664734e-05, |
| "loss": 0.0965, |
| "step": 1949 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 5.028901734104046e-05, |
| "loss": 0.1039, |
| "step": 1950 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 5.011560693641618e-05, |
| "loss": 0.105, |
| "step": 1951 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 4.99421965317919e-05, |
| "loss": 0.0895, |
| "step": 1952 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 4.976878612716762e-05, |
| "loss": 0.0865, |
| "step": 1953 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 4.959537572254335e-05, |
| "loss": 0.0963, |
| "step": 1954 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 4.942196531791907e-05, |
| "loss": 0.1016, |
| "step": 1955 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 4.924855491329479e-05, |
| "loss": 0.0622, |
| "step": 1956 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 4.907514450867052e-05, |
| "loss": 0.0774, |
| "step": 1957 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 4.890173410404624e-05, |
| "loss": 0.077, |
| "step": 1958 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 4.872832369942196e-05, |
| "loss": 0.0828, |
| "step": 1959 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 4.855491329479768e-05, |
| "loss": 0.0972, |
| "step": 1960 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 4.838150289017341e-05, |
| "loss": 0.0883, |
| "step": 1961 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 4.820809248554913e-05, |
| "loss": 0.1118, |
| "step": 1962 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 4.803468208092485e-05, |
| "loss": 0.1209, |
| "step": 1963 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 4.786127167630058e-05, |
| "loss": 0.0744, |
| "step": 1964 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 4.76878612716763e-05, |
| "loss": 0.0602, |
| "step": 1965 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 4.751445086705202e-05, |
| "loss": 0.1033, |
| "step": 1966 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 4.734104046242774e-05, |
| "loss": 0.0828, |
| "step": 1967 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 4.716763005780347e-05, |
| "loss": 0.0767, |
| "step": 1968 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 4.699421965317919e-05, |
| "loss": 0.0837, |
| "step": 1969 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 4.682080924855491e-05, |
| "loss": 0.0779, |
| "step": 1970 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 4.6647398843930636e-05, |
| "loss": 0.0932, |
| "step": 1971 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 4.647398843930636e-05, |
| "loss": 0.086, |
| "step": 1972 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 4.630057803468208e-05, |
| "loss": 0.0654, |
| "step": 1973 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 4.61271676300578e-05, |
| "loss": 0.0925, |
| "step": 1974 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 4.5953757225433526e-05, |
| "loss": 0.1078, |
| "step": 1975 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 4.5780346820809246e-05, |
| "loss": 0.0916, |
| "step": 1976 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 4.560693641618497e-05, |
| "loss": 0.0907, |
| "step": 1977 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 4.5433526011560694e-05, |
| "loss": 0.0946, |
| "step": 1978 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 4.5260115606936415e-05, |
| "loss": 0.0999, |
| "step": 1979 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 4.5086705202312136e-05, |
| "loss": 0.0651, |
| "step": 1980 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 4.491329479768786e-05, |
| "loss": 0.0967, |
| "step": 1981 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 4.4739884393063584e-05, |
| "loss": 0.0768, |
| "step": 1982 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 4.4566473988439305e-05, |
| "loss": 0.0853, |
| "step": 1983 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 4.4393063583815025e-05, |
| "loss": 0.1044, |
| "step": 1984 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 4.421965317919075e-05, |
| "loss": 0.1131, |
| "step": 1985 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 4.404624277456647e-05, |
| "loss": 0.1116, |
| "step": 1986 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 4.3872832369942194e-05, |
| "loss": 0.0568, |
| "step": 1987 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 4.369942196531792e-05, |
| "loss": 0.0725, |
| "step": 1988 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 4.352601156069364e-05, |
| "loss": 0.0715, |
| "step": 1989 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 4.335260115606936e-05, |
| "loss": 0.0784, |
| "step": 1990 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 4.3179190751445084e-05, |
| "loss": 0.0873, |
| "step": 1991 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 4.300578034682081e-05, |
| "loss": 0.0852, |
| "step": 1992 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 4.283236994219653e-05, |
| "loss": 0.0966, |
| "step": 1993 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 4.265895953757225e-05, |
| "loss": 0.1078, |
| "step": 1994 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 4.248554913294798e-05, |
| "loss": 0.0795, |
| "step": 1995 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 4.23121387283237e-05, |
| "loss": 0.0991, |
| "step": 1996 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 4.213872832369942e-05, |
| "loss": 0.0851, |
| "step": 1997 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 4.196531791907514e-05, |
| "loss": 0.1034, |
| "step": 1998 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 4.179190751445087e-05, |
| "loss": 0.1271, |
| "step": 1999 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 4.161849710982658e-05, |
| "loss": 0.0854, |
| "step": 2000 |
| }, |
| { |
| "epoch": 4.48, |
| "eval_loss": 0.3241537809371948, |
| "eval_runtime": 118.5981, |
| "eval_samples_per_second": 22.277, |
| "eval_steps_per_second": 0.7, |
| "eval_wer": 0.09617215390717969, |
| "step": 2000 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 4.1445086705202304e-05, |
| "loss": 0.0686, |
| "step": 2001 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 4.1271676300578025e-05, |
| "loss": 0.0767, |
| "step": 2002 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 4.109826589595375e-05, |
| "loss": 0.0671, |
| "step": 2003 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 4.092485549132947e-05, |
| "loss": 0.0795, |
| "step": 2004 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 4.075144508670519e-05, |
| "loss": 0.0778, |
| "step": 2005 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 4.057803468208092e-05, |
| "loss": 0.0848, |
| "step": 2006 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 4.040462427745664e-05, |
| "loss": 0.0779, |
| "step": 2007 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 4.023121387283236e-05, |
| "loss": 0.0919, |
| "step": 2008 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 4.005780346820808e-05, |
| "loss": 0.0797, |
| "step": 2009 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 3.988439306358381e-05, |
| "loss": 0.1024, |
| "step": 2010 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 3.971098265895953e-05, |
| "loss": 0.0783, |
| "step": 2011 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 3.953757225433525e-05, |
| "loss": 0.121, |
| "step": 2012 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 3.936416184971098e-05, |
| "loss": 0.0923, |
| "step": 2013 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 3.91907514450867e-05, |
| "loss": 0.0894, |
| "step": 2014 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 3.901734104046242e-05, |
| "loss": 0.0853, |
| "step": 2015 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 3.884393063583814e-05, |
| "loss": 0.0817, |
| "step": 2016 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 3.867052023121387e-05, |
| "loss": 0.0807, |
| "step": 2017 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 3.849710982658959e-05, |
| "loss": 0.0937, |
| "step": 2018 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 3.832369942196531e-05, |
| "loss": 0.0616, |
| "step": 2019 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 3.815028901734104e-05, |
| "loss": 0.1103, |
| "step": 2020 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 3.797687861271676e-05, |
| "loss": 0.0692, |
| "step": 2021 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 3.780346820809248e-05, |
| "loss": 0.0711, |
| "step": 2022 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 3.76300578034682e-05, |
| "loss": 0.0831, |
| "step": 2023 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 3.745664739884393e-05, |
| "loss": 0.0595, |
| "step": 2024 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 3.728323699421965e-05, |
| "loss": 0.1068, |
| "step": 2025 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 3.710982658959537e-05, |
| "loss": 0.1231, |
| "step": 2026 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 3.6936416184971096e-05, |
| "loss": 0.0825, |
| "step": 2027 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 3.6763005780346816e-05, |
| "loss": 0.0655, |
| "step": 2028 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 3.658959537572254e-05, |
| "loss": 0.0783, |
| "step": 2029 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 3.6416184971098265e-05, |
| "loss": 0.0802, |
| "step": 2030 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 3.6242774566473985e-05, |
| "loss": 0.0894, |
| "step": 2031 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 3.6069364161849706e-05, |
| "loss": 0.1315, |
| "step": 2032 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 3.5895953757225427e-05, |
| "loss": 0.0817, |
| "step": 2033 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 3.5722543352601154e-05, |
| "loss": 0.0775, |
| "step": 2034 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 3.5549132947976875e-05, |
| "loss": 0.0617, |
| "step": 2035 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 3.5375722543352595e-05, |
| "loss": 0.0888, |
| "step": 2036 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 3.520231213872832e-05, |
| "loss": 0.0773, |
| "step": 2037 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 3.5028901734104043e-05, |
| "loss": 0.0805, |
| "step": 2038 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 3.4855491329479764e-05, |
| "loss": 0.0825, |
| "step": 2039 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 3.4682080924855485e-05, |
| "loss": 0.1024, |
| "step": 2040 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 3.450867052023121e-05, |
| "loss": 0.0904, |
| "step": 2041 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 3.433526011560693e-05, |
| "loss": 0.0792, |
| "step": 2042 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 3.4161849710982654e-05, |
| "loss": 0.0815, |
| "step": 2043 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 3.398843930635838e-05, |
| "loss": 0.1538, |
| "step": 2044 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 3.38150289017341e-05, |
| "loss": 0.1054, |
| "step": 2045 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 3.364161849710982e-05, |
| "loss": 0.1048, |
| "step": 2046 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 3.346820809248554e-05, |
| "loss": 0.0601, |
| "step": 2047 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 3.329479768786127e-05, |
| "loss": 0.0902, |
| "step": 2048 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 3.312138728323699e-05, |
| "loss": 0.0643, |
| "step": 2049 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 3.294797687861271e-05, |
| "loss": 0.0758, |
| "step": 2050 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 3.277456647398844e-05, |
| "loss": 0.0995, |
| "step": 2051 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 3.260115606936416e-05, |
| "loss": 0.0782, |
| "step": 2052 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 3.242774566473988e-05, |
| "loss": 0.0938, |
| "step": 2053 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 3.22543352601156e-05, |
| "loss": 0.0799, |
| "step": 2054 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 3.208092485549133e-05, |
| "loss": 0.0786, |
| "step": 2055 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 3.190751445086705e-05, |
| "loss": 0.0871, |
| "step": 2056 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 3.173410404624277e-05, |
| "loss": 0.0861, |
| "step": 2057 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 3.15606936416185e-05, |
| "loss": 0.0865, |
| "step": 2058 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 3.138728323699422e-05, |
| "loss": 0.0937, |
| "step": 2059 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 3.121387283236994e-05, |
| "loss": 0.079, |
| "step": 2060 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 3.1040462427745667e-05, |
| "loss": 0.0828, |
| "step": 2061 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 3.086705202312139e-05, |
| "loss": 0.069, |
| "step": 2062 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 3.069364161849711e-05, |
| "loss": 0.0711, |
| "step": 2063 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 3.052023121387283e-05, |
| "loss": 0.0809, |
| "step": 2064 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 3.0346820809248553e-05, |
| "loss": 0.0918, |
| "step": 2065 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 3.0173410404624277e-05, |
| "loss": 0.0872, |
| "step": 2066 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 2.9999999999999997e-05, |
| "loss": 0.137, |
| "step": 2067 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 2.982658959537572e-05, |
| "loss": 0.0927, |
| "step": 2068 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 2.9653179190751446e-05, |
| "loss": 0.0503, |
| "step": 2069 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 2.9479768786127166e-05, |
| "loss": 0.0724, |
| "step": 2070 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 2.930635838150289e-05, |
| "loss": 0.0876, |
| "step": 2071 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 2.9132947976878608e-05, |
| "loss": 0.0625, |
| "step": 2072 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 2.895953757225433e-05, |
| "loss": 0.1153, |
| "step": 2073 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 2.8786127167630052e-05, |
| "loss": 0.1029, |
| "step": 2074 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 2.8612716763005776e-05, |
| "loss": 0.0751, |
| "step": 2075 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 2.8439306358381497e-05, |
| "loss": 0.095, |
| "step": 2076 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 2.826589595375722e-05, |
| "loss": 0.0992, |
| "step": 2077 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 2.8092485549132945e-05, |
| "loss": 0.0865, |
| "step": 2078 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 2.7919075144508666e-05, |
| "loss": 0.0952, |
| "step": 2079 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 2.774566473988439e-05, |
| "loss": 0.0839, |
| "step": 2080 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 2.757225433526011e-05, |
| "loss": 0.1188, |
| "step": 2081 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 2.7398843930635835e-05, |
| "loss": 0.0662, |
| "step": 2082 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 2.722543352601156e-05, |
| "loss": 0.0712, |
| "step": 2083 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 2.705202312138728e-05, |
| "loss": 0.0783, |
| "step": 2084 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 2.6878612716763003e-05, |
| "loss": 0.0807, |
| "step": 2085 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 2.6705202312138724e-05, |
| "loss": 0.0794, |
| "step": 2086 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 2.6531791907514448e-05, |
| "loss": 0.0884, |
| "step": 2087 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 2.635838150289017e-05, |
| "loss": 0.0744, |
| "step": 2088 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 2.6184971098265893e-05, |
| "loss": 0.0884, |
| "step": 2089 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 2.6011560693641617e-05, |
| "loss": 0.086, |
| "step": 2090 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 2.5838150289017338e-05, |
| "loss": 0.072, |
| "step": 2091 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 2.5664739884393062e-05, |
| "loss": 0.0781, |
| "step": 2092 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 2.5491329479768782e-05, |
| "loss": 0.0783, |
| "step": 2093 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 2.5317919075144507e-05, |
| "loss": 0.0863, |
| "step": 2094 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 2.514450867052023e-05, |
| "loss": 0.0981, |
| "step": 2095 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 2.497109826589595e-05, |
| "loss": 0.0823, |
| "step": 2096 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 2.4797687861271675e-05, |
| "loss": 0.0782, |
| "step": 2097 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 2.4624277456647396e-05, |
| "loss": 0.0872, |
| "step": 2098 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 2.445086705202312e-05, |
| "loss": 0.1038, |
| "step": 2099 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 2.427745664739884e-05, |
| "loss": 0.0779, |
| "step": 2100 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 2.4104046242774565e-05, |
| "loss": 0.0934, |
| "step": 2101 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 2.393063583815029e-05, |
| "loss": 0.0844, |
| "step": 2102 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 2.375722543352601e-05, |
| "loss": 0.0749, |
| "step": 2103 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 2.3583815028901734e-05, |
| "loss": 0.0885, |
| "step": 2104 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 2.3410404624277454e-05, |
| "loss": 0.0809, |
| "step": 2105 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 2.323699421965318e-05, |
| "loss": 0.0717, |
| "step": 2106 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 2.30635838150289e-05, |
| "loss": 0.0812, |
| "step": 2107 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 2.2890173410404623e-05, |
| "loss": 0.1016, |
| "step": 2108 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 2.2716763005780347e-05, |
| "loss": 0.0852, |
| "step": 2109 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 2.2543352601156068e-05, |
| "loss": 0.0918, |
| "step": 2110 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 2.2369942196531792e-05, |
| "loss": 0.0618, |
| "step": 2111 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 2.2196531791907513e-05, |
| "loss": 0.0806, |
| "step": 2112 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 2.2023121387283237e-05, |
| "loss": 0.1146, |
| "step": 2113 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 2.184971098265896e-05, |
| "loss": 0.0722, |
| "step": 2114 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 2.167630057803468e-05, |
| "loss": 0.0876, |
| "step": 2115 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 2.1502890173410405e-05, |
| "loss": 0.0946, |
| "step": 2116 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 2.1329479768786126e-05, |
| "loss": 0.0994, |
| "step": 2117 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 2.115606936416185e-05, |
| "loss": 0.0972, |
| "step": 2118 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 2.098265895953757e-05, |
| "loss": 0.0764, |
| "step": 2119 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 2.080924855491329e-05, |
| "loss": 0.0783, |
| "step": 2120 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 2.0635838150289012e-05, |
| "loss": 0.0645, |
| "step": 2121 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 2.0462427745664736e-05, |
| "loss": 0.0881, |
| "step": 2122 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 2.028901734104046e-05, |
| "loss": 0.0993, |
| "step": 2123 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 2.011560693641618e-05, |
| "loss": 0.0808, |
| "step": 2124 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 1.9942196531791905e-05, |
| "loss": 0.0626, |
| "step": 2125 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 1.9768786127167626e-05, |
| "loss": 0.0951, |
| "step": 2126 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 1.959537572254335e-05, |
| "loss": 0.0612, |
| "step": 2127 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 1.942196531791907e-05, |
| "loss": 0.0624, |
| "step": 2128 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 1.9248554913294795e-05, |
| "loss": 0.0874, |
| "step": 2129 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 1.907514450867052e-05, |
| "loss": 0.068, |
| "step": 2130 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 1.890173410404624e-05, |
| "loss": 0.1055, |
| "step": 2131 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 1.8728323699421963e-05, |
| "loss": 0.0707, |
| "step": 2132 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 1.8554913294797684e-05, |
| "loss": 0.0969, |
| "step": 2133 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 1.8381502890173408e-05, |
| "loss": 0.0499, |
| "step": 2134 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 1.8208092485549132e-05, |
| "loss": 0.1058, |
| "step": 2135 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 1.8034682080924853e-05, |
| "loss": 0.0862, |
| "step": 2136 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 1.7861271676300577e-05, |
| "loss": 0.0658, |
| "step": 2137 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 1.7687861271676298e-05, |
| "loss": 0.0766, |
| "step": 2138 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 1.7514450867052022e-05, |
| "loss": 0.1162, |
| "step": 2139 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 1.7341040462427742e-05, |
| "loss": 0.0988, |
| "step": 2140 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 1.7167630057803466e-05, |
| "loss": 0.0975, |
| "step": 2141 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 1.699421965317919e-05, |
| "loss": 0.1651, |
| "step": 2142 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 1.682080924855491e-05, |
| "loss": 0.105, |
| "step": 2143 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 1.6647398843930635e-05, |
| "loss": 0.0957, |
| "step": 2144 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 1.6473988439306356e-05, |
| "loss": 0.0726, |
| "step": 2145 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 1.630057803468208e-05, |
| "loss": 0.0528, |
| "step": 2146 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 1.61271676300578e-05, |
| "loss": 0.0666, |
| "step": 2147 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 1.5953757225433525e-05, |
| "loss": 0.0781, |
| "step": 2148 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 1.578034682080925e-05, |
| "loss": 0.0557, |
| "step": 2149 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 1.560693641618497e-05, |
| "loss": 0.0985, |
| "step": 2150 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 1.5433526011560694e-05, |
| "loss": 0.0905, |
| "step": 2151 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 1.5260115606936414e-05, |
| "loss": 0.0789, |
| "step": 2152 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 1.5086705202312138e-05, |
| "loss": 0.0936, |
| "step": 2153 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 1.491329479768786e-05, |
| "loss": 0.1076, |
| "step": 2154 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 1.4739884393063583e-05, |
| "loss": 0.0775, |
| "step": 2155 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 1.4566473988439304e-05, |
| "loss": 0.0695, |
| "step": 2156 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 1.4393063583815026e-05, |
| "loss": 0.1108, |
| "step": 2157 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 1.4219653179190749e-05, |
| "loss": 0.0746, |
| "step": 2158 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 1.4046242774566473e-05, |
| "loss": 0.0869, |
| "step": 2159 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 1.3872832369942195e-05, |
| "loss": 0.0947, |
| "step": 2160 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 1.3699421965317917e-05, |
| "loss": 0.076, |
| "step": 2161 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 1.352601156069364e-05, |
| "loss": 0.1075, |
| "step": 2162 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 1.3352601156069362e-05, |
| "loss": 0.08, |
| "step": 2163 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 1.3179190751445084e-05, |
| "loss": 0.0527, |
| "step": 2164 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 1.3005780346820809e-05, |
| "loss": 0.0526, |
| "step": 2165 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 1.2832369942196531e-05, |
| "loss": 0.0812, |
| "step": 2166 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 1.2658959537572253e-05, |
| "loss": 0.0943, |
| "step": 2167 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 1.2485549132947976e-05, |
| "loss": 0.0511, |
| "step": 2168 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 1.2312138728323698e-05, |
| "loss": 0.0936, |
| "step": 2169 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 1.213872832369942e-05, |
| "loss": 0.0876, |
| "step": 2170 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 1.1965317919075144e-05, |
| "loss": 0.0957, |
| "step": 2171 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 1.1791907514450867e-05, |
| "loss": 0.0711, |
| "step": 2172 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 1.161849710982659e-05, |
| "loss": 0.0782, |
| "step": 2173 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 1.1445086705202312e-05, |
| "loss": 0.0779, |
| "step": 2174 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 1.1271676300578034e-05, |
| "loss": 0.0751, |
| "step": 2175 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 1.1098265895953756e-05, |
| "loss": 0.0838, |
| "step": 2176 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 1.092485549132948e-05, |
| "loss": 0.088, |
| "step": 2177 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 1.0751445086705203e-05, |
| "loss": 0.0682, |
| "step": 2178 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 1.0578034682080925e-05, |
| "loss": 0.0926, |
| "step": 2179 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 1.0404624277456646e-05, |
| "loss": 0.0709, |
| "step": 2180 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 1.0231213872832368e-05, |
| "loss": 0.0636, |
| "step": 2181 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 1.005780346820809e-05, |
| "loss": 0.082, |
| "step": 2182 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 9.884393063583813e-06, |
| "loss": 0.0703, |
| "step": 2183 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 9.710982658959535e-06, |
| "loss": 0.0871, |
| "step": 2184 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 9.53757225433526e-06, |
| "loss": 0.0811, |
| "step": 2185 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 9.364161849710982e-06, |
| "loss": 0.0853, |
| "step": 2186 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 9.190751445086704e-06, |
| "loss": 0.1084, |
| "step": 2187 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 9.017341040462426e-06, |
| "loss": 0.0891, |
| "step": 2188 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 8.843930635838149e-06, |
| "loss": 0.0723, |
| "step": 2189 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 8.670520231213871e-06, |
| "loss": 0.0716, |
| "step": 2190 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 8.497109826589595e-06, |
| "loss": 0.0787, |
| "step": 2191 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 8.323699421965318e-06, |
| "loss": 0.0874, |
| "step": 2192 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 8.15028901734104e-06, |
| "loss": 0.1161, |
| "step": 2193 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 7.976878612716762e-06, |
| "loss": 0.1037, |
| "step": 2194 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 7.803468208092485e-06, |
| "loss": 0.0911, |
| "step": 2195 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 7.630057803468207e-06, |
| "loss": 0.0722, |
| "step": 2196 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 7.45664739884393e-06, |
| "loss": 0.0905, |
| "step": 2197 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 7.283236994219652e-06, |
| "loss": 0.0813, |
| "step": 2198 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 7.109826589595374e-06, |
| "loss": 0.0838, |
| "step": 2199 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 6.9364161849710975e-06, |
| "loss": 0.073, |
| "step": 2200 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 6.76300578034682e-06, |
| "loss": 0.1058, |
| "step": 2201 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 6.589595375722542e-06, |
| "loss": 0.0893, |
| "step": 2202 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 6.4161849710982654e-06, |
| "loss": 0.0905, |
| "step": 2203 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 6.242774566473988e-06, |
| "loss": 0.0726, |
| "step": 2204 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 6.06936416184971e-06, |
| "loss": 0.0691, |
| "step": 2205 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 5.895953757225433e-06, |
| "loss": 0.0743, |
| "step": 2206 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 5.722543352601156e-06, |
| "loss": 0.0791, |
| "step": 2207 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 5.549132947976878e-06, |
| "loss": 0.0703, |
| "step": 2208 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 5.375722543352601e-06, |
| "loss": 0.1209, |
| "step": 2209 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 5.202312138728323e-06, |
| "loss": 0.1226, |
| "step": 2210 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 5.028901734104045e-06, |
| "loss": 0.0521, |
| "step": 2211 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 4.855491329479768e-06, |
| "loss": 0.0755, |
| "step": 2212 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 4.682080924855491e-06, |
| "loss": 0.0935, |
| "step": 2213 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 4.508670520231213e-06, |
| "loss": 0.0896, |
| "step": 2214 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 4.335260115606936e-06, |
| "loss": 0.0813, |
| "step": 2215 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 4.161849710982659e-06, |
| "loss": 0.0872, |
| "step": 2216 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 3.988439306358381e-06, |
| "loss": 0.0836, |
| "step": 2217 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 3.8150289017341036e-06, |
| "loss": 0.0734, |
| "step": 2218 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 3.641618497109826e-06, |
| "loss": 0.0942, |
| "step": 2219 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 3.4682080924855487e-06, |
| "loss": 0.0653, |
| "step": 2220 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 3.294797687861271e-06, |
| "loss": 0.0691, |
| "step": 2221 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 3.121387283236994e-06, |
| "loss": 0.0733, |
| "step": 2222 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 2.9479768786127167e-06, |
| "loss": 0.0699, |
| "step": 2223 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 2.774566473988439e-06, |
| "loss": 0.0791, |
| "step": 2224 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 2.6011560693641614e-06, |
| "loss": 0.0762, |
| "step": 2225 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 2.427745664739884e-06, |
| "loss": 0.0789, |
| "step": 2226 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 2.2543352601156066e-06, |
| "loss": 0.0904, |
| "step": 2227 |
| }, |
| { |
| "epoch": 5.0, |
| "learning_rate": 2.0809248554913294e-06, |
| "loss": 0.0661, |
| "step": 2228 |
| }, |
| { |
| "epoch": 5.0, |
| "learning_rate": 1.9075144508670518e-06, |
| "loss": 0.0824, |
| "step": 2229 |
| }, |
| { |
| "epoch": 5.0, |
| "learning_rate": 1.7341040462427744e-06, |
| "loss": 0.0929, |
| "step": 2230 |
| }, |
| { |
| "epoch": 5.0, |
| "step": 2230, |
| "total_flos": 9.987211386388113e+19, |
| "train_loss": 1.844849973621323, |
| "train_runtime": 5604.6492, |
| "train_samples_per_second": 25.459, |
| "train_steps_per_second": 0.398 |
| } |
| ], |
| "max_steps": 2230, |
| "num_train_epochs": 5, |
| "total_flos": 9.987211386388113e+19, |
| "trial_name": null, |
| "trial_params": null |
| } |
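The JSON above appears to be the `trainer_state.json` that Hugging Face's `Trainer` writes alongside each checkpoint: `log_history` holds one record per logged optimizer step (epoch, learning rate, loss), and the final record is a run summary, where `train_loss` is the average loss over all 2230 steps, which is why it sits near 1.84 even though the last per-step losses hover around 0.08. The learning-rate column traces the schedule itself: a brief warmup from 0 at the start of the run, then a linear decay that reaches roughly zero at the final step.

A minimal sketch for inspecting such a file, assuming it is saved locally as `trainer_state.json` (the filename and the matplotlib plotting are illustrative, not part of the log itself):

```python
import json

import matplotlib.pyplot as plt

# Load the trainer state written by Hugging Face's Trainer.
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep per-step records only; the last entry is a run summary
# (train_loss, train_runtime, ...) and has no "loss" key.
logs = [e for e in state["log_history"] if "loss" in e]

steps = [e["step"] for e in logs]
losses = [e["loss"] for e in logs]
lrs = [e["learning_rate"] for e in logs]

# Two stacked panels: loss curve on top, LR schedule below.
fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True)
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("global step")
fig.tight_layout()
plt.show()
```

Two sanity checks worth running on the same data: `len(logs)` should equal `state["global_step"]` when every step is logged, as here, and `sum(losses) / len(losses)` should land close to the reported `train_loss` of about 1.84.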