| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 4.962025316455696, |
| "eval_steps": 500, |
| "global_step": 490, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.01, |
| "learning_rate": 0.0, |
| "loss": 2.2689, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.306765580733931e-06, |
| "loss": 1.9111, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 6.826061944859854e-06, |
| "loss": 2.0757, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 8.613531161467863e-06, |
| "loss": 1.8188, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1e-05, |
| "loss": 1.7535, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.1132827525593786e-05, |
| "loss": 1.6784, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.2090619551221676e-05, |
| "loss": 1.8318, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.2920296742201793e-05, |
| "loss": 1.7084, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.3652123889719709e-05, |
| "loss": 1.6416, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.4306765580733935e-05, |
| "loss": 1.6962, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.4898961024049785e-05, |
| "loss": 1.5732, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.543959310632772e-05, |
| "loss": 1.6528, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.5936926411670824e-05, |
| "loss": 1.716, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.639738513195561e-05, |
| "loss": 1.5802, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.6826061944859853e-05, |
| "loss": 1.6403, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.7227062322935725e-05, |
| "loss": 1.5923, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.7603744277225883e-05, |
| "loss": 1.6141, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.7958889470453637e-05, |
| "loss": 1.6916, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8294828004351506e-05, |
| "loss": 1.5832, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.8613531161467863e-05, |
| "loss": 1.6152, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.891668149608153e-05, |
| "loss": 1.6234, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.9205726604783716e-05, |
| "loss": 1.6703, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.94819209346638e-05, |
| "loss": 1.6648, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.9746358687061647e-05, |
| "loss": 1.7037, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 2e-05, |
| "loss": 1.6608, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 2e-05, |
| "loss": 1.5428, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.995698924731183e-05, |
| "loss": 1.6165, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.991397849462366e-05, |
| "loss": 1.5947, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.9870967741935484e-05, |
| "loss": 1.6807, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.9827956989247316e-05, |
| "loss": 1.4922, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.978494623655914e-05, |
| "loss": 1.5233, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.974193548387097e-05, |
| "loss": 1.6164, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.96989247311828e-05, |
| "loss": 1.4448, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.9655913978494624e-05, |
| "loss": 1.7434, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.9612903225806452e-05, |
| "loss": 1.596, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.956989247311828e-05, |
| "loss": 1.7181, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.952688172043011e-05, |
| "loss": 1.5812, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.948387096774194e-05, |
| "loss": 1.6811, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.9440860215053764e-05, |
| "loss": 1.5775, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.9397849462365592e-05, |
| "loss": 1.6463, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.935483870967742e-05, |
| "loss": 1.5282, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.931182795698925e-05, |
| "loss": 1.6476, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.9268817204301078e-05, |
| "loss": 1.4159, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.9225806451612907e-05, |
| "loss": 1.51, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.9182795698924732e-05, |
| "loss": 1.5285, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.913978494623656e-05, |
| "loss": 1.7255, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.909677419354839e-05, |
| "loss": 1.6362, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.9053763440860214e-05, |
| "loss": 1.5058, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.9010752688172046e-05, |
| "loss": 1.6969, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.896774193548387e-05, |
| "loss": 1.7406, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.89247311827957e-05, |
| "loss": 1.595, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.888172043010753e-05, |
| "loss": 1.6272, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.8838709677419354e-05, |
| "loss": 1.6688, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.8795698924731186e-05, |
| "loss": 1.4956, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.875268817204301e-05, |
| "loss": 1.6564, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.870967741935484e-05, |
| "loss": 1.6055, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.866666666666667e-05, |
| "loss": 1.3791, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.8623655913978497e-05, |
| "loss": 1.4353, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.8580645161290326e-05, |
| "loss": 1.4798, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.853763440860215e-05, |
| "loss": 1.4247, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.849462365591398e-05, |
| "loss": 1.5351, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.845161290322581e-05, |
| "loss": 1.5758, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.8408602150537637e-05, |
| "loss": 1.6437, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.8365591397849466e-05, |
| "loss": 1.6213, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.832258064516129e-05, |
| "loss": 1.6822, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.827956989247312e-05, |
| "loss": 1.7296, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.8236559139784948e-05, |
| "loss": 1.7892, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.8193548387096777e-05, |
| "loss": 1.6259, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.8150537634408602e-05, |
| "loss": 1.4952, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.810752688172043e-05, |
| "loss": 1.6235, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.806451612903226e-05, |
| "loss": 1.6421, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.8021505376344088e-05, |
| "loss": 1.6154, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.7978494623655916e-05, |
| "loss": 1.47, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.7935483870967742e-05, |
| "loss": 1.4677, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.789247311827957e-05, |
| "loss": 1.4999, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.78494623655914e-05, |
| "loss": 1.6743, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.7806451612903228e-05, |
| "loss": 1.4953, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.7763440860215056e-05, |
| "loss": 1.6313, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.772043010752688e-05, |
| "loss": 1.5617, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.7677419354838713e-05, |
| "loss": 1.5945, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.763440860215054e-05, |
| "loss": 1.5671, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.7591397849462367e-05, |
| "loss": 1.4768, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.7548387096774196e-05, |
| "loss": 1.5294, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.750537634408602e-05, |
| "loss": 1.499, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.7462365591397853e-05, |
| "loss": 1.5867, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.741935483870968e-05, |
| "loss": 1.4942, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.7376344086021507e-05, |
| "loss": 1.5331, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.7333333333333336e-05, |
| "loss": 1.7325, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.729032258064516e-05, |
| "loss": 1.6354, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.724731182795699e-05, |
| "loss": 1.6768, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.7204301075268818e-05, |
| "loss": 1.4602, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.7161290322580647e-05, |
| "loss": 1.4933, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.7118279569892475e-05, |
| "loss": 1.4517, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.7075268817204304e-05, |
| "loss": 1.7103, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.703225806451613e-05, |
| "loss": 1.5579, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.6989247311827958e-05, |
| "loss": 1.4942, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.6946236559139786e-05, |
| "loss": 1.5904, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.6903225806451615e-05, |
| "loss": 1.5026, |
| "step": 98 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.6860215053763444e-05, |
| "loss": 1.3493, |
| "step": 99 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.681720430107527e-05, |
| "loss": 0.9543, |
| "step": 100 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.6774193548387098e-05, |
| "loss": 0.9765, |
| "step": 101 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.6731182795698926e-05, |
| "loss": 0.9687, |
| "step": 102 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.668817204301075e-05, |
| "loss": 1.0385, |
| "step": 103 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.6645161290322583e-05, |
| "loss": 1.0394, |
| "step": 104 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.660215053763441e-05, |
| "loss": 0.9033, |
| "step": 105 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.6559139784946237e-05, |
| "loss": 0.8905, |
| "step": 106 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.6516129032258066e-05, |
| "loss": 1.4783, |
| "step": 107 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.6473118279569895e-05, |
| "loss": 1.1471, |
| "step": 108 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.6430107526881723e-05, |
| "loss": 1.061, |
| "step": 109 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.638709677419355e-05, |
| "loss": 1.0713, |
| "step": 110 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.6344086021505377e-05, |
| "loss": 0.918, |
| "step": 111 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 1.6301075268817206e-05, |
| "loss": 1.0217, |
| "step": 112 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 1.6258064516129034e-05, |
| "loss": 0.9848, |
| "step": 113 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 1.6215053763440863e-05, |
| "loss": 1.0507, |
| "step": 114 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 1.6172043010752688e-05, |
| "loss": 1.2116, |
| "step": 115 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 1.6129032258064517e-05, |
| "loss": 1.1489, |
| "step": 116 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 1.6086021505376345e-05, |
| "loss": 0.9653, |
| "step": 117 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 1.6043010752688174e-05, |
| "loss": 0.912, |
| "step": 118 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 1.6000000000000003e-05, |
| "loss": 1.1097, |
| "step": 119 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 1.5956989247311828e-05, |
| "loss": 1.0009, |
| "step": 120 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 1.5913978494623657e-05, |
| "loss": 1.1487, |
| "step": 121 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 1.5870967741935485e-05, |
| "loss": 0.9222, |
| "step": 122 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 1.5827956989247314e-05, |
| "loss": 0.9213, |
| "step": 123 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 1.5784946236559142e-05, |
| "loss": 1.0347, |
| "step": 124 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 1.5741935483870968e-05, |
| "loss": 1.014, |
| "step": 125 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 1.5698924731182796e-05, |
| "loss": 0.9657, |
| "step": 126 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 1.5655913978494625e-05, |
| "loss": 1.0616, |
| "step": 127 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 1.5612903225806454e-05, |
| "loss": 0.8538, |
| "step": 128 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 1.556989247311828e-05, |
| "loss": 0.8506, |
| "step": 129 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 1.552688172043011e-05, |
| "loss": 1.0627, |
| "step": 130 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 1.5483870967741936e-05, |
| "loss": 0.8975, |
| "step": 131 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 1.5440860215053765e-05, |
| "loss": 0.8824, |
| "step": 132 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 1.5397849462365593e-05, |
| "loss": 1.0609, |
| "step": 133 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 1.535483870967742e-05, |
| "loss": 0.8495, |
| "step": 134 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 1.531182795698925e-05, |
| "loss": 0.9624, |
| "step": 135 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 1.5268817204301076e-05, |
| "loss": 0.9412, |
| "step": 136 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 1.5225806451612903e-05, |
| "loss": 0.8386, |
| "step": 137 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 1.5182795698924733e-05, |
| "loss": 0.912, |
| "step": 138 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 1.513978494623656e-05, |
| "loss": 0.8699, |
| "step": 139 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 1.5096774193548389e-05, |
| "loss": 0.9416, |
| "step": 140 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 1.5053763440860215e-05, |
| "loss": 1.0873, |
| "step": 141 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 1.5010752688172044e-05, |
| "loss": 0.9139, |
| "step": 142 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 1.4967741935483873e-05, |
| "loss": 0.9538, |
| "step": 143 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 1.49247311827957e-05, |
| "loss": 0.9326, |
| "step": 144 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 1.4881720430107528e-05, |
| "loss": 1.0359, |
| "step": 145 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 1.4838709677419357e-05, |
| "loss": 1.0978, |
| "step": 146 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 1.4795698924731184e-05, |
| "loss": 0.922, |
| "step": 147 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 1.4752688172043012e-05, |
| "loss": 0.9583, |
| "step": 148 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 1.470967741935484e-05, |
| "loss": 0.9822, |
| "step": 149 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 1.4666666666666666e-05, |
| "loss": 0.8844, |
| "step": 150 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 1.4623655913978497e-05, |
| "loss": 0.9124, |
| "step": 151 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 1.4580645161290324e-05, |
| "loss": 0.915, |
| "step": 152 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 1.4537634408602152e-05, |
| "loss": 1.0011, |
| "step": 153 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 1.4494623655913979e-05, |
| "loss": 0.9509, |
| "step": 154 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 1.4451612903225806e-05, |
| "loss": 0.875, |
| "step": 155 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 1.4408602150537636e-05, |
| "loss": 0.9167, |
| "step": 156 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 1.4365591397849463e-05, |
| "loss": 1.0028, |
| "step": 157 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 1.4322580645161292e-05, |
| "loss": 1.0519, |
| "step": 158 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 1.4279569892473119e-05, |
| "loss": 0.9254, |
| "step": 159 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 1.4236559139784947e-05, |
| "loss": 0.9952, |
| "step": 160 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 1.4193548387096776e-05, |
| "loss": 0.8365, |
| "step": 161 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 1.4150537634408603e-05, |
| "loss": 0.8959, |
| "step": 162 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 1.410752688172043e-05, |
| "loss": 0.906, |
| "step": 163 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 1.406451612903226e-05, |
| "loss": 0.9985, |
| "step": 164 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 1.4021505376344087e-05, |
| "loss": 1.0443, |
| "step": 165 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 1.3978494623655916e-05, |
| "loss": 0.8452, |
| "step": 166 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 1.3935483870967743e-05, |
| "loss": 0.9754, |
| "step": 167 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 1.389247311827957e-05, |
| "loss": 1.029, |
| "step": 168 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 1.38494623655914e-05, |
| "loss": 0.9344, |
| "step": 169 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 1.3806451612903227e-05, |
| "loss": 0.8564, |
| "step": 170 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 1.3763440860215056e-05, |
| "loss": 0.8746, |
| "step": 171 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 1.3720430107526882e-05, |
| "loss": 0.9671, |
| "step": 172 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 1.367741935483871e-05, |
| "loss": 0.9063, |
| "step": 173 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 1.363440860215054e-05, |
| "loss": 0.8366, |
| "step": 174 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 1.3591397849462367e-05, |
| "loss": 0.9146, |
| "step": 175 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 1.3548387096774194e-05, |
| "loss": 0.9435, |
| "step": 176 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 1.3505376344086024e-05, |
| "loss": 1.0586, |
| "step": 177 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 1.346236559139785e-05, |
| "loss": 0.9272, |
| "step": 178 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 1.341935483870968e-05, |
| "loss": 0.9492, |
| "step": 179 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 1.3376344086021506e-05, |
| "loss": 0.939, |
| "step": 180 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 1.3333333333333333e-05, |
| "loss": 0.9346, |
| "step": 181 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 1.3290322580645164e-05, |
| "loss": 0.9455, |
| "step": 182 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 1.324731182795699e-05, |
| "loss": 0.8325, |
| "step": 183 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 1.3204301075268817e-05, |
| "loss": 1.074, |
| "step": 184 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 1.3161290322580646e-05, |
| "loss": 1.1218, |
| "step": 185 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 1.3118279569892473e-05, |
| "loss": 0.9108, |
| "step": 186 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 1.3075268817204303e-05, |
| "loss": 0.9461, |
| "step": 187 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 1.303225806451613e-05, |
| "loss": 0.9079, |
| "step": 188 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 1.2989247311827957e-05, |
| "loss": 0.8472, |
| "step": 189 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 1.2946236559139786e-05, |
| "loss": 0.8697, |
| "step": 190 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 1.2903225806451613e-05, |
| "loss": 0.9297, |
| "step": 191 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 1.2860215053763443e-05, |
| "loss": 0.8979, |
| "step": 192 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 1.281720430107527e-05, |
| "loss": 0.7667, |
| "step": 193 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 1.2774193548387097e-05, |
| "loss": 0.9026, |
| "step": 194 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 1.2731182795698927e-05, |
| "loss": 0.9931, |
| "step": 195 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 1.2688172043010754e-05, |
| "loss": 1.0038, |
| "step": 196 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 1.2645161290322581e-05, |
| "loss": 0.9209, |
| "step": 197 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 1.260215053763441e-05, |
| "loss": 0.7258, |
| "step": 198 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 1.2559139784946237e-05, |
| "loss": 0.491, |
| "step": 199 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 1.2516129032258067e-05, |
| "loss": 0.6607, |
| "step": 200 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 1.2473118279569894e-05, |
| "loss": 0.5564, |
| "step": 201 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 1.243010752688172e-05, |
| "loss": 0.5636, |
| "step": 202 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 1.238709677419355e-05, |
| "loss": 0.5425, |
| "step": 203 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 1.2344086021505376e-05, |
| "loss": 0.5245, |
| "step": 204 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 1.2301075268817207e-05, |
| "loss": 0.4281, |
| "step": 205 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 1.2258064516129034e-05, |
| "loss": 0.5537, |
| "step": 206 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 1.221505376344086e-05, |
| "loss": 0.5108, |
| "step": 207 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 1.217204301075269e-05, |
| "loss": 0.5137, |
| "step": 208 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 1.2129032258064518e-05, |
| "loss": 0.5287, |
| "step": 209 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 1.2086021505376345e-05, |
| "loss": 0.5948, |
| "step": 210 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 1.2043010752688173e-05, |
| "loss": 0.5296, |
| "step": 211 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 1.2e-05, |
| "loss": 0.5619, |
| "step": 212 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 1.195698924731183e-05, |
| "loss": 0.5297, |
| "step": 213 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 1.1913978494623658e-05, |
| "loss": 0.5508, |
| "step": 214 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 1.1870967741935484e-05, |
| "loss": 0.5285, |
| "step": 215 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 1.1827956989247313e-05, |
| "loss": 0.5449, |
| "step": 216 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 1.178494623655914e-05, |
| "loss": 0.5877, |
| "step": 217 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 1.1741935483870967e-05, |
| "loss": 0.6124, |
| "step": 218 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 1.1698924731182797e-05, |
| "loss": 0.4578, |
| "step": 219 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 1.1655913978494624e-05, |
| "loss": 0.5648, |
| "step": 220 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 1.1612903225806453e-05, |
| "loss": 0.5497, |
| "step": 221 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 1.156989247311828e-05, |
| "loss": 0.4563, |
| "step": 222 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 1.1526881720430108e-05, |
| "loss": 0.5004, |
| "step": 223 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 1.1483870967741937e-05, |
| "loss": 0.5579, |
| "step": 224 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 1.1440860215053764e-05, |
| "loss": 0.5161, |
| "step": 225 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 1.1397849462365593e-05, |
| "loss": 0.4854, |
| "step": 226 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 1.1354838709677421e-05, |
| "loss": 0.5003, |
| "step": 227 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 1.1311827956989248e-05, |
| "loss": 0.5601, |
| "step": 228 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 1.1268817204301077e-05, |
| "loss": 0.4731, |
| "step": 229 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 1.1225806451612904e-05, |
| "loss": 0.5628, |
| "step": 230 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 1.118279569892473e-05, |
| "loss": 0.502, |
| "step": 231 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 1.1139784946236561e-05, |
| "loss": 0.4052, |
| "step": 232 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 1.1096774193548388e-05, |
| "loss": 0.5881, |
| "step": 233 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 1.1053763440860216e-05, |
| "loss": 0.5344, |
| "step": 234 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 1.1010752688172043e-05, |
| "loss": 0.5503, |
| "step": 235 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 1.096774193548387e-05, |
| "loss": 0.4168, |
| "step": 236 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 1.09247311827957e-05, |
| "loss": 0.4879, |
| "step": 237 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 1.0881720430107528e-05, |
| "loss": 0.4623, |
| "step": 238 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 1.0838709677419356e-05, |
| "loss": 0.4799, |
| "step": 239 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 1.0795698924731183e-05, |
| "loss": 0.5277, |
| "step": 240 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 1.0752688172043012e-05, |
| "loss": 0.6051, |
| "step": 241 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 1.070967741935484e-05, |
| "loss": 0.5192, |
| "step": 242 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 1.0666666666666667e-05, |
| "loss": 0.4111, |
| "step": 243 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 1.0623655913978494e-05, |
| "loss": 0.5806, |
| "step": 244 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 1.0580645161290325e-05, |
| "loss": 0.5616, |
| "step": 245 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 1.0537634408602151e-05, |
| "loss": 0.5773, |
| "step": 246 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 1.049462365591398e-05, |
| "loss": 0.4419, |
| "step": 247 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 1.0451612903225807e-05, |
| "loss": 0.6086, |
| "step": 248 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 1.0408602150537634e-05, |
| "loss": 0.5957, |
| "step": 249 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 1.0365591397849464e-05, |
| "loss": 0.4609, |
| "step": 250 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 1.0322580645161291e-05, |
| "loss": 0.4956, |
| "step": 251 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 1.027956989247312e-05, |
| "loss": 0.5612, |
| "step": 252 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 1.0236559139784947e-05, |
| "loss": 0.5157, |
| "step": 253 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 1.0193548387096774e-05, |
| "loss": 0.4884, |
| "step": 254 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 1.0150537634408604e-05, |
| "loss": 0.4576, |
| "step": 255 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 1.0107526881720431e-05, |
| "loss": 0.4093, |
| "step": 256 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 1.0064516129032258e-05, |
| "loss": 0.5521, |
| "step": 257 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 1.0021505376344087e-05, |
| "loss": 0.6088, |
| "step": 258 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 9.978494623655915e-06, |
| "loss": 0.4471, |
| "step": 259 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 9.935483870967742e-06, |
| "loss": 0.488, |
| "step": 260 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 9.89247311827957e-06, |
| "loss": 0.552, |
| "step": 261 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 9.8494623655914e-06, |
| "loss": 0.4774, |
| "step": 262 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 9.806451612903226e-06, |
| "loss": 0.5985, |
| "step": 263 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 9.763440860215055e-06, |
| "loss": 0.4899, |
| "step": 264 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 9.720430107526882e-06, |
| "loss": 0.5514, |
| "step": 265 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 9.67741935483871e-06, |
| "loss": 0.5196, |
| "step": 266 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 9.634408602150539e-06, |
| "loss": 0.4633, |
| "step": 267 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 9.591397849462366e-06, |
| "loss": 0.4797, |
| "step": 268 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 9.548387096774195e-06, |
| "loss": 0.6344, |
| "step": 269 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 9.505376344086023e-06, |
| "loss": 0.4854, |
| "step": 270 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 9.46236559139785e-06, |
| "loss": 0.4978, |
| "step": 271 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 9.419354838709677e-06, |
| "loss": 0.4673, |
| "step": 272 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 9.376344086021506e-06, |
| "loss": 0.5449, |
| "step": 273 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 9.333333333333334e-06, |
| "loss": 0.4383, |
| "step": 274 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 9.290322580645163e-06, |
| "loss": 0.4366, |
| "step": 275 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 9.24731182795699e-06, |
| "loss": 0.496, |
| "step": 276 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 9.204301075268819e-06, |
| "loss": 0.4166, |
| "step": 277 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 9.161290322580645e-06, |
| "loss": 0.5266, |
| "step": 278 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 9.118279569892474e-06, |
| "loss": 0.6554, |
| "step": 279 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 9.075268817204301e-06, |
| "loss": 0.5783, |
| "step": 280 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 9.03225806451613e-06, |
| "loss": 0.4905, |
| "step": 281 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 8.989247311827958e-06, |
| "loss": 0.4702, |
| "step": 282 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 8.946236559139785e-06, |
| "loss": 0.4972, |
| "step": 283 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.903225806451614e-06, |
| "loss": 0.4287, |
| "step": 284 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 8.86021505376344e-06, |
| "loss": 0.6365, |
| "step": 285 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 8.81720430107527e-06, |
| "loss": 0.5102, |
| "step": 286 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 8.774193548387098e-06, |
| "loss": 0.4436, |
| "step": 287 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 8.731182795698927e-06, |
| "loss": 0.4853, |
| "step": 288 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 8.688172043010754e-06, |
| "loss": 0.612, |
| "step": 289 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 8.64516129032258e-06, |
| "loss": 0.51, |
| "step": 290 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 8.602150537634409e-06, |
| "loss": 0.552, |
| "step": 291 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 8.559139784946238e-06, |
| "loss": 0.5142, |
| "step": 292 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 8.516129032258065e-06, |
| "loss": 0.5301, |
| "step": 293 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 8.473118279569893e-06, |
| "loss": 0.5718, |
| "step": 294 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 8.430107526881722e-06, |
| "loss": 0.5609, |
| "step": 295 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 8.387096774193549e-06, |
| "loss": 0.6477, |
| "step": 296 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 8.344086021505376e-06, |
| "loss": 0.2912, |
| "step": 297 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 8.301075268817204e-06, |
| "loss": 0.2973, |
| "step": 298 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 8.258064516129033e-06, |
| "loss": 0.2227, |
| "step": 299 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 8.215053763440862e-06, |
| "loss": 0.2563, |
| "step": 300 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 8.172043010752689e-06, |
| "loss": 0.2987, |
| "step": 301 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 8.129032258064517e-06, |
| "loss": 0.2424, |
| "step": 302 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 8.086021505376344e-06, |
| "loss": 0.284, |
| "step": 303 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 8.043010752688173e-06, |
| "loss": 0.2172, |
| "step": 304 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 8.000000000000001e-06, |
| "loss": 0.2786, |
| "step": 305 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 7.956989247311828e-06, |
| "loss": 0.245, |
| "step": 306 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 7.913978494623657e-06, |
| "loss": 0.241, |
| "step": 307 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 7.870967741935484e-06, |
| "loss": 0.2075, |
| "step": 308 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 7.827956989247312e-06, |
| "loss": 0.243, |
| "step": 309 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 7.78494623655914e-06, |
| "loss": 0.2792, |
| "step": 310 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 7.741935483870968e-06, |
| "loss": 0.2247, |
| "step": 311 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 7.698924731182797e-06, |
| "loss": 0.2449, |
| "step": 312 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 7.655913978494625e-06, |
| "loss": 0.2253, |
| "step": 313 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 7.612903225806451e-06, |
| "loss": 0.3203, |
| "step": 314 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 7.56989247311828e-06, |
| "loss": 0.1737, |
| "step": 315 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 7.526881720430108e-06, |
| "loss": 0.2394, |
| "step": 316 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 7.483870967741936e-06, |
| "loss": 0.3014, |
| "step": 317 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 7.440860215053764e-06, |
| "loss": 0.2347, |
| "step": 318 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 7.397849462365592e-06, |
| "loss": 0.2315, |
| "step": 319 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 7.35483870967742e-06, |
| "loss": 0.2328, |
| "step": 320 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 7.311827956989248e-06, |
| "loss": 0.2297, |
| "step": 321 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 7.268817204301076e-06, |
| "loss": 0.3137, |
| "step": 322 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 7.225806451612903e-06, |
| "loss": 0.2103, |
| "step": 323 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 7.182795698924732e-06, |
| "loss": 0.1881, |
| "step": 324 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 7.139784946236559e-06, |
| "loss": 0.2187, |
| "step": 325 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 7.096774193548388e-06, |
| "loss": 0.2592, |
| "step": 326 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 7.053763440860215e-06, |
| "loss": 0.1173, |
| "step": 327 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 7.010752688172044e-06, |
| "loss": 0.1956, |
| "step": 328 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 6.967741935483871e-06, |
| "loss": 0.214, |
| "step": 329 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 6.9247311827957e-06, |
| "loss": 0.211, |
| "step": 330 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 6.881720430107528e-06, |
| "loss": 0.2597, |
| "step": 331 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 6.838709677419355e-06, |
| "loss": 0.1829, |
| "step": 332 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 6.795698924731183e-06, |
| "loss": 0.2693, |
| "step": 333 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 6.752688172043012e-06, |
| "loss": 0.2176, |
| "step": 334 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 6.70967741935484e-06, |
| "loss": 0.1621, |
| "step": 335 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 6.666666666666667e-06, |
| "loss": 0.2793, |
| "step": 336 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 6.623655913978495e-06, |
| "loss": 0.22, |
| "step": 337 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 6.580645161290323e-06, |
| "loss": 0.2257, |
| "step": 338 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 6.537634408602152e-06, |
| "loss": 0.2905, |
| "step": 339 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 6.494623655913979e-06, |
| "loss": 0.2309, |
| "step": 340 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 6.451612903225806e-06, |
| "loss": 0.2152, |
| "step": 341 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 6.408602150537635e-06, |
| "loss": 0.3047, |
| "step": 342 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 6.365591397849464e-06, |
| "loss": 0.2, |
| "step": 343 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 6.3225806451612906e-06, |
| "loss": 0.1949, |
| "step": 344 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 6.279569892473118e-06, |
| "loss": 0.2897, |
| "step": 345 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 6.236559139784947e-06, |
| "loss": 0.2341, |
| "step": 346 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 6.193548387096775e-06, |
| "loss": 0.286, |
| "step": 347 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 6.150537634408603e-06, |
| "loss": 0.291, |
| "step": 348 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 6.10752688172043e-06, |
| "loss": 0.2408, |
| "step": 349 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 6.064516129032259e-06, |
| "loss": 0.2166, |
| "step": 350 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 6.021505376344087e-06, |
| "loss": 0.213, |
| "step": 351 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 5.978494623655915e-06, |
| "loss": 0.2057, |
| "step": 352 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 5.935483870967742e-06, |
| "loss": 0.2169, |
| "step": 353 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 5.89247311827957e-06, |
| "loss": 0.19, |
| "step": 354 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 5.849462365591399e-06, |
| "loss": 0.2565, |
| "step": 355 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 5.806451612903226e-06, |
| "loss": 0.2503, |
| "step": 356 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 5.763440860215054e-06, |
| "loss": 0.207, |
| "step": 357 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 5.720430107526882e-06, |
| "loss": 0.1996, |
| "step": 358 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 5.677419354838711e-06, |
| "loss": 0.2092, |
| "step": 359 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 5.634408602150538e-06, |
| "loss": 0.1877, |
| "step": 360 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 5.591397849462365e-06, |
| "loss": 0.2511, |
| "step": 361 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 5.548387096774194e-06, |
| "loss": 0.2252, |
| "step": 362 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 5.505376344086022e-06, |
| "loss": 0.212, |
| "step": 363 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 5.46236559139785e-06, |
| "loss": 0.2778, |
| "step": 364 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 5.419354838709678e-06, |
| "loss": 0.2931, |
| "step": 365 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 5.376344086021506e-06, |
| "loss": 0.2711, |
| "step": 366 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 5.333333333333334e-06, |
| "loss": 0.2175, |
| "step": 367 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 5.290322580645162e-06, |
| "loss": 0.1713, |
| "step": 368 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 5.24731182795699e-06, |
| "loss": 0.2586, |
| "step": 369 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 5.204301075268817e-06, |
| "loss": 0.249, |
| "step": 370 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 5.161290322580646e-06, |
| "loss": 0.1845, |
| "step": 371 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 5.118279569892473e-06, |
| "loss": 0.2393, |
| "step": 372 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 5.075268817204302e-06, |
| "loss": 0.2239, |
| "step": 373 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 5.032258064516129e-06, |
| "loss": 0.247, |
| "step": 374 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 4.9892473118279576e-06, |
| "loss": 0.2714, |
| "step": 375 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 4.946236559139785e-06, |
| "loss": 0.256, |
| "step": 376 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 4.903225806451613e-06, |
| "loss": 0.253, |
| "step": 377 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 4.860215053763441e-06, |
| "loss": 0.2299, |
| "step": 378 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 4.8172043010752695e-06, |
| "loss": 0.2029, |
| "step": 379 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 4.774193548387097e-06, |
| "loss": 0.2053, |
| "step": 380 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 4.731182795698925e-06, |
| "loss": 0.1907, |
| "step": 381 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 4.688172043010753e-06, |
| "loss": 0.271, |
| "step": 382 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 4.6451612903225815e-06, |
| "loss": 0.2088, |
| "step": 383 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 4.602150537634409e-06, |
| "loss": 0.251, |
| "step": 384 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 4.559139784946237e-06, |
| "loss": 0.2438, |
| "step": 385 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 4.516129032258065e-06, |
| "loss": 0.272, |
| "step": 386 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 4.473118279569893e-06, |
| "loss": 0.1858, |
| "step": 387 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 4.43010752688172e-06, |
| "loss": 0.2267, |
| "step": 388 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 4.387096774193549e-06, |
| "loss": 0.2414, |
| "step": 389 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 4.344086021505377e-06, |
| "loss": 0.2263, |
| "step": 390 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 4.3010752688172045e-06, |
| "loss": 0.2687, |
| "step": 391 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 4.258064516129032e-06, |
| "loss": 0.1808, |
| "step": 392 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 4.215053763440861e-06, |
| "loss": 0.2385, |
| "step": 393 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 4.172043010752688e-06, |
| "loss": 0.1518, |
| "step": 394 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 4.1290322580645165e-06, |
| "loss": 0.212, |
| "step": 395 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 4.086021505376344e-06, |
| "loss": 0.1282, |
| "step": 396 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 4.043010752688172e-06, |
| "loss": 0.1112, |
| "step": 397 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 0.0839, |
| "step": 398 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 3.9569892473118284e-06, |
| "loss": 0.1306, |
| "step": 399 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 3.913978494623656e-06, |
| "loss": 0.0748, |
| "step": 400 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 3.870967741935484e-06, |
| "loss": 0.119, |
| "step": 401 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 3.827956989247313e-06, |
| "loss": 0.0837, |
| "step": 402 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 3.78494623655914e-06, |
| "loss": 0.1067, |
| "step": 403 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 3.741935483870968e-06, |
| "loss": 0.1242, |
| "step": 404 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 3.698924731182796e-06, |
| "loss": 0.0896, |
| "step": 405 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 3.655913978494624e-06, |
| "loss": 0.0752, |
| "step": 406 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 3.6129032258064515e-06, |
| "loss": 0.0812, |
| "step": 407 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 3.5698924731182797e-06, |
| "loss": 0.0839, |
| "step": 408 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 3.5268817204301075e-06, |
| "loss": 0.1174, |
| "step": 409 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 3.4838709677419357e-06, |
| "loss": 0.1314, |
| "step": 410 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 3.440860215053764e-06, |
| "loss": 0.1054, |
| "step": 411 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 3.3978494623655917e-06, |
| "loss": 0.0968, |
| "step": 412 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 3.35483870967742e-06, |
| "loss": 0.1095, |
| "step": 413 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 3.3118279569892476e-06, |
| "loss": 0.1358, |
| "step": 414 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 3.268817204301076e-06, |
| "loss": 0.0977, |
| "step": 415 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 3.225806451612903e-06, |
| "loss": 0.1667, |
| "step": 416 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 3.182795698924732e-06, |
| "loss": 0.0975, |
| "step": 417 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 3.139784946236559e-06, |
| "loss": 0.0999, |
| "step": 418 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 3.0967741935483874e-06, |
| "loss": 0.0908, |
| "step": 419 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 3.053763440860215e-06, |
| "loss": 0.1005, |
| "step": 420 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 3.0107526881720433e-06, |
| "loss": 0.0656, |
| "step": 421 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 2.967741935483871e-06, |
| "loss": 0.1301, |
| "step": 422 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 2.9247311827956993e-06, |
| "loss": 0.0677, |
| "step": 423 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 2.881720430107527e-06, |
| "loss": 0.0866, |
| "step": 424 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 2.8387096774193553e-06, |
| "loss": 0.0847, |
| "step": 425 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 2.7956989247311827e-06, |
| "loss": 0.1039, |
| "step": 426 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 2.752688172043011e-06, |
| "loss": 0.0864, |
| "step": 427 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 2.709677419354839e-06, |
| "loss": 0.1166, |
| "step": 428 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 2.666666666666667e-06, |
| "loss": 0.0743, |
| "step": 429 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 2.623655913978495e-06, |
| "loss": 0.1162, |
| "step": 430 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 2.580645161290323e-06, |
| "loss": 0.1068, |
| "step": 431 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 2.537634408602151e-06, |
| "loss": 0.12, |
| "step": 432 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 2.4946236559139788e-06, |
| "loss": 0.0886, |
| "step": 433 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 2.4516129032258066e-06, |
| "loss": 0.0796, |
| "step": 434 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 2.4086021505376348e-06, |
| "loss": 0.0748, |
| "step": 435 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 2.3655913978494625e-06, |
| "loss": 0.1078, |
| "step": 436 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 2.3225806451612907e-06, |
| "loss": 0.1135, |
| "step": 437 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 2.2795698924731185e-06, |
| "loss": 0.1138, |
| "step": 438 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 2.2365591397849463e-06, |
| "loss": 0.0867, |
| "step": 439 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 2.1935483870967745e-06, |
| "loss": 0.0923, |
| "step": 440 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 2.1505376344086023e-06, |
| "loss": 0.0791, |
| "step": 441 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 2.1075268817204305e-06, |
| "loss": 0.0905, |
| "step": 442 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 2.0645161290322582e-06, |
| "loss": 0.0952, |
| "step": 443 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 2.021505376344086e-06, |
| "loss": 0.1213, |
| "step": 444 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 1.9784946236559142e-06, |
| "loss": 0.0832, |
| "step": 445 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 1.935483870967742e-06, |
| "loss": 0.1241, |
| "step": 446 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 1.89247311827957e-06, |
| "loss": 0.0873, |
| "step": 447 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 1.849462365591398e-06, |
| "loss": 0.0956, |
| "step": 448 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 1.8064516129032258e-06, |
| "loss": 0.0793, |
| "step": 449 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 1.7634408602150537e-06, |
| "loss": 0.1163, |
| "step": 450 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 1.720430107526882e-06, |
| "loss": 0.1128, |
| "step": 451 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 1.67741935483871e-06, |
| "loss": 0.1018, |
| "step": 452 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 1.634408602150538e-06, |
| "loss": 0.0715, |
| "step": 453 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 1.591397849462366e-06, |
| "loss": 0.1088, |
| "step": 454 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 1.5483870967741937e-06, |
| "loss": 0.1119, |
| "step": 455 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 1.5053763440860217e-06, |
| "loss": 0.1086, |
| "step": 456 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 1.4623655913978497e-06, |
| "loss": 0.049, |
| "step": 457 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 1.4193548387096776e-06, |
| "loss": 0.1056, |
| "step": 458 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 1.3763440860215054e-06, |
| "loss": 0.1173, |
| "step": 459 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 1.3333333333333334e-06, |
| "loss": 0.0988, |
| "step": 460 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 1.2903225806451614e-06, |
| "loss": 0.1257, |
| "step": 461 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 1.2473118279569894e-06, |
| "loss": 0.0812, |
| "step": 462 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 1.2043010752688174e-06, |
| "loss": 0.0941, |
| "step": 463 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 1.1612903225806454e-06, |
| "loss": 0.0967, |
| "step": 464 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 1.1182795698924731e-06, |
| "loss": 0.0848, |
| "step": 465 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 1.0752688172043011e-06, |
| "loss": 0.1152, |
| "step": 466 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 1.0322580645161291e-06, |
| "loss": 0.1088, |
| "step": 467 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 9.892473118279571e-07, |
| "loss": 0.134, |
| "step": 468 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 9.46236559139785e-07, |
| "loss": 0.0838, |
| "step": 469 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 9.032258064516129e-07, |
| "loss": 0.0887, |
| "step": 470 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 8.60215053763441e-07, |
| "loss": 0.1532, |
| "step": 471 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 8.17204301075269e-07, |
| "loss": 0.0614, |
| "step": 472 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 7.741935483870968e-07, |
| "loss": 0.0752, |
| "step": 473 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 7.311827956989248e-07, |
| "loss": 0.0647, |
| "step": 474 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 6.881720430107527e-07, |
| "loss": 0.0669, |
| "step": 475 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 6.451612903225807e-07, |
| "loss": 0.141, |
| "step": 476 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 6.021505376344087e-07, |
| "loss": 0.1032, |
| "step": 477 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 5.591397849462366e-07, |
| "loss": 0.1112, |
| "step": 478 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 5.161290322580646e-07, |
| "loss": 0.0817, |
| "step": 479 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 4.731182795698925e-07, |
| "loss": 0.0868, |
| "step": 480 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 4.301075268817205e-07, |
| "loss": 0.1169, |
| "step": 481 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 3.870967741935484e-07, |
| "loss": 0.1297, |
| "step": 482 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 3.4408602150537636e-07, |
| "loss": 0.0974, |
| "step": 483 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 3.0107526881720434e-07, |
| "loss": 0.0888, |
| "step": 484 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 2.580645161290323e-07, |
| "loss": 0.1391, |
| "step": 485 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 2.1505376344086024e-07, |
| "loss": 0.0777, |
| "step": 486 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 1.7204301075268818e-07, |
| "loss": 0.0964, |
| "step": 487 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 1.2903225806451614e-07, |
| "loss": 0.0738, |
| "step": 488 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 8.602150537634409e-08, |
| "loss": 0.1061, |
| "step": 489 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 4.3010752688172045e-08, |
| "loss": 0.098, |
| "step": 490 |
| }, |
| { |
| "epoch": 4.96, |
| "step": 490, |
| "total_flos": 146652980723712.0, |
| "train_loss": 0.6916719505221259, |
| "train_runtime": 14223.7593, |
| "train_samples_per_second": 4.44, |
| "train_steps_per_second": 0.034 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 490, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 5, |
| "save_steps": 500, |
| "total_flos": 146652980723712.0, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |