| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 0.9999176615891313, |
| "eval_steps": 500, |
| "global_step": 759, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "learning_rate": 8.695652173913044e-07, |
| "loss": 1.7323, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.7391304347826088e-06, |
| "loss": 1.7879, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.6086956521739132e-06, |
| "loss": 1.7641, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.4782608695652175e-06, |
| "loss": 1.785, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.347826086956522e-06, |
| "loss": 1.765, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 5.2173913043478265e-06, |
| "loss": 1.7404, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 6.086956521739132e-06, |
| "loss": 1.7493, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 6.956521739130435e-06, |
| "loss": 1.7229, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 7.82608695652174e-06, |
| "loss": 1.6795, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 8.695652173913044e-06, |
| "loss": 1.6157, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 9.565217391304349e-06, |
| "loss": 1.5726, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.0434782608695653e-05, |
| "loss": 1.568, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.1304347826086957e-05, |
| "loss": 1.5764, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.2173913043478263e-05, |
| "loss": 1.5187, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.3043478260869566e-05, |
| "loss": 1.5299, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.391304347826087e-05, |
| "loss": 1.5024, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.4782608695652174e-05, |
| "loss": 1.4203, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.565217391304348e-05, |
| "loss": 1.493, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.6521739130434785e-05, |
| "loss": 1.4777, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.739130434782609e-05, |
| "loss": 1.4429, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.8260869565217393e-05, |
| "loss": 1.4383, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.9130434782608697e-05, |
| "loss": 1.4382, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 2e-05, |
| "loss": 1.4615, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.999990890103106e-05, |
| "loss": 1.4381, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.9999635605784042e-05, |
| "loss": 1.4201, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.9999180119238327e-05, |
| "loss": 1.4226, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.9998542449692794e-05, |
| "loss": 1.3955, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.999772260876564e-05, |
| "loss": 1.3743, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.99967206113942e-05, |
| "loss": 1.4049, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.9995536475834667e-05, |
| "loss": 1.3434, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.999417022366174e-05, |
| "loss": 1.3606, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.9992621879768256e-05, |
| "loss": 1.3521, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.999089147236472e-05, |
| "loss": 1.3454, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.998897903297879e-05, |
| "loss": 1.353, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.998688459645473e-05, |
| "loss": 1.333, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.9984608200952736e-05, |
| "loss": 1.3249, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.9982149887948264e-05, |
| "loss": 1.3181, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.997950970223127e-05, |
| "loss": 1.3081, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.9976687691905394e-05, |
| "loss": 1.3071, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.997368390838708e-05, |
| "loss": 1.3167, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.997049840640465e-05, |
| "loss": 1.3087, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.996713124399729e-05, |
| "loss": 1.307, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.9963582482514003e-05, |
| "loss": 1.3097, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.9959852186612492e-05, |
| "loss": 1.3383, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.995594042425798e-05, |
| "loss": 1.3046, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.995184726672197e-05, |
| "loss": 1.2716, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.994757278858095e-05, |
| "loss": 1.2744, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.994311706771503e-05, |
| "loss": 1.305, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.993848018530652e-05, |
| "loss": 1.2723, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.993366222583847e-05, |
| "loss": 1.2477, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.99286632770931e-05, |
| "loss": 1.2946, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.992348343015023e-05, |
| "loss": 1.2611, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.99181227793856e-05, |
| "loss": 1.27, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.991258142246917e-05, |
| "loss": 1.2493, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.9906859460363307e-05, |
| "loss": 1.2631, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.990095699732099e-05, |
| "loss": 1.292, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9894874140883877e-05, |
| "loss": 1.2596, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9888611001880357e-05, |
| "loss": 1.2619, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.988216769442353e-05, |
| "loss": 1.2367, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.987554433590913e-05, |
| "loss": 1.235, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9868741047013382e-05, |
| "loss": 1.278, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9861757951690813e-05, |
| "loss": 1.2206, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9854595177171968e-05, |
| "loss": 1.2533, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9847252853961136e-05, |
| "loss": 1.2421, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.983973111583392e-05, |
| "loss": 1.2448, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.983203009983484e-05, |
| "loss": 1.2354, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9824149946274827e-05, |
| "loss": 1.2356, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9816090798728648e-05, |
| "loss": 1.24, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9807852804032306e-05, |
| "loss": 1.2453, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9799436112280374e-05, |
| "loss": 1.2375, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.979084087682323e-05, |
| "loss": 1.2368, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.97820672542643e-05, |
| "loss": 1.2212, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9773115404457175e-05, |
| "loss": 1.2444, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9763985490502714e-05, |
| "loss": 1.266, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9754677678746064e-05, |
| "loss": 1.2457, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9745192138773633e-05, |
| "loss": 1.2506, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9735529043410012e-05, |
| "loss": 1.2191, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.97256885687148e-05, |
| "loss": 1.2016, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9715670893979416e-05, |
| "loss": 1.2124, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.970547620172383e-05, |
| "loss": 1.2324, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9695104677693234e-05, |
| "loss": 1.2053, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9684556510854655e-05, |
| "loss": 1.2081, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.967383189339352e-05, |
| "loss": 1.2085, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9662931020710138e-05, |
| "loss": 1.2013, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9651854091416175e-05, |
| "loss": 1.2094, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.964060130733099e-05, |
| "loss": 1.1864, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9629172873477995e-05, |
| "loss": 1.1907, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9617568998080893e-05, |
| "loss": 1.2204, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9605789892559902e-05, |
| "loss": 1.1799, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9593835771527893e-05, |
| "loss": 1.2545, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9581706852786492e-05, |
| "loss": 1.1896, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.956940335732209e-05, |
| "loss": 1.2181, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9556925509301844e-05, |
| "loss": 1.2182, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9544273536069573e-05, |
| "loss": 1.2016, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.953144766814161e-05, |
| "loss": 1.2068, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9518448139202632e-05, |
| "loss": 1.1986, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9505275186101378e-05, |
| "loss": 1.2009, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9491929048846328e-05, |
| "loss": 1.2248, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.947840997060136e-05, |
| "loss": 1.1822, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9464718197681284e-05, |
| "loss": 1.176, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9450853979547384e-05, |
| "loss": 1.1743, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9436817568802854e-05, |
| "loss": 1.1887, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.9422609221188208e-05, |
| "loss": 1.1927, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.94082291955766e-05, |
| "loss": 1.1648, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.9393677753969137e-05, |
| "loss": 1.226, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.9378955161490086e-05, |
| "loss": 1.1833, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.9364061686382042e-05, |
| "loss": 1.2221, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.9348997600001052e-05, |
| "loss": 1.2068, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.9333763176811663e-05, |
| "loss": 1.2105, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.9318358694381926e-05, |
| "loss": 1.2189, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.9302784433378333e-05, |
| "loss": 1.1758, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.928704067756071e-05, |
| "loss": 1.1968, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.9271127713777033e-05, |
| "loss": 1.192, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.925504583195823e-05, |
| "loss": 1.1964, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.9238795325112867e-05, |
| "loss": 1.1578, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.922237648932183e-05, |
| "loss": 1.2211, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.9205789623732923e-05, |
| "loss": 1.2112, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.918903503055541e-05, |
| "loss": 1.2374, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.917211301505453e-05, |
| "loss": 1.2267, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.9155023885545914e-05, |
| "loss": 1.1908, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.913776795338998e-05, |
| "loss": 1.1793, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.9120345532986243e-05, |
| "loss": 1.2087, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.9102756941767625e-05, |
| "loss": 1.1735, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.908500250019462e-05, |
| "loss": 1.2115, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.9067082531749496e-05, |
| "loss": 1.1803, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.9048997362930384e-05, |
| "loss": 1.1689, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.903074732324533e-05, |
| "loss": 1.1948, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.901233274520629e-05, |
| "loss": 1.1627, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.8993753964323086e-05, |
| "loss": 1.1608, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.8975011319097264e-05, |
| "loss": 1.2027, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.8956105151015966e-05, |
| "loss": 1.1847, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.893703580454567e-05, |
| "loss": 1.142, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.891780362712594e-05, |
| "loss": 1.178, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8898408969163078e-05, |
| "loss": 1.1915, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8878852184023754e-05, |
| "loss": 1.1788, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8859133628028564e-05, |
| "loss": 1.2118, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8839253660445523e-05, |
| "loss": 1.1451, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.881921264348355e-05, |
| "loss": 1.1976, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.879901094228584e-05, |
| "loss": 1.1665, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8778648924923222e-05, |
| "loss": 1.1771, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.875812696238745e-05, |
| "loss": 1.2061, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8737445428584456e-05, |
| "loss": 1.1932, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8716604700327516e-05, |
| "loss": 1.1854, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8695605157330398e-05, |
| "loss": 1.2046, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8674447182200457e-05, |
| "loss": 1.16, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8653131160431622e-05, |
| "loss": 1.1939, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.863165748039743e-05, |
| "loss": 1.219, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.861002653334389e-05, |
| "loss": 1.1843, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.85882387133824e-05, |
| "loss": 1.1847, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.8566294417482552e-05, |
| "loss": 1.1671, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.8544194045464888e-05, |
| "loss": 1.1554, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.8521937999993627e-05, |
| "loss": 1.1791, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.849952668656933e-05, |
| "loss": 1.1666, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.847696051352151e-05, |
| "loss": 1.1581, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.845423989200118e-05, |
| "loss": 1.1737, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8431365235973383e-05, |
| "loss": 1.1652, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.840833696220963e-05, |
| "loss": 1.2088, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8385155490280327e-05, |
| "loss": 1.1856, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.836182124254711e-05, |
| "loss": 1.1766, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.833833464415516e-05, |
| "loss": 1.176, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8314696123025456e-05, |
| "loss": 1.1879, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8290906109846974e-05, |
| "loss": 1.1849, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8266965038068856e-05, |
| "loss": 1.1491, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.8242873343892494e-05, |
| "loss": 1.1559, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.8218631466263584e-05, |
| "loss": 1.1694, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.8194239846864133e-05, |
| "loss": 1.1371, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.816969893010442e-05, |
| "loss": 1.1567, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.8145009163114894e-05, |
| "loss": 1.1869, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.812017099573801e-05, |
| "loss": 1.1967, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.8095184880520058e-05, |
| "loss": 1.1561, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.8070051272702905e-05, |
| "loss": 1.1744, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.8044770630215706e-05, |
| "loss": 1.1565, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.801934341366655e-05, |
| "loss": 1.1897, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.7993770086334082e-05, |
| "loss": 1.1711, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.7968051114159046e-05, |
| "loss": 1.1937, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.794218696573582e-05, |
| "loss": 1.1238, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.791617811230385e-05, |
| "loss": 1.1599, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.7890025027739084e-05, |
| "loss": 1.1605, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.7863728188545326e-05, |
| "loss": 1.1818, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.7837288073845566e-05, |
| "loss": 1.1732, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.7810705165373245e-05, |
| "loss": 1.1543, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.778397994746347e-05, |
| "loss": 1.1605, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.77571129070442e-05, |
| "loss": 1.172, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.773010453362737e-05, |
| "loss": 1.1587, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.770295531929998e-05, |
| "loss": 1.1511, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.767566575871511e-05, |
| "loss": 1.1626, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7648236349082928e-05, |
| "loss": 1.1861, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7620667590161626e-05, |
| "loss": 1.1984, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.75929599842483e-05, |
| "loss": 1.1614, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.756511403616982e-05, |
| "loss": 1.175, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7537130253273613e-05, |
| "loss": 1.1688, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.750900914541844e-05, |
| "loss": 1.1795, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7480751224965083e-05, |
| "loss": 1.155, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.7452357006767026e-05, |
| "loss": 1.148, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.742382700816107e-05, |
| "loss": 1.1444, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.7395161748957905e-05, |
| "loss": 1.1576, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.7366361751432645e-05, |
| "loss": 1.1747, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.7337427540315305e-05, |
| "loss": 1.174, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.730835964278124e-05, |
| "loss": 1.1518, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.7279158588441558e-05, |
| "loss": 1.1269, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.7249824909333445e-05, |
| "loss": 1.1499, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.722035913991048e-05, |
| "loss": 1.1643, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.719076181703291e-05, |
| "loss": 1.148, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.716103347995785e-05, |
| "loss": 1.1736, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.713117467032948e-05, |
| "loss": 1.1843, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.7101185932169147e-05, |
| "loss": 1.1705, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.7071067811865477e-05, |
| "loss": 1.1403, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.7040820858164413e-05, |
| "loss": 1.1338, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.7010445622159214e-05, |
| "loss": 1.1615, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.6979942657280414e-05, |
| "loss": 1.1539, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.694931251928575e-05, |
| "loss": 1.1228, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.691855576625001e-05, |
| "loss": 1.1196, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.68876729585549e-05, |
| "loss": 1.1321, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.6856664658878797e-05, |
| "loss": 1.1416, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.6825531432186545e-05, |
| "loss": 1.1573, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.6794273845719096e-05, |
| "loss": 1.1514, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.6762892468983237e-05, |
| "loss": 1.1552, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.673138787374119e-05, |
| "loss": 1.1062, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.6699760634000166e-05, |
| "loss": 1.1527, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.6668011326001962e-05, |
| "loss": 1.1855, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.6636140528212427e-05, |
| "loss": 1.1865, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.6604148821310912e-05, |
| "loss": 1.1618, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.6572036788179728e-05, |
| "loss": 1.1418, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.6539805013893493e-05, |
| "loss": 1.15, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.650745408570849e-05, |
| "loss": 1.0969, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.6474984593051965e-05, |
| "loss": 1.1225, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.6442397127511366e-05, |
| "loss": 1.1628, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.6409692282823604e-05, |
| "loss": 1.1361, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.63768706548642e-05, |
| "loss": 1.1343, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.6343932841636455e-05, |
| "loss": 1.1284, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.631087944326053e-05, |
| "loss": 1.1861, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.6277711061962525e-05, |
| "loss": 1.1676, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.6244428302063506e-05, |
| "loss": 1.1829, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.6211031769968503e-05, |
| "loss": 1.1682, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.6177522074155436e-05, |
| "loss": 1.1688, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.6143899825164058e-05, |
| "loss": 1.1534, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.6110165635584807e-05, |
| "loss": 1.1279, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.6076320120047667e-05, |
| "loss": 1.1719, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.6042363895210948e-05, |
| "loss": 1.1648, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.6008297579750063e-05, |
| "loss": 1.1386, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.597412179434626e-05, |
| "loss": 1.1404, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.5939837161675297e-05, |
| "loss": 1.1738, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.590544430639611e-05, |
| "loss": 1.1573, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.5870943855139437e-05, |
| "loss": 1.183, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.5836336436496377e-05, |
| "loss": 1.1366, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.5801622681006966e-05, |
| "loss": 1.1644, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.5766803221148676e-05, |
| "loss": 1.1487, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.5731878691324874e-05, |
| "loss": 1.1444, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.5696849727853297e-05, |
| "loss": 1.1516, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.5661716968954436e-05, |
| "loss": 1.1412, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.5626481054739916e-05, |
| "loss": 1.1326, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.5591142627200825e-05, |
| "loss": 1.1708, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.5555702330196024e-05, |
| "loss": 1.1628, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.552016080944042e-05, |
| "loss": 1.1161, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.5484518712493188e-05, |
| "loss": 1.1772, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.544877668874599e-05, |
| "loss": 1.1406, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.5412935389411124e-05, |
| "loss": 1.1679, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.5376995467509673e-05, |
| "loss": 1.1426, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.5340957577859605e-05, |
| "loss": 1.1494, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.530482237706383e-05, |
| "loss": 1.1577, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.526859052349827e-05, |
| "loss": 1.1549, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.5232262677299816e-05, |
| "loss": 1.1509, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.5195839500354337e-05, |
| "loss": 1.1197, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.5159321656284602e-05, |
| "loss": 1.1216, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.5122709810438205e-05, |
| "loss": 1.1471, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.5086004629875426e-05, |
| "loss": 1.1707, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.5049206783357082e-05, |
| "loss": 1.1582, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.501231694133235e-05, |
| "loss": 1.1703, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.4975335775926547e-05, |
| "loss": 1.1943, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.4938263960928878e-05, |
| "loss": 1.1607, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.4901102171780175e-05, |
| "loss": 1.1251, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.4863851085560563e-05, |
| "loss": 1.1472, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.4826511380977155e-05, |
| "loss": 1.1655, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.478908373835167e-05, |
| "loss": 1.1598, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.4751568839608036e-05, |
| "loss": 1.1717, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.4713967368259981e-05, |
| "loss": 1.1455, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.4676280009398544e-05, |
| "loss": 1.1347, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.4638507449679642e-05, |
| "loss": 1.1355, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.4600650377311523e-05, |
| "loss": 1.1047, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.4562709482042237e-05, |
| "loss": 1.175, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.4524685455147071e-05, |
| "loss": 1.1226, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.448657898941596e-05, |
| "loss": 1.1741, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.4448390779140844e-05, |
| "loss": 1.1127, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.4410121520103045e-05, |
| "loss": 1.1451, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.4371771909560566e-05, |
| "loss": 1.1277, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.4333342646235407e-05, |
| "loss": 1.1388, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.4294834430300822e-05, |
| "loss": 1.1811, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.425624796336856e-05, |
| "loss": 1.1222, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.4217583948476094e-05, |
| "loss": 1.1399, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.4178843090073802e-05, |
| "loss": 1.1165, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.4140026094012136e-05, |
| "loss": 1.1414, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.4101133667528761e-05, |
| "loss": 1.1139, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.4062166519235665e-05, |
| "loss": 1.1374, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.4023125359106253e-05, |
| "loss": 1.1488, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.3984010898462417e-05, |
| "loss": 1.1664, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.3944823849961557e-05, |
| "loss": 1.1406, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.3905564927583625e-05, |
| "loss": 1.1464, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.3866234846618083e-05, |
| "loss": 1.1564, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.3826834323650899e-05, |
| "loss": 1.1286, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.3787364076551478e-05, |
| "loss": 1.1488, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.3747824824459577e-05, |
| "loss": 1.1423, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.3708217287772227e-05, |
| "loss": 1.1349, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.3668542188130567e-05, |
| "loss": 1.139, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.3628800248406738e-05, |
| "loss": 1.126, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.3588992192690683e-05, |
| "loss": 1.1162, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.3549118746276968e-05, |
| "loss": 1.153, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.350918063565157e-05, |
| "loss": 1.1643, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.3469178588478621e-05, |
| "loss": 1.1528, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.3429113333587181e-05, |
| "loss": 1.1498, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.3388985600957922e-05, |
| "loss": 1.1209, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.3348796121709862e-05, |
| "loss": 1.1661, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.3308545628087029e-05, |
| "loss": 1.1558, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.3268234853445113e-05, |
| "loss": 1.1407, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.3227864532238113e-05, |
| "loss": 1.1581, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.318743540000496e-05, |
| "loss": 1.1192, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.3146948193356105e-05, |
| "loss": 1.1439, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.3106403649960109e-05, |
| "loss": 1.1795, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.3065802508530186e-05, |
| "loss": 1.1578, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.302514550881076e-05, |
| "loss": 1.1502, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.2984433391563984e-05, |
| "loss": 1.1239, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.294366689855624e-05, |
| "loss": 1.132, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.2902846772544625e-05, |
| "loss": 1.1317, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.2861973757263416e-05, |
| "loss": 1.1173, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.282104859741052e-05, |
| "loss": 1.1215, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.2780072038633913e-05, |
| "loss": 1.1568, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.2739044827518043e-05, |
| "loss": 1.1438, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.2697967711570243e-05, |
| "loss": 1.1108, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.2656841439207093e-05, |
| "loss": 1.1597, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.2615666759740788e-05, |
| "loss": 1.1489, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.2574444423365503e-05, |
| "loss": 1.1595, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.2533175181143704e-05, |
| "loss": 1.1327, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.2491859784992477e-05, |
| "loss": 1.119, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.245049898766982e-05, |
| "loss": 1.1255, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.2409093542760925e-05, |
| "loss": 1.1561, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.2367644204664468e-05, |
| "loss": 1.1602, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.2326151728578839e-05, |
| "loss": 1.1159, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.228461687048839e-05, |
| "loss": 1.156, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.2243040387149682e-05, |
| "loss": 1.1275, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.2201423036077657e-05, |
| "loss": 1.1467, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.2159765575531877e-05, |
| "loss": 1.1434, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.2118068764502677e-05, |
| "loss": 1.1361, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.2076333362697358e-05, |
| "loss": 1.1386, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.2034560130526341e-05, |
| "loss": 1.1254, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.199274982908929e-05, |
| "loss": 1.1216, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.1950903220161286e-05, |
| "loss": 1.1379, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.1909021066178906e-05, |
| "loss": 1.1418, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.1867104130226363e-05, |
| "loss": 1.168, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.1825153176021591e-05, |
| "loss": 1.1334, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.1783168967902314e-05, |
| "loss": 1.146, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.1741152270812155e-05, |
| "loss": 1.1315, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.1699103850286668e-05, |
| "loss": 1.1431, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.1657024472439402e-05, |
| "loss": 1.1435, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.1614914903947952e-05, |
| "loss": 1.1485, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.157277591203996e-05, |
| "loss": 1.1553, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.153060826447918e-05, |
| "loss": 1.1486, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.1488412729551449e-05, |
| "loss": 1.1436, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.144619007605071e-05, |
| "loss": 1.1152, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.1403941073265014e-05, |
| "loss": 1.1533, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.1361666490962468e-05, |
| "loss": 1.1451, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.1319367099377248e-05, |
| "loss": 1.1715, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.1277043669195549e-05, |
| "loss": 1.1125, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.1234696971541534e-05, |
| "loss": 1.1224, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.1192327777963313e-05, |
| "loss": 1.123, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.1149936860418846e-05, |
| "loss": 1.1412, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.1107524991261913e-05, |
| "loss": 1.1315, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.1065092943228024e-05, |
| "loss": 1.1723, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.1022641489420342e-05, |
| "loss": 1.1378, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.098017140329561e-05, |
| "loss": 1.1302, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.0937683458650029e-05, |
| "loss": 1.1688, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.0895178429605189e-05, |
| "loss": 1.1399, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.0852657090593961e-05, |
| "loss": 1.1245, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.0810120216346368e-05, |
| "loss": 1.1261, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.0767568581875494e-05, |
| "loss": 1.179, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.072500296246334e-05, |
| "loss": 1.1467, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.0682424133646712e-05, |
| "loss": 1.1464, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.0639832871203094e-05, |
| "loss": 1.1157, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.0597229951136498e-05, |
| "loss": 1.121, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.0554616149663355e-05, |
| "loss": 1.1477, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.0511992243198335e-05, |
| "loss": 1.1146, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.0469359008340216e-05, |
| "loss": 1.1365, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.0426717221857756e-05, |
| "loss": 1.1254, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.0384067660675508e-05, |
| "loss": 1.1476, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.034141110185968e-05, |
| "loss": 1.1466, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.0298748322603982e-05, |
| "loss": 1.1524, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.0256080100215448e-05, |
| "loss": 1.1351, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.0213407212100296e-05, |
| "loss": 1.1135, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.017073043574975e-05, |
| "loss": 1.1284, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.0128050548725865e-05, |
| "loss": 1.1309, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.0085368328647395e-05, |
| "loss": 1.0959, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.0042684553175575e-05, |
| "loss": 1.1173, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1e-05, |
| "loss": 1.132, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 9.957315446824425e-06, |
| "loss": 1.1172, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 9.91463167135261e-06, |
| "loss": 1.1289, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 9.871949451274137e-06, |
| "loss": 1.1111, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 9.829269564250254e-06, |
| "loss": 1.1246, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 9.786592787899707e-06, |
| "loss": 1.1825, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 9.743919899784555e-06, |
| "loss": 1.1078, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 9.701251677396021e-06, |
| "loss": 1.108, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 9.658588898140322e-06, |
| "loss": 1.1304, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 9.615932339324497e-06, |
| "loss": 1.124, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 9.573282778142246e-06, |
| "loss": 1.1333, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 9.530640991659785e-06, |
| "loss": 1.1308, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 9.488007756801672e-06, |
| "loss": 1.1265, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 9.445383850336648e-06, |
| "loss": 1.1267, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 9.402770048863502e-06, |
| "loss": 1.1018, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 9.360167128796913e-06, |
| "loss": 1.157, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 9.317575866353293e-06, |
| "loss": 1.1212, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 9.274997037536663e-06, |
| "loss": 1.1372, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 9.232431418124507e-06, |
| "loss": 1.162, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 9.189879783653633e-06, |
| "loss": 1.1234, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 9.14734290940604e-06, |
| "loss": 1.1363, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 9.104821570394811e-06, |
| "loss": 1.1435, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 9.062316541349978e-06, |
| "loss": 1.134, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 9.019828596704394e-06, |
| "loss": 1.1184, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 8.977358510579658e-06, |
| "loss": 1.1412, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 8.93490705677198e-06, |
| "loss": 1.1498, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 8.89247500873809e-06, |
| "loss": 1.1717, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 8.850063139581156e-06, |
| "loss": 1.1122, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 8.807672222036692e-06, |
| "loss": 1.1246, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 8.765303028458468e-06, |
| "loss": 1.1277, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 8.722956330804456e-06, |
| "loss": 1.1527, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 8.680632900622752e-06, |
| "loss": 1.0872, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 8.638333509037537e-06, |
| "loss": 1.129, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 8.59605892673499e-06, |
| "loss": 1.1143, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 8.55380992394929e-06, |
| "loss": 1.1301, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 8.511587270448556e-06, |
| "loss": 1.1289, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 8.469391735520824e-06, |
| "loss": 1.1145, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 8.42722408796004e-06, |
| "loss": 1.1467, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 8.385085096052053e-06, |
| "loss": 1.1668, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 8.342975527560601e-06, |
| "loss": 1.184, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 8.300896149713334e-06, |
| "loss": 1.1219, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 8.258847729187845e-06, |
| "loss": 1.1248, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 8.216831032097689e-06, |
| "loss": 1.1532, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 8.174846823978412e-06, |
| "loss": 1.1321, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 8.132895869773638e-06, |
| "loss": 1.1126, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 8.0909789338211e-06, |
| "loss": 1.1054, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 8.04909677983872e-06, |
| "loss": 1.105, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 8.00725017091071e-06, |
| "loss": 1.0812, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 7.965439869473664e-06, |
| "loss": 1.1081, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 7.923666637302643e-06, |
| "loss": 1.1489, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 7.881931235497324e-06, |
| "loss": 1.1063, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 7.84023442446813e-06, |
| "loss": 1.058, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 7.798576963922347e-06, |
| "loss": 1.1126, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 7.75695961285032e-06, |
| "loss": 1.1296, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 7.71538312951161e-06, |
| "loss": 1.1393, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 7.673848271421166e-06, |
| "loss": 1.1197, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 7.632355795335533e-06, |
| "loss": 1.144, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 7.590906457239073e-06, |
| "loss": 1.1393, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 7.549501012330184e-06, |
| "loss": 1.1158, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 7.508140215007526e-06, |
| "loss": 1.1077, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 7.466824818856296e-06, |
| "loss": 1.1213, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 7.4255555766345025e-06, |
| "loss": 1.1235, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 7.384333240259216e-06, |
| "loss": 1.131, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 7.34315856079291e-06, |
| "loss": 1.0999, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 7.3020322884297565e-06, |
| "loss": 1.1296, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 7.260955172481959e-06, |
| "loss": 1.1073, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 7.219927961366091e-06, |
| "loss": 1.1283, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 7.178951402589482e-06, |
| "loss": 1.0866, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 7.1380262427365885e-06, |
| "loss": 1.1159, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 7.097153227455379e-06, |
| "loss": 1.1253, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 7.056333101443761e-06, |
| "loss": 1.1198, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 7.01556660843602e-06, |
| "loss": 1.1117, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 6.974854491189243e-06, |
| "loss": 1.1238, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 6.934197491469818e-06, |
| "loss": 1.1468, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 6.893596350039896e-06, |
| "loss": 1.1063, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 6.853051806643898e-06, |
| "loss": 1.1279, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 6.812564599995042e-06, |
| "loss": 1.1297, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 6.772135467761889e-06, |
| "loss": 1.1373, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 6.731765146554891e-06, |
| "loss": 1.119, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 6.691454371912974e-06, |
| "loss": 1.1026, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 6.651203878290139e-06, |
| "loss": 1.1303, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 6.6110143990420824e-06, |
| "loss": 1.1157, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 6.570886666412823e-06, |
| "loss": 1.0973, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 6.5308214115213785e-06, |
| "loss": 1.1377, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 6.490819364348434e-06, |
| "loss": 1.1418, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 6.450881253723035e-06, |
| "loss": 1.1307, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 6.41100780730932e-06, |
| "loss": 1.15, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 6.371199751593264e-06, |
| "loss": 1.1242, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 6.331457811869437e-06, |
| "loss": 1.1015, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 6.291782712227776e-06, |
| "loss": 1.1038, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 6.2521751755404226e-06, |
| "loss": 1.1292, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 6.212635923448526e-06, |
| "loss": 1.1506, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 6.173165676349103e-06, |
| "loss": 1.1054, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 6.133765153381918e-06, |
| "loss": 1.138, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 6.094435072416379e-06, |
| "loss": 1.13, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 6.055176150038445e-06, |
| "loss": 1.1082, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 6.015989101537586e-06, |
| "loss": 1.1527, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 5.976874640893751e-06, |
| "loss": 1.1561, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 5.937833480764339e-06, |
| "loss": 1.1325, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 5.898866332471241e-06, |
| "loss": 1.112, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 5.859973905987866e-06, |
| "loss": 1.1376, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 5.821156909926202e-06, |
| "loss": 1.1393, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 5.782416051523909e-06, |
| "loss": 1.1445, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 5.743752036631443e-06, |
| "loss": 1.1023, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 5.7051655696991825e-06, |
| "loss": 1.137, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 5.666657353764594e-06, |
| "loss": 1.1152, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 5.628228090439434e-06, |
| "loss": 1.1422, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 5.589878479896959e-06, |
| "loss": 1.125, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 5.55160922085916e-06, |
| "loss": 1.1307, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 5.513421010584044e-06, |
| "loss": 1.1222, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 5.4753145448529284e-06, |
| "loss": 1.1179, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 5.437290517957767e-06, |
| "loss": 1.0994, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 5.399349622688479e-06, |
| "loss": 1.1345, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 5.3614925503203586e-06, |
| "loss": 1.1318, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 5.323719990601459e-06, |
| "loss": 1.1091, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 5.286032631740023e-06, |
| "loss": 1.1343, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 5.248431160391963e-06, |
| "loss": 1.1645, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 5.2109162616483325e-06, |
| "loss": 1.0957, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 5.1734886190228496e-06, |
| "loss": 1.1548, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 5.136148914439441e-06, |
| "loss": 1.1281, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 5.098897828219831e-06, |
| "loss": 1.1126, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 5.061736039071124e-06, |
| "loss": 1.0938, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 5.024664224073454e-06, |
| "loss": 1.1623, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 4.987683058667651e-06, |
| "loss": 1.1411, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 4.950793216642923e-06, |
| "loss": 1.1559, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 4.913995370124578e-06, |
| "loss": 1.1341, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 4.877290189561795e-06, |
| "loss": 1.1211, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 4.840678343715399e-06, |
| "loss": 1.1156, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 4.804160499645667e-06, |
| "loss": 1.1125, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 4.767737322700185e-06, |
| "loss": 1.1397, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 4.7314094765017325e-06, |
| "loss": 1.1322, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 4.695177622936169e-06, |
| "loss": 1.139, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 4.659042422140399e-06, |
| "loss": 1.143, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 4.623004532490328e-06, |
| "loss": 1.1266, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 4.587064610588881e-06, |
| "loss": 1.1283, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 4.551223311254013e-06, |
| "loss": 1.1291, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 4.515481287506811e-06, |
| "loss": 1.1188, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 4.479839190559583e-06, |
| "loss": 1.115, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 4.444297669803981e-06, |
| "loss": 1.1354, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 4.408857372799179e-06, |
| "loss": 1.1182, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 4.37351894526009e-06, |
| "loss": 1.1767, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 4.338283031045567e-06, |
| "loss": 1.1639, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 4.303150272146706e-06, |
| "loss": 1.1483, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 4.268121308675132e-06, |
| "loss": 1.1074, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 4.2331967788513295e-06, |
| "loss": 1.133, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 4.198377318993035e-06, |
| "loss": 1.1293, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 4.1636635635036235e-06, |
| "loss": 1.1178, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 4.129056144860567e-06, |
| "loss": 1.161, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 4.094555693603891e-06, |
| "loss": 1.1374, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 4.060162838324708e-06, |
| "loss": 1.1135, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 4.025878205653747e-06, |
| "loss": 1.1246, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 3.991702420249941e-06, |
| "loss": 1.1251, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 3.957636104789056e-06, |
| "loss": 1.123, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 3.9236798799523375e-06, |
| "loss": 1.1085, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 3.8898343644151945e-06, |
| "loss": 1.1192, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 3.856100174835945e-06, |
| "loss": 1.1267, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 3.822477925844564e-06, |
| "loss": 1.1303, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 3.7889682300315e-06, |
| "loss": 1.1156, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 3.755571697936493e-06, |
| "loss": 1.1575, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 3.722288938037478e-06, |
| "loss": 1.118, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 3.689120556739475e-06, |
| "loss": 1.1237, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 3.6560671583635467e-06, |
| "loss": 1.1329, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 3.6231293451357994e-06, |
| "loss": 1.1065, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 3.590307717176401e-06, |
| "loss": 1.1285, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 3.557602872488638e-06, |
| "loss": 1.1208, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 3.525015406948039e-06, |
| "loss": 1.1264, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 3.492545914291512e-06, |
| "loss": 1.1055, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 3.4601949861065086e-06, |
| "loss": 1.1358, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 3.4279632118202744e-06, |
| "loss": 1.1053, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 3.3958511786890923e-06, |
| "loss": 1.1158, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 3.3638594717875807e-06, |
| "loss": 1.1195, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 3.33198867399804e-06, |
| "loss": 1.1184, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 3.3002393659998357e-06, |
| "loss": 1.1185, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 3.2686121262588165e-06, |
| "loss": 1.1174, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 3.2371075310167634e-06, |
| "loss": 1.1303, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 3.205726154280905e-06, |
| "loss": 1.0918, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 3.174468567813461e-06, |
| "loss": 1.1189, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 3.143335341121202e-06, |
| "loss": 1.1038, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 3.1123270414451035e-06, |
| "loss": 1.1535, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 3.081444233749994e-06, |
| "loss": 1.1417, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 3.050687480714256e-06, |
| "loss": 1.1267, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 3.0200573427195877e-06, |
| "loss": 1.1123, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 2.9895543778407875e-06, |
| "loss": 1.1269, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 2.959179141835591e-06, |
| "loss": 1.1319, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 2.9289321881345257e-06, |
| "loss": 1.1375, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 2.898814067830855e-06, |
| "loss": 1.1272, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 2.868825329670524e-06, |
| "loss": 1.1279, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 2.83896652004215e-06, |
| "loss": 1.1341, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 2.809238182967092e-06, |
| "loss": 1.1252, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 2.779640860089523e-06, |
| "loss": 1.1369, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 2.7501750906665603e-06, |
| "loss": 1.0889, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 2.7208414115584436e-06, |
| "loss": 1.1314, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 2.691640357218759e-06, |
| "loss": 1.1159, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 2.662572459684699e-06, |
| "loss": 1.1012, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 2.6336382485673574e-06, |
| "loss": 1.1273, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 2.6048382510420954e-06, |
| "loss": 1.1127, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 2.576172991838933e-06, |
| "loss": 1.1234, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 2.547642993232976e-06, |
| "loss": 1.1536, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 2.519248775034918e-06, |
| "loss": 1.1439, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 2.490990854581563e-06, |
| "loss": 1.1128, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 2.4628697467263916e-06, |
| "loss": 1.1272, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 2.4348859638301857e-06, |
| "loss": 1.1564, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 2.4070400157517036e-06, |
| "loss": 1.1108, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 2.3793324098383796e-06, |
| "loss": 1.117, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 2.351763650917074e-06, |
| "loss": 1.1151, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 2.3243342412848923e-06, |
| "loss": 1.102, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 2.2970446807000237e-06, |
| "loss": 1.1295, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 2.26989546637263e-06, |
| "loss": 1.1301, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 2.2428870929558012e-06, |
| "loss": 1.1257, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 2.2160200525365326e-06, |
| "loss": 1.1158, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 2.1892948346267583e-06, |
| "loss": 1.1461, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 2.1627119261544348e-06, |
| "loss": 1.1388, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 2.1362718114546777e-06, |
| "loss": 1.1578, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 2.109974972260921e-06, |
| "loss": 1.1581, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 2.0838218876961524e-06, |
| "loss": 1.149, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 2.057813034264181e-06, |
| "loss": 1.1258, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 2.0319488858409552e-06, |
| "loss": 1.1312, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 2.0062299136659203e-06, |
| "loss": 1.1434, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.980656586333449e-06, |
| "loss": 1.1189, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.955229369784295e-06, |
| "loss": 1.1115, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.929948727297096e-06, |
| "loss": 1.1223, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.9048151194799435e-06, |
| "loss": 1.0928, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.8798290042619949e-06, |
| "loss": 1.1151, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.8549908368851099e-06, |
| "loss": 1.1427, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.8303010698955803e-06, |
| "loss": 1.1106, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.8057601531358693e-06, |
| "loss": 1.1634, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.7813685337364205e-06, |
| "loss": 1.1385, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.7571266561075073e-06, |
| "loss": 1.1263, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.7330349619311415e-06, |
| "loss": 1.1237, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.7090938901530264e-06, |
| "loss": 1.1577, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.6853038769745466e-06, |
| "loss": 1.1035, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.6616653558448437e-06, |
| "loss": 1.1248, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.638178757452894e-06, |
| "loss": 1.1165, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.614844509719674e-06, |
| "loss": 1.1144, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.5916630377903696e-06, |
| "loss": 1.1142, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.5686347640266208e-06, |
| "loss": 1.0993, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.5457601079988226e-06, |
| "loss": 1.1667, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.5230394864784925e-06, |
| "loss": 1.0996, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.5004733134306692e-06, |
| "loss": 1.1243, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.478062000006375e-06, |
| "loss": 1.1052, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.4558059545351144e-06, |
| "loss": 1.1256, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.4337055825174506e-06, |
| "loss": 1.0953, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.4117612866176022e-06, |
| "loss": 1.1087, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.3899734666561138e-06, |
| "loss": 1.123, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.3683425196025734e-06, |
| "loss": 1.1144, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.3468688395683783e-06, |
| "loss": 1.1452, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.325552817799547e-06, |
| "loss": 1.1146, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.3043948426696019e-06, |
| "loss": 1.1461, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.2833952996724864e-06, |
| "loss": 1.143, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.2625545714155474e-06, |
| "loss": 1.1151, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.24187303761255e-06, |
| "loss": 1.1066, |
| "step": 641 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.221351075076781e-06, |
| "loss": 1.1535, |
| "step": 642 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2009890577141625e-06, |
| "loss": 1.1223, |
| "step": 643 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.1807873565164507e-06, |
| "loss": 1.1557, |
| "step": 644 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.1607463395544782e-06, |
| "loss": 1.1301, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.1408663719714418e-06, |
| "loss": 1.139, |
| "step": 646 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.121147815976248e-06, |
| "loss": 1.0843, |
| "step": 647 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.1015910308369239e-06, |
| "loss": 1.0918, |
| "step": 648 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.0821963728740626e-06, |
| "loss": 1.106, |
| "step": 649 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.062964195454329e-06, |
| "loss": 1.1487, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.0438948489840327e-06, |
| "loss": 1.0996, |
| "step": 651 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.0249886809027355e-06, |
| "loss": 1.115, |
| "step": 652 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.0062460356769189e-06, |
| "loss": 1.1227, |
| "step": 653 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 9.876672547937117e-07, |
| "loss": 1.0986, |
| "step": 654 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 9.692526767546727e-07, |
| "loss": 1.1219, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 9.51002637069619e-07, |
| "loss": 1.1248, |
| "step": 656 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 9.32917468250506e-07, |
| "loss": 1.1461, |
| "step": 657 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 9.149974998053823e-07, |
| "loss": 1.1145, |
| "step": 658 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 8.972430582323788e-07, |
| "loss": 1.1498, |
| "step": 659 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 8.796544670137574e-07, |
| "loss": 1.1069, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 8.622320466100242e-07, |
| "loss": 1.1415, |
| "step": 661 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 8.449761144540869e-07, |
| "loss": 1.1049, |
| "step": 662 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 8.278869849454718e-07, |
| "loss": 1.1194, |
| "step": 663 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 8.109649694445898e-07, |
| "loss": 1.1237, |
| "step": 664 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 7.942103762670783e-07, |
| "loss": 1.1195, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 7.776235106781704e-07, |
| "loss": 1.1165, |
| "step": 666 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 7.612046748871327e-07, |
| "loss": 1.1178, |
| "step": 667 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 7.449541680417704e-07, |
| "loss": 1.1439, |
| "step": 668 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 7.288722862229691e-07, |
| "loss": 1.0893, |
| "step": 669 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 7.12959322439295e-07, |
| "loss": 1.1431, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 6.972155666216684e-07, |
| "loss": 1.1192, |
| "step": 671 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 6.816413056180748e-07, |
| "loss": 1.131, |
| "step": 672 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 6.662368231883388e-07, |
| "loss": 1.1401, |
| "step": 673 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 6.510023999989501e-07, |
| "loss": 1.1095, |
| "step": 674 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 6.359383136179598e-07, |
| "loss": 1.1282, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 6.210448385099177e-07, |
| "loss": 1.1323, |
| "step": 676 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 6.063222460308649e-07, |
| "loss": 1.1179, |
| "step": 677 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 5.917708044234017e-07, |
| "loss": 1.1343, |
| "step": 678 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 5.77390778811796e-07, |
| "loss": 1.101, |
| "step": 679 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 5.631824311971456e-07, |
| "loss": 1.1387, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 5.491460204526156e-07, |
| "loss": 1.0765, |
| "step": 681 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 5.352818023187167e-07, |
| "loss": 1.1008, |
| "step": 682 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 5.215900293986431e-07, |
| "loss": 1.134, |
| "step": 683 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 5.08070951153673e-07, |
| "loss": 1.1359, |
| "step": 684 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 4.947248138986249e-07, |
| "loss": 1.1319, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 4.81551860797369e-07, |
| "loss": 1.1359, |
| "step": 686 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 4.6855233185839175e-07, |
| "loss": 1.1289, |
| "step": 687 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 4.557264639304315e-07, |
| "loss": 1.131, |
| "step": 688 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 4.430744906981577e-07, |
| "loss": 1.1484, |
| "step": 689 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 4.305966426779118e-07, |
| "loss": 1.1267, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 4.1829314721351213e-07, |
| "loss": 1.1382, |
| "step": 691 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 4.0616422847211013e-07, |
| "loss": 1.1439, |
| "step": 692 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 3.942101074401028e-07, |
| "loss": 1.1129, |
| "step": 693 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 3.824310019191102e-07, |
| "loss": 1.0898, |
| "step": 694 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 3.708271265220087e-07, |
| "loss": 1.1395, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 3.5939869266901073e-07, |
| "loss": 1.126, |
| "step": 696 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 3.481459085838268e-07, |
| "loss": 1.1485, |
| "step": 697 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 3.370689792898618e-07, |
| "loss": 1.1505, |
| "step": 698 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 3.261681066064859e-07, |
| "loss": 1.1443, |
| "step": 699 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 3.154434891453473e-07, |
| "loss": 1.1293, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 3.0489532230676744e-07, |
| "loss": 1.1002, |
| "step": 701 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 2.945237982761706e-07, |
| "loss": 1.0944, |
| "step": 702 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 2.843291060205855e-07, |
| "loss": 1.1131, |
| "step": 703 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 2.7431143128520243e-07, |
| "loss": 1.0828, |
| "step": 704 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 2.6447095658999054e-07, |
| "loss": 1.1194, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 2.5480786122636713e-07, |
| "loss": 1.1109, |
| "step": 706 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 2.453223212539391e-07, |
| "loss": 1.1455, |
| "step": 707 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 2.3601450949728876e-07, |
| "loss": 1.0945, |
| "step": 708 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 2.2688459554282673e-07, |
| "loss": 1.0706, |
| "step": 709 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 2.1793274573570166e-07, |
| "loss": 1.1309, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 2.091591231767709e-07, |
| "loss": 1.1223, |
| "step": 711 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 2.005638877196303e-07, |
| "loss": 1.1511, |
| "step": 712 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.921471959676957e-07, |
| "loss": 1.0697, |
| "step": 713 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.8390920127135613e-07, |
| "loss": 1.1142, |
| "step": 714 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.7585005372517504e-07, |
| "loss": 1.1251, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.6796990016515914e-07, |
| "loss": 1.1298, |
| "step": 716 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.6026888416608267e-07, |
| "loss": 1.1118, |
| "step": 717 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.5274714603886742e-07, |
| "loss": 1.1155, |
| "step": 718 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.4540482282803136e-07, |
| "loss": 1.1185, |
| "step": 719 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.3824204830918952e-07, |
| "loss": 1.1348, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.3125895298661705e-07, |
| "loss": 1.1312, |
| "step": 721 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.244556640908712e-07, |
| "loss": 1.119, |
| "step": 722 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1783230557647075e-07, |
| "loss": 1.0977, |
| "step": 723 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1138899811964477e-07, |
| "loss": 1.0999, |
| "step": 724 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.0512585911612416e-07, |
| "loss": 1.1318, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 9.904300267901012e-08, |
| "loss": 1.1263, |
| "step": 726 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 9.314053963669245e-08, |
| "loss": 1.1071, |
| "step": 727 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 8.741857753083228e-08, |
| "loss": 1.1288, |
| "step": 728 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 8.187722061439806e-08, |
| "loss": 1.1247, |
| "step": 729 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 7.651656984977051e-08, |
| "loss": 1.1275, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 7.133672290690064e-08, |
| "loss": 1.1239, |
| "step": 731 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 6.633777416153232e-08, |
| "loss": 1.1434, |
| "step": 732 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 6.151981469348034e-08, |
| "loss": 1.106, |
| "step": 733 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 5.688293228497399e-08, |
| "loss": 1.1278, |
| "step": 734 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 5.2427211419051605e-08, |
| "loss": 1.1375, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 4.815273327803183e-08, |
| "loss": 1.1598, |
| "step": 736 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 4.405957574202147e-08, |
| "loss": 1.1285, |
| "step": 737 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 4.014781338751106e-08, |
| "loss": 1.0929, |
| "step": 738 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 3.641751748600042e-08, |
| "loss": 1.1339, |
| "step": 739 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 3.2868756002712997e-08, |
| "loss": 1.1193, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 2.950159359535132e-08, |
| "loss": 1.1526, |
| "step": 741 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 2.6316091612920146e-08, |
| "loss": 1.142, |
| "step": 742 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 2.3312308094607382e-08, |
| "loss": 1.1144, |
| "step": 743 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 2.049029776873268e-08, |
| "loss": 1.0968, |
| "step": 744 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.7850112051738255e-08, |
| "loss": 1.156, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.5391799047266287e-08, |
| "loss": 1.08, |
| "step": 746 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.3115403545270744e-08, |
| "loss": 1.1334, |
| "step": 747 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1020967021210249e-08, |
| "loss": 1.1115, |
| "step": 748 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 9.108527635284248e-09, |
| "loss": 1.1364, |
| "step": 749 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 7.378120231745778e-09, |
| "loss": 1.1019, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 5.8297763382597625e-09, |
| "loss": 1.1174, |
| "step": 751 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 4.463524165333466e-09, |
| "loss": 1.116, |
| "step": 752 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 3.2793886057991277e-09, |
| "loss": 1.1341, |
| "step": 753 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 2.277391234363213e-09, |
| "loss": 1.1028, |
| "step": 754 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.4575503072100649e-09, |
| "loss": 1.1089, |
| "step": 755 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 8.198807616732752e-10, |
| "loss": 1.1326, |
| "step": 756 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.6439421595924065e-10, |
| "loss": 1.1139, |
| "step": 757 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 9.109896894066161e-11, |
| "loss": 1.1377, |
| "step": 758 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 0.0, |
| "loss": 1.1199, |
| "step": 759 |
| }, |
| { |
| "epoch": 1.0, |
| "step": 759, |
| "total_flos": 3.241478138413662e+22, |
| "train_loss": 1.1657107080552576, |
| "train_runtime": 16008.6151, |
| "train_samples_per_second": 12.138, |
| "train_steps_per_second": 0.047 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 759, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 1, |
| "save_steps": 200, |
| "total_flos": 3.241478138413662e+22, |
| "train_batch_size": 2, |
| "trial_name": null, |
| "trial_params": null |
| } |