{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.6534879921581441,
  "global_step": 500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 2.8985507246376816e-07,
      "loss": 2.3988,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 5.797101449275363e-07,
      "loss": 2.2096,
      "step": 2
    },
    {
      "epoch": 0.0,
      "learning_rate": 8.695652173913044e-07,
      "loss": 1.8346,
      "step": 3
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.1594202898550726e-06,
      "loss": 1.8346,
      "step": 4
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.4492753623188408e-06,
      "loss": 1.9785,
      "step": 5
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.7391304347826088e-06,
      "loss": 1.8236,
      "step": 6
    },
    {
      "epoch": 0.01,
      "learning_rate": 2.028985507246377e-06,
      "loss": 1.8187,
      "step": 7
    },
    {
      "epoch": 0.01,
      "learning_rate": 2.3188405797101453e-06,
      "loss": 1.8793,
      "step": 8
    },
    {
      "epoch": 0.01,
      "learning_rate": 2.6086956521739132e-06,
      "loss": 1.928,
      "step": 9
    },
    {
      "epoch": 0.01,
      "learning_rate": 2.8985507246376816e-06,
      "loss": 1.76,
      "step": 10
    },
    {
      "epoch": 0.01,
      "learning_rate": 3.188405797101449e-06,
      "loss": 1.7108,
      "step": 11
    },
    {
      "epoch": 0.02,
      "learning_rate": 3.4782608695652175e-06,
      "loss": 1.7712,
      "step": 12
    },
    {
      "epoch": 0.02,
      "learning_rate": 3.768115942028986e-06,
      "loss": 1.8288,
      "step": 13
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.057971014492754e-06,
      "loss": 1.8112,
      "step": 14
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.347826086956522e-06,
      "loss": 1.7858,
      "step": 15
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.637681159420291e-06,
      "loss": 1.7692,
      "step": 16
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.927536231884059e-06,
      "loss": 1.6936,
      "step": 17
    },
    {
      "epoch": 0.02,
      "learning_rate": 5.2173913043478265e-06,
      "loss": 1.8671,
      "step": 18
    },
    {
      "epoch": 0.02,
      "learning_rate": 5.507246376811595e-06,
      "loss": 1.7851,
      "step": 19
    },
    {
      "epoch": 0.03,
      "learning_rate": 5.797101449275363e-06,
      "loss": 1.8736,
      "step": 20
    },
    {
      "epoch": 0.03,
      "learning_rate": 6.086956521739132e-06,
      "loss": 1.828,
      "step": 21
    },
    {
      "epoch": 0.03,
      "learning_rate": 6.376811594202898e-06,
      "loss": 1.6725,
      "step": 22
    },
    {
      "epoch": 0.03,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.6799,
      "step": 23
    },
    {
      "epoch": 0.03,
      "learning_rate": 6.956521739130435e-06,
      "loss": 1.9097,
      "step": 24
    },
    {
      "epoch": 0.03,
      "learning_rate": 7.246376811594203e-06,
      "loss": 1.8499,
      "step": 25
    },
    {
      "epoch": 0.03,
      "learning_rate": 7.536231884057972e-06,
      "loss": 1.7507,
      "step": 26
    },
    {
      "epoch": 0.04,
      "learning_rate": 7.82608695652174e-06,
      "loss": 1.6498,
      "step": 27
    },
    {
      "epoch": 0.04,
      "learning_rate": 8.115942028985508e-06,
      "loss": 1.6504,
      "step": 28
    },
    {
      "epoch": 0.04,
      "learning_rate": 8.405797101449275e-06,
      "loss": 1.5428,
      "step": 29
    },
    {
      "epoch": 0.04,
      "learning_rate": 8.695652173913044e-06,
      "loss": 1.7138,
      "step": 30
    },
    {
      "epoch": 0.04,
      "learning_rate": 8.985507246376812e-06,
      "loss": 1.6575,
      "step": 31
    },
    {
      "epoch": 0.04,
      "learning_rate": 9.275362318840581e-06,
      "loss": 1.8043,
      "step": 32
    },
    {
      "epoch": 0.04,
      "learning_rate": 9.565217391304349e-06,
      "loss": 1.7853,
      "step": 33
    },
    {
      "epoch": 0.04,
      "learning_rate": 9.855072463768118e-06,
      "loss": 1.6425,
      "step": 34
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.0144927536231885e-05,
      "loss": 1.7267,
      "step": 35
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.0434782608695653e-05,
      "loss": 1.6063,
      "step": 36
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.0724637681159422e-05,
      "loss": 1.7126,
      "step": 37
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.101449275362319e-05,
      "loss": 1.6379,
      "step": 38
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.1304347826086957e-05,
      "loss": 1.7932,
      "step": 39
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.1594202898550726e-05,
      "loss": 1.5776,
      "step": 40
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.1884057971014494e-05,
      "loss": 1.6716,
      "step": 41
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.2173913043478263e-05,
      "loss": 1.5683,
      "step": 42
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.2463768115942029e-05,
      "loss": 1.7941,
      "step": 43
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.2753623188405797e-05,
      "loss": 1.6677,
      "step": 44
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.3043478260869566e-05,
      "loss": 1.6677,
      "step": 45
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 1.8042,
      "step": 46
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.3623188405797103e-05,
      "loss": 1.6045,
      "step": 47
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.391304347826087e-05,
      "loss": 1.6353,
      "step": 48
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.420289855072464e-05,
      "loss": 1.6586,
      "step": 49
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.4492753623188407e-05,
      "loss": 1.626,
      "step": 50
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.4782608695652174e-05,
      "loss": 1.6165,
      "step": 51
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.5072463768115944e-05,
      "loss": 1.6579,
      "step": 52
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.536231884057971e-05,
      "loss": 1.585,
      "step": 53
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.565217391304348e-05,
      "loss": 1.585,
      "step": 54
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.5942028985507246e-05,
      "loss": 1.594,
      "step": 55
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.6231884057971015e-05,
      "loss": 1.6924,
      "step": 56
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.6521739130434785e-05,
      "loss": 1.8485,
      "step": 57
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.681159420289855e-05,
      "loss": 1.6987,
      "step": 58
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.710144927536232e-05,
      "loss": 1.5926,
      "step": 59
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.739130434782609e-05,
      "loss": 1.6222,
      "step": 60
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.7681159420289858e-05,
      "loss": 1.6906,
      "step": 61
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.7971014492753624e-05,
      "loss": 1.6466,
      "step": 62
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.8260869565217393e-05,
      "loss": 1.6629,
      "step": 63
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.8550724637681162e-05,
      "loss": 1.7639,
      "step": 64
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.8840579710144928e-05,
      "loss": 1.5877,
      "step": 65
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9130434782608697e-05,
      "loss": 1.6888,
      "step": 66
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9420289855072467e-05,
      "loss": 1.6415,
      "step": 67
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9710144927536236e-05,
      "loss": 1.6959,
      "step": 68
    },
    {
      "epoch": 0.09,
      "learning_rate": 2e-05,
      "loss": 1.6053,
      "step": 69
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.999999004091687e-05,
      "loss": 1.5844,
      "step": 70
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9999960163687307e-05,
      "loss": 1.7203,
      "step": 71
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9999910368370826e-05,
      "loss": 1.6762,
      "step": 72
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9999840655066608e-05,
      "loss": 1.7704,
      "step": 73
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9999751023913506e-05,
      "loss": 1.8186,
      "step": 74
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.999964147509006e-05,
      "loss": 1.816,
      "step": 75
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.999951200881446e-05,
      "loss": 1.6055,
      "step": 76
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9999362625344584e-05,
      "loss": 1.7048,
      "step": 77
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9999193324977974e-05,
      "loss": 1.6125,
      "step": 78
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9999004108051846e-05,
      "loss": 1.7239,
      "step": 79
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9998794974943087e-05,
      "loss": 1.7377,
      "step": 80
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9998565926068253e-05,
      "loss": 1.6942,
      "step": 81
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9998316961883563e-05,
      "loss": 1.6398,
      "step": 82
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.999804808288491e-05,
      "loss": 1.5898,
      "step": 83
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9997759289607854e-05,
      "loss": 1.6926,
      "step": 84
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9997450582627614e-05,
      "loss": 1.5962,
      "step": 85
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.999712196255908e-05,
      "loss": 1.7171,
      "step": 86
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9996773430056806e-05,
      "loss": 1.6443,
      "step": 87
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9996404985814998e-05,
      "loss": 1.7252,
      "step": 88
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9996016630567535e-05,
      "loss": 1.6777,
      "step": 89
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9995608365087945e-05,
      "loss": 1.6758,
      "step": 90
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9995180190189424e-05,
      "loss": 1.6416,
      "step": 91
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.999473210672481e-05,
      "loss": 1.6161,
      "step": 92
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.999426411558661e-05,
      "loss": 1.8361,
      "step": 93
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9993776217706972e-05,
      "loss": 1.6755,
      "step": 94
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9993268414057704e-05,
      "loss": 1.5446,
      "step": 95
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9992740705650252e-05,
      "loss": 1.6776,
      "step": 96
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.999219309353572e-05,
      "loss": 1.5742,
      "step": 97
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.999162557880485e-05,
      "loss": 1.7124,
      "step": 98
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9991038162588018e-05,
      "loss": 1.6711,
      "step": 99
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.999043084605526e-05,
      "loss": 1.5564,
      "step": 100
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.998980363041624e-05,
      "loss": 1.666,
      "step": 101
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9989156516920248e-05,
      "loss": 1.7261,
      "step": 102
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9988489506856218e-05,
      "loss": 1.5975,
      "step": 103
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9987802601552717e-05,
      "loss": 1.5775,
      "step": 104
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9987095802377933e-05,
      "loss": 1.6664,
      "step": 105
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9986369110739675e-05,
      "loss": 1.6425,
      "step": 106
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9985622528085382e-05,
      "loss": 1.7126,
      "step": 107
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.998485605590211e-05,
      "loss": 1.5268,
      "step": 108
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9984069695716534e-05,
      "loss": 1.7186,
      "step": 109
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.998326344909494e-05,
      "loss": 1.5536,
      "step": 110
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9982437317643218e-05,
      "loss": 1.743,
      "step": 111
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.998159130300687e-05,
      "loss": 1.7855,
      "step": 112
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9980725406871007e-05,
      "loss": 1.6741,
      "step": 113
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9979839630960333e-05,
      "loss": 1.6982,
      "step": 114
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.997893397703915e-05,
      "loss": 1.9001,
      "step": 115
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9978008446911354e-05,
      "loss": 1.8314,
      "step": 116
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9977063042420438e-05,
      "loss": 1.6957,
      "step": 117
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9976097765449463e-05,
      "loss": 1.6764,
      "step": 118
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9975112617921097e-05,
      "loss": 1.6958,
      "step": 119
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.997410760179756e-05,
      "loss": 1.6011,
      "step": 120
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9973082719080673e-05,
      "loss": 1.665,
      "step": 121
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9972037971811802e-05,
      "loss": 1.6208,
      "step": 122
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.99709733620719e-05,
      "loss": 1.6691,
      "step": 123
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9969888891981473e-05,
      "loss": 1.8241,
      "step": 124
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9968784563700586e-05,
      "loss": 1.7574,
      "step": 125
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9967660379428855e-05,
      "loss": 1.6484,
      "step": 126
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9966516341405452e-05,
      "loss": 1.5869,
      "step": 127
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.996535245190909e-05,
      "loss": 1.7188,
      "step": 128
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.996416871325803e-05,
      "loss": 1.6428,
      "step": 129
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.996296512781005e-05,
      "loss": 1.7432,
      "step": 130
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.996174169796248e-05,
      "loss": 1.6332,
      "step": 131
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.996049842615217e-05,
      "loss": 1.7188,
      "step": 132
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9959235314855485e-05,
      "loss": 1.7858,
      "step": 133
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9957952366588307e-05,
      "loss": 1.5635,
      "step": 134
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.995664958390604e-05,
      "loss": 1.7286,
      "step": 135
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9955326969403587e-05,
      "loss": 1.5419,
      "step": 136
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9953984525715354e-05,
      "loss": 1.6727,
      "step": 137
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.995262225551524e-05,
      "loss": 1.6839,
      "step": 138
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9951240161516643e-05,
      "loss": 1.5626,
      "step": 139
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9949838246472436e-05,
      "loss": 1.6662,
      "step": 140
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9948416513174976e-05,
      "loss": 1.6306,
      "step": 141
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9946974964456094e-05,
      "loss": 1.6706,
      "step": 142
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9945513603187096e-05,
      "loss": 1.8325,
      "step": 143
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9944032432278743e-05,
      "loss": 1.7128,
      "step": 144
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9942531454681254e-05,
      "loss": 1.6979,
      "step": 145
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9941010673384307e-05,
      "loss": 1.6811,
      "step": 146
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9939470091417012e-05,
      "loss": 1.5436,
      "step": 147
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.993790971184793e-05,
      "loss": 1.705,
      "step": 148
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9936329537785054e-05,
      "loss": 1.5621,
      "step": 149
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9934729572375792e-05,
      "loss": 1.7562,
      "step": 150
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.993310981880699e-05,
      "loss": 1.6263,
      "step": 151
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9931470280304895e-05,
      "loss": 1.6247,
      "step": 152
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.992981096013517e-05,
      "loss": 1.6521,
      "step": 153
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9928131861602878e-05,
      "loss": 1.5587,
      "step": 154
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.992643298805247e-05,
      "loss": 1.6085,
      "step": 155
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.992471434286779e-05,
      "loss": 1.5774,
      "step": 156
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9922975929472076e-05,
      "loss": 1.731,
      "step": 157
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9921217751327916e-05,
      "loss": 1.5823,
      "step": 158
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9919439811937283e-05,
      "loss": 1.5621,
      "step": 159
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9917642114841505e-05,
      "loss": 1.6566,
      "step": 160
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9915824663621267e-05,
      "loss": 1.5214,
      "step": 161
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9913987461896597e-05,
      "loss": 1.7257,
      "step": 162
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9912130513326863e-05,
      "loss": 1.7401,
      "step": 163
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.991025382161077e-05,
      "loss": 1.5845,
      "step": 164
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9908357390486342e-05,
      "loss": 1.6235,
      "step": 165
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.990644122373092e-05,
      "loss": 1.571,
      "step": 166
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.990450532516116e-05,
      "loss": 1.8406,
      "step": 167
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.990254969863301e-05,
      "loss": 1.8326,
      "step": 168
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9900574348041728e-05,
      "loss": 1.6099,
      "step": 169
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9898579277321845e-05,
      "loss": 1.4999,
      "step": 170
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.989656449044718e-05,
      "loss": 1.6982,
      "step": 171
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9894529991430814e-05,
      "loss": 1.7241,
      "step": 172
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.98924757843251e-05,
      "loss": 1.6838,
      "step": 173
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9890401873221642e-05,
      "loss": 1.5703,
      "step": 174
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9888308262251286e-05,
      "loss": 1.6669,
      "step": 175
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9886194955584126e-05,
      "loss": 1.682,
      "step": 176
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.988406195742948e-05,
      "loss": 1.7266,
      "step": 177
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9881909272035888e-05,
      "loss": 1.7065,
      "step": 178
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9879736903691107e-05,
      "loss": 1.6784,
      "step": 179
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9877544856722094e-05,
      "loss": 1.8589,
      "step": 180
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9875333135495e-05,
      "loss": 1.7492,
      "step": 181
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.987310174441518e-05,
      "loss": 1.6953,
      "step": 182
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.987085068792715e-05,
      "loss": 1.6772,
      "step": 183
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.98685799705146e-05,
      "loss": 1.6507,
      "step": 184
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9866289596700383e-05,
      "loss": 1.6677,
      "step": 185
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.98639795710465e-05,
      "loss": 1.5531,
      "step": 186
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9861649898154107e-05,
      "loss": 1.692,
      "step": 187
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.985930058266348e-05,
      "loss": 1.5314,
      "step": 188
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9856931629254032e-05,
      "loss": 1.5806,
      "step": 189
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.985454304264427e-05,
      "loss": 1.6998,
      "step": 190
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.985213482759183e-05,
      "loss": 1.532,
      "step": 191
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9849706988893433e-05,
      "loss": 1.4833,
      "step": 192
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.984725953138489e-05,
      "loss": 1.6384,
      "step": 193
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.984479245994108e-05,
      "loss": 1.602,
      "step": 194
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.984230577947597e-05,
      "loss": 1.6837,
      "step": 195
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.983979949494256e-05,
      "loss": 1.6221,
      "step": 196
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9837273611332918e-05,
      "loss": 1.5824,
      "step": 197
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.983472813367813e-05,
      "loss": 1.5726,
      "step": 198
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9832163067048335e-05,
      "loss": 1.8058,
      "step": 199
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.982957841655266e-05,
      "loss": 1.6665,
      "step": 200
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9826974187339267e-05,
      "loss": 1.6163,
      "step": 201
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9824350384595295e-05,
      "loss": 1.6621,
      "step": 202
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9821707013546885e-05,
      "loss": 1.6097,
      "step": 203
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9819044079459142e-05,
      "loss": 1.6398,
      "step": 204
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9816361587636143e-05,
      "loss": 1.5231,
      "step": 205
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9813659543420924e-05,
      "loss": 1.5495,
      "step": 206
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.981093795219546e-05,
      "loss": 1.6761,
      "step": 207
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9808196819380656e-05,
      "loss": 1.5347,
      "step": 208
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9805436150436352e-05,
      "loss": 1.5175,
      "step": 209
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.980265595086129e-05,
      "loss": 1.6109,
      "step": 210
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9799856226193125e-05,
      "loss": 1.5862,
      "step": 211
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9797036982008385e-05,
      "loss": 1.6155,
      "step": 212
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9794198223922496e-05,
      "loss": 1.6225,
      "step": 213
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.979133995758974e-05,
      "loss": 1.7578,
      "step": 214
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.978846218870326e-05,
      "loss": 1.6944,
      "step": 215
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9785564922995042e-05,
      "loss": 1.5978,
      "step": 216
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.978264816623591e-05,
      "loss": 1.4836,
      "step": 217
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.977971192423551e-05,
      "loss": 1.7529,
      "step": 218
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9776756202842297e-05,
      "loss": 1.5821,
      "step": 219
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.977378100794352e-05,
      "loss": 1.5954,
      "step": 220
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.977078634546523e-05,
      "loss": 1.5528,
      "step": 221
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.976777222137224e-05,
      "loss": 1.7068,
      "step": 222
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9764738641668137e-05,
      "loss": 1.6066,
      "step": 223
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9761685612395253e-05,
      "loss": 1.639,
      "step": 224
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9758613139634662e-05,
      "loss": 1.5999,
      "step": 225
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9755521229506164e-05,
      "loss": 1.6008,
      "step": 226
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9752409888168285e-05,
      "loss": 1.7106,
      "step": 227
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9749279121818235e-05,
      "loss": 1.6942,
      "step": 228
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.974612893669194e-05,
      "loss": 1.7407,
      "step": 229
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9742959339063977e-05,
      "loss": 1.6449,
      "step": 230
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9739770335247616e-05,
      "loss": 1.5437,
      "step": 231
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.973656193159476e-05,
      "loss": 1.5952,
      "step": 232
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9733334134495963e-05,
      "loss": 1.7003,
      "step": 233
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9730086950380404e-05,
      "loss": 1.5441,
      "step": 234
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9726820385715877e-05,
      "loss": 1.5202,
      "step": 235
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9723534447008785e-05,
      "loss": 1.6691,
      "step": 236
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.972022914080411e-05,
      "loss": 1.6877,
      "step": 237
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9716904473685417e-05,
      "loss": 1.5395,
      "step": 238
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.971356045227484e-05,
      "loss": 1.6574,
      "step": 239
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9710197083233044e-05,
      "loss": 1.5122,
      "step": 240
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.970681437325925e-05,
      "loss": 1.4915,
      "step": 241
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.97034123290912e-05,
      "loss": 1.5517,
      "step": 242
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9699990957505136e-05,
      "loss": 1.7456,
      "step": 243
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9696550265315805e-05,
      "loss": 1.4699,
      "step": 244
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9693090259376436e-05,
      "loss": 1.5823,
      "step": 245
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9689610946578725e-05,
      "loss": 1.7989,
      "step": 246
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9686112333852826e-05,
      "loss": 1.4173,
      "step": 247
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.968259442816733e-05,
      "loss": 1.605,
      "step": 248
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9679057236529266e-05,
      "loss": 1.6035,
      "step": 249
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9675500765984066e-05,
      "loss": 1.5451,
      "step": 250
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9671925023615572e-05,
      "loss": 1.8563,
      "step": 251
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9668330016546004e-05,
      "loss": 1.464,
      "step": 252
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9664715751935958e-05,
      "loss": 1.5198,
      "step": 253
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9661082236984387e-05,
      "loss": 1.4562,
      "step": 254
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.965742947892858e-05,
      "loss": 1.6719,
      "step": 255
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.965375748504417e-05,
      "loss": 1.629,
      "step": 256
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9650066262645097e-05,
      "loss": 1.5683,
      "step": 257
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.964635581908359e-05,
      "loss": 1.6081,
      "step": 258
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9642626161750176e-05,
      "loss": 1.5387,
      "step": 259
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9638877298073645e-05,
      "loss": 1.5545,
      "step": 260
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9635109235521057e-05,
      "loss": 1.5341,
      "step": 261
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.963132198159769e-05,
      "loss": 1.5831,
      "step": 262
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9627515543847068e-05,
      "loss": 1.6099,
      "step": 263
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.962368992985091e-05,
      "loss": 1.5442,
      "step": 264
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.961984514722914e-05,
      "loss": 1.6946,
      "step": 265
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.961598120363986e-05,
      "loss": 1.6732,
      "step": 266
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.961209810677934e-05,
      "loss": 1.6603,
      "step": 267
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9608195864381994e-05,
      "loss": 1.4686,
      "step": 268
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.960427448422037e-05,
      "loss": 1.7164,
      "step": 269
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9600333974105145e-05,
      "loss": 1.6552,
      "step": 270
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9596374341885093e-05,
      "loss": 1.4366,
      "step": 271
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9592395595447064e-05,
      "loss": 1.6662,
      "step": 272
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9588397742716004e-05,
      "loss": 1.6731,
      "step": 273
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9584380791654896e-05,
      "loss": 1.4749,
      "step": 274
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.958034475026477e-05,
      "loss": 1.5257,
      "step": 275
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9576289626584685e-05,
      "loss": 1.5007,
      "step": 276
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.95722154286917e-05,
      "loss": 1.5079,
      "step": 277
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9568122164700867e-05,
      "loss": 1.5105,
      "step": 278
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9564009842765225e-05,
      "loss": 1.5288,
      "step": 279
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9559878471075763e-05,
      "loss": 1.4527,
      "step": 280
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.955572805786141e-05,
      "loss": 1.5178,
      "step": 281
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.955155861138903e-05,
      "loss": 1.5058,
      "step": 282
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9547370139963406e-05,
      "loss": 1.4538,
      "step": 283
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.954316265192719e-05,
      "loss": 1.6261,
      "step": 284
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9538936155660934e-05,
      "loss": 1.578,
      "step": 285
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9534690659583045e-05,
      "loss": 1.4569,
      "step": 286
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.953042617214977e-05,
      "loss": 1.5795,
      "step": 287
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9526142701855187e-05,
      "loss": 1.625,
      "step": 288
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9521840257231183e-05,
      "loss": 1.6226,
      "step": 289
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9517518846847437e-05,
      "loss": 1.5874,
      "step": 290
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.951317847931141e-05,
      "loss": 1.5774,
      "step": 291
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9508819163268315e-05,
      "loss": 1.525,
      "step": 292
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9504440907401113e-05,
      "loss": 1.6432,
      "step": 293
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9500043720430484e-05,
      "loss": 1.6038,
      "step": 294
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9495627611114817e-05,
      "loss": 1.6018,
      "step": 295
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9491192588250198e-05,
      "loss": 1.5559,
      "step": 296
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9486738660670373e-05,
      "loss": 1.6093,
      "step": 297
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.948226583724675e-05,
      "loss": 1.4772,
      "step": 298
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.947777412688838e-05,
      "loss": 1.6215,
      "step": 299
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9473263538541916e-05,
      "loss": 1.6031,
      "step": 300
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9468734081191627e-05,
      "loss": 1.6468,
      "step": 301
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9464185763859365e-05,
      "loss": 1.5391,
      "step": 302
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.945961859560454e-05,
      "loss": 1.5916,
      "step": 303
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9455032585524115e-05,
      "loss": 1.5238,
      "step": 304
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9450427742752583e-05,
      "loss": 1.5679,
      "step": 305
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9445804076461946e-05,
      "loss": 1.4694,
      "step": 306
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.944116159586169e-05,
      "loss": 1.7224,
      "step": 307
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.94365003101988e-05,
      "loss": 1.5224,
      "step": 308
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.943182022875769e-05,
      "loss": 1.4027,
      "step": 309
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9427121360860233e-05,
      "loss": 1.6428,
      "step": 310
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9422403715865708e-05,
      "loss": 1.5872,
      "step": 311
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9417667303170803e-05,
      "loss": 1.5616,
      "step": 312
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9412912132209573e-05,
      "loss": 1.7097,
      "step": 313
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9408138212453456e-05,
      "loss": 1.5574,
      "step": 314
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.940334555341122e-05,
      "loss": 1.6133,
      "step": 315
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9398534164628968e-05,
      "loss": 1.5629,
      "step": 316
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.93937040556901e-05,
      "loss": 1.4919,
      "step": 317
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.938885523621531e-05,
      "loss": 1.4721,
      "step": 318
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9383987715862554e-05,
      "loss": 1.7848,
      "step": 319
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9379101504327044e-05,
      "loss": 1.5623,
      "step": 320
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9374196611341212e-05,
      "loss": 1.6131,
      "step": 321
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9369273046674708e-05,
      "loss": 1.6132,
      "step": 322
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.936433082013437e-05,
      "loss": 1.5333,
      "step": 323
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.935936994156421e-05,
      "loss": 1.554,
      "step": 324
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9354390420845387e-05,
      "loss": 1.5865,
      "step": 325
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.934939226789619e-05,
      "loss": 1.5857,
      "step": 326
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9344375492672024e-05,
      "loss": 1.5555,
      "step": 327
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.933934010516539e-05,
      "loss": 1.5558,
      "step": 328
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.933428611540585e-05,
      "loss": 1.6296,
      "step": 329
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.932921353346003e-05,
      "loss": 1.5004,
      "step": 330
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.932412236943158e-05,
      "loss": 1.6247,
      "step": 331
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9319012633461164e-05,
      "loss": 1.6277,
      "step": 332
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9313884335726443e-05,
      "loss": 1.7737,
      "step": 333
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9308737486442045e-05,
      "loss": 1.5115,
      "step": 334
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9303572095859545e-05,
      "loss": 1.5231,
      "step": 335
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9298388174267463e-05,
      "loss": 1.5685,
      "step": 336
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9293185731991212e-05,
      "loss": 1.4307,
      "step": 337
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9287964779393105e-05,
      "loss": 1.6424,
      "step": 338
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9282725326872324e-05,
      "loss": 1.5317,
      "step": 339
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.92774673848649e-05,
      "loss": 1.5713,
      "step": 340
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.927219096384368e-05,
      "loss": 1.4465,
      "step": 341
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.9266896074318335e-05,
      "loss": 1.4761,
      "step": 342
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.9261582726835316e-05,
      "loss": 1.6569,
      "step": 343
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.925625093197783e-05,
      "loss": 1.588,
      "step": 344
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.9250900700365837e-05,
      "loss": 1.6713,
      "step": 345
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.924553204265602e-05,
      "loss": 1.6163,
      "step": 346
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.9240144969541754e-05,
      "loss": 1.5906,
      "step": 347
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.923473949175311e-05,
      "loss": 1.561,
      "step": 348
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.9229315620056805e-05,
      "loss": 1.4219,
      "step": 349
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.9223873365256192e-05,
      "loss": 1.5169,
      "step": 350
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.921841273819125e-05,
      "loss": 1.6726,
      "step": 351
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.9212933749738546e-05,
      "loss": 1.4714,
      "step": 352
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.920743641081122e-05,
      "loss": 1.5211,
      "step": 353
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.920192073235896e-05,
      "loss": 1.6119,
      "step": 354
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.919638672536799e-05,
      "loss": 1.5366,
      "step": 355
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.9190834400861035e-05,
      "loss": 1.4784,
      "step": 356
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.918526376989731e-05,
      "loss": 1.5361,
      "step": 357
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.9179674843572487e-05,
      "loss": 1.734,
      "step": 358
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.9174067633018682e-05,
      "loss": 1.5957,
      "step": 359
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.9168442149404438e-05,
      "loss": 1.5167,
      "step": 360
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.916279840393467e-05,
      "loss": 1.4856,
      "step": 361
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.9157136407850706e-05,
      "loss": 1.5079,
      "step": 362
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.9151456172430186e-05,
      "loss": 1.6342,
      "step": 363
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.9145757708987103e-05,
      "loss": 1.6238,
      "step": 364
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.914004102887176e-05,
      "loss": 1.602,
      "step": 365
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.9134306143470722e-05,
      "loss": 1.6316,
      "step": 366
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.9128553064206835e-05,
      "loss": 1.3469,
      "step": 367
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.912278180253918e-05,
      "loss": 1.5529,
      "step": 368
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.911699236996305e-05,
      "loss": 1.4857,
      "step": 369
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.9111184778009934e-05,
      "loss": 1.5868,
      "step": 370
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.9105359038247484e-05,
      "loss": 1.5045,
      "step": 371
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.9099515162279515e-05,
      "loss": 1.6578,
      "step": 372
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.909365316174595e-05,
      "loss": 1.4851,
      "step": 373
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.908777304832282e-05,
      "loss": 1.4573,
      "step": 374
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.9081874833722234e-05,
      "loss": 1.5313,
      "step": 375
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.9075958529692355e-05,
      "loss": 1.4284,
      "step": 376
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.9070024148017375e-05,
      "loss": 1.3889,
      "step": 377
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.906407170051749e-05,
      "loss": 1.5237,
      "step": 378
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.905810119904889e-05,
      "loss": 1.5036,
      "step": 379
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.9052112655503713e-05,
      "loss": 1.6423,
      "step": 380
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.9046106081810047e-05,
      "loss": 1.5939,
      "step": 381
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.904008148993188e-05,
      "loss": 1.643,
      "step": 382
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.903403889186909e-05,
      "loss": 1.5539,
      "step": 383
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.9027978299657436e-05,
      "loss": 1.5466,
      "step": 384
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.9021899725368498e-05,
      "loss": 1.4804,
      "step": 385
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.9015803181109685e-05,
      "loss": 1.6738,
      "step": 386
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.900968867902419e-05,
      "loss": 1.6189,
      "step": 387
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.9003556231290988e-05,
      "loss": 1.3754,
      "step": 388
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.8997405850124786e-05,
      "loss": 1.4995,
      "step": 389
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.8991237547776014e-05,
      "loss": 1.7472,
      "step": 390
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.89850513365308e-05,
      "loss": 1.5819,
      "step": 391
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.897884722871094e-05,
      "loss": 1.4441,
      "step": 392
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.8972625236673887e-05,
      "loss": 1.4998,
      "step": 393
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.8966385372812703e-05,
      "loss": 1.7693,
      "step": 394
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.896012764955605e-05,
      "loss": 1.5972,
      "step": 395
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.895385207936817e-05,
      "loss": 1.4894,
      "step": 396
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.8947558674748844e-05,
      "loss": 1.6153,
      "step": 397
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.8941247448233386e-05,
      "loss": 1.5506,
      "step": 398
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.8934918412392596e-05,
      "loss": 1.4783,
      "step": 399
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.8928571579832756e-05,
      "loss": 1.6733,
      "step": 400
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.892220696319559e-05,
      "loss": 1.4514,
      "step": 401
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.891582457515825e-05,
      "loss": 1.6588,
      "step": 402
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.8909424428433278e-05,
      "loss": 1.5374,
      "step": 403
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.89030065357686e-05,
      "loss": 1.5054,
      "step": 404
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.8896570909947477e-05,
      "loss": 1.5964,
      "step": 405
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.889011756378849e-05,
      "loss": 1.5827,
      "step": 406
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.888364651014553e-05,
      "loss": 1.4867,
      "step": 407
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.887715776190775e-05,
      "loss": 1.5016,
      "step": 408
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.8870651331999542e-05,
      "loss": 1.6249,
      "step": 409
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.886412723338052e-05,
      "loss": 1.6036,
      "step": 410
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.8857585479045493e-05,
      "loss": 1.6172,
      "step": 411
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.885102608202444e-05,
      "loss": 1.5305,
      "step": 412
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.8844449055382473e-05,
      "loss": 1.5793,
      "step": 413
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.8837854412219828e-05,
      "loss": 1.6235,
      "step": 414
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.8831242165671816e-05,
      "loss": 1.6813,
      "step": 415
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.8824612328908828e-05,
      "loss": 1.553,
      "step": 416
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.8817964915136277e-05,
      "loss": 1.5233,
      "step": 417
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.8811299937594598e-05,
      "loss": 1.5602,
      "step": 418
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.88046174095592e-05,
      "loss": 1.6497,
      "step": 419
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.8797917344340453e-05,
      "loss": 1.5556,
      "step": 420
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.8791199755283664e-05,
      "loss": 1.4972,
      "step": 421
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.8784464655769033e-05,
      "loss": 1.4895,
      "step": 422
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.8777712059211643e-05,
      "loss": 1.4853,
      "step": 423
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.8770941979061432e-05,
      "loss": 1.5,
      "step": 424
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.8764154428803155e-05,
      "loss": 1.5266,
      "step": 425
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.875734942195637e-05,
      "loss": 1.6001,
      "step": 426
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.87505269720754e-05,
      "loss": 1.6224,
      "step": 427
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.8743687092749318e-05,
      "loss": 1.8942,
      "step": 428
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.8736829797601903e-05,
      "loss": 1.597,
      "step": 429
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.8729955100291634e-05,
      "loss": 1.4492,
      "step": 430
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.872306301451165e-05,
      "loss": 1.553,
      "step": 431
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.8716153553989716e-05,
      "loss": 1.6568,
      "step": 432
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.8709226732488216e-05,
      "loss": 1.6419,
      "step": 433
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.87022825638041e-05,
      "loss": 1.4201,
      "step": 434
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.8695321061768886e-05,
      "loss": 1.5144,
      "step": 435
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.868834224024861e-05,
      "loss": 1.6008,
      "step": 436
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.86813461131438e-05,
      "loss": 1.5232,
      "step": 437
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.8674332694389464e-05,
      "loss": 1.5325,
      "step": 438
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.8667301997955038e-05,
      "loss": 1.5205,
      "step": 439
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.866025403784439e-05,
      "loss": 1.5523,
      "step": 440
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.8653188828095754e-05,
      "loss": 1.6117,
      "step": 441
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.8646106382781738e-05,
      "loss": 1.5324,
      "step": 442
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.8639006716009275e-05,
      "loss": 1.5437,
      "step": 443
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.8631889841919596e-05,
      "loss": 1.646,
      "step": 444
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.862475577468821e-05,
      "loss": 1.4662,
      "step": 445
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.8617604528524876e-05,
      "loss": 1.6842,
      "step": 446
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.8610436117673557e-05,
      "loss": 1.6015,
      "step": 447
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.8603250556412418e-05,
      "loss": 1.4642,
      "step": 448
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.8596047859053776e-05,
      "loss": 1.5905,
      "step": 449
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.8588828039944086e-05,
      "loss": 1.3811,
      "step": 450
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.8581591113463903e-05,
      "loss": 1.477,
      "step": 451
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.8574337094027858e-05,
      "loss": 1.5653,
      "step": 452
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.8567065996084628e-05,
      "loss": 1.6238,
      "step": 453
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.8559777834116906e-05,
      "loss": 1.5427,
      "step": 454
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.8552472622641372e-05,
      "loss": 1.478,
      "step": 455
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.854515037620867e-05,
      "loss": 1.4926,
      "step": 456
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.8537811109403372e-05,
      "loss": 1.5102,
      "step": 457
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.8530454836843953e-05,
      "loss": 1.5598,
      "step": 458
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.8523081573182754e-05,
      "loss": 1.6468,
      "step": 459
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.851569133310597e-05,
      "loss": 1.5438,
      "step": 460
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.8508284131333604e-05,
      "loss": 1.4735,
      "step": 461
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.8500859982619438e-05,
      "loss": 1.4544,
      "step": 462
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.8493418901751016e-05,
      "loss": 1.5896,
      "step": 463
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.8485960903549614e-05,
      "loss": 1.5448,
      "step": 464
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.847848600287019e-05,
      "loss": 1.48,
      "step": 465
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.8470994214601378e-05,
      "loss": 1.5051,
      "step": 466
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.846348555366544e-05,
      "loss": 1.4919,
      "step": 467
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.845596003501826e-05,
      "loss": 1.4351,
      "step": 468
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.8448417673649292e-05,
      "loss": 1.5142,
      "step": 469
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.844085848458153e-05,
      "loss": 1.5037,
      "step": 470
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8433282482871497e-05,
      "loss": 1.4373,
      "step": 471
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8425689683609198e-05,
      "loss": 1.3986,
      "step": 472
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8418080101918095e-05,
      "loss": 1.5879,
      "step": 473
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.841045375295508e-05,
      "loss": 1.5525,
      "step": 474
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8402810651910444e-05,
      "loss": 1.4899,
      "step": 475
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.839515081400784e-05,
      "loss": 1.5907,
      "step": 476
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8387474254504265e-05,
      "loss": 1.4677,
      "step": 477
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8379780988690014e-05,
      "loss": 1.451,
      "step": 478
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.837207103188866e-05,
      "loss": 1.3821,
      "step": 479
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.8364344399457028e-05,
      "loss": 1.5024,
      "step": 480
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.8356601106785148e-05,
      "loss": 1.5263,
      "step": 481
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.8348841169296247e-05,
      "loss": 1.5388,
      "step": 482
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.8341064602446686e-05,
      "loss": 1.6173,
      "step": 483
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.8333271421725968e-05,
      "loss": 1.5454,
      "step": 484
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.8325461642656676e-05,
      "loss": 1.4468,
      "step": 485
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8317635280794466e-05,
      "loss": 1.3626,
      "step": 486
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8309792351728006e-05,
      "loss": 1.4551,
      "step": 487
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8301932871078975e-05,
      "loss": 1.3878,
      "step": 488
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.829405685450202e-05,
      "loss": 1.525,
      "step": 489
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.828616431768472e-05,
      "loss": 1.4175,
      "step": 490
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8278255276347563e-05,
      "loss": 1.5459,
      "step": 491
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8270329746243903e-05,
      "loss": 1.612,
      "step": 492
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.826238774315995e-05,
      "loss": 1.4209,
      "step": 493
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8254429282914715e-05,
      "loss": 1.5253,
      "step": 494
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.824645438135999e-05,
      "loss": 1.4988,
      "step": 495
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.823846305438032e-05,
      "loss": 1.4734,
      "step": 496
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8230455317892957e-05,
      "loss": 1.5854,
      "step": 497
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.822243118784785e-05,
      "loss": 1.3589,
      "step": 498
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8214390680227588e-05,
      "loss": 1.5336,
      "step": 499
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.82063338110474e-05,
      "loss": 1.4623,
      "step": 500
    }
  ],
  "max_steps": 2295,
  "num_train_epochs": 3,
  "total_flos": 1.169292903139246e+17,
  "trial_name": null,
  "trial_params": null
}