| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 2.9972316210396803, | |
| "global_step": 1218, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 5.405405405405406e-07, | |
| "loss": 1.5052, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 1.0810810810810812e-06, | |
| "loss": 1.572, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.6216216216216219e-06, | |
| "loss": 1.4854, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 2.1621621621621623e-06, | |
| "loss": 1.4884, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 2.1621621621621623e-06, | |
| "loss": 1.5765, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 2.702702702702703e-06, | |
| "loss": 1.5366, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.2432432432432437e-06, | |
| "loss": 1.5657, | |
| "step": 7 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.7837837837837844e-06, | |
| "loss": 1.5046, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.7837837837837844e-06, | |
| "loss": 1.4414, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.324324324324325e-06, | |
| "loss": 1.4408, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.864864864864866e-06, | |
| "loss": 1.4576, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 5.405405405405406e-06, | |
| "loss": 1.4792, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 5.405405405405406e-06, | |
| "loss": 1.3751, | |
| "step": 13 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 5.405405405405406e-06, | |
| "loss": 1.2943, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 5.945945945945947e-06, | |
| "loss": 1.3096, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 6.486486486486487e-06, | |
| "loss": 1.3018, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 7.027027027027028e-06, | |
| "loss": 1.3502, | |
| "step": 17 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 7.567567567567569e-06, | |
| "loss": 1.2168, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 8.108108108108109e-06, | |
| "loss": 1.2869, | |
| "step": 19 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 8.64864864864865e-06, | |
| "loss": 1.201, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 8.64864864864865e-06, | |
| "loss": 1.2444, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 8.64864864864865e-06, | |
| "loss": 1.2378, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 8.64864864864865e-06, | |
| "loss": 1.2257, | |
| "step": 23 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 8.64864864864865e-06, | |
| "loss": 1.2749, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 9.189189189189191e-06, | |
| "loss": 1.2798, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 9.729729729729732e-06, | |
| "loss": 1.2705, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 1.027027027027027e-05, | |
| "loss": 1.2711, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 1.0810810810810812e-05, | |
| "loss": 1.2435, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 1.1351351351351352e-05, | |
| "loss": 1.2006, | |
| "step": 29 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 1.1351351351351352e-05, | |
| "loss": 1.1974, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 1.1891891891891894e-05, | |
| "loss": 1.2233, | |
| "step": 31 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 1.2432432432432433e-05, | |
| "loss": 1.2902, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 1.2972972972972975e-05, | |
| "loss": 1.1509, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 1.3513513513513515e-05, | |
| "loss": 1.198, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 1.4054054054054055e-05, | |
| "loss": 1.1384, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 1.4594594594594596e-05, | |
| "loss": 1.2207, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 1.5135135135135138e-05, | |
| "loss": 1.1605, | |
| "step": 37 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 1.5675675675675676e-05, | |
| "loss": 1.2356, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 1.6216216216216218e-05, | |
| "loss": 1.212, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 1.6756756756756757e-05, | |
| "loss": 1.0847, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 1.72972972972973e-05, | |
| "loss": 1.1486, | |
| "step": 41 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 1.783783783783784e-05, | |
| "loss": 1.1702, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 1.8378378378378383e-05, | |
| "loss": 1.1427, | |
| "step": 43 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 1.891891891891892e-05, | |
| "loss": 1.1607, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 1.9459459459459463e-05, | |
| "loss": 1.1498, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 2e-05, | |
| "loss": 1.1193, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 1.999996461903301e-05, | |
| "loss": 1.2106, | |
| "step": 47 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 1.9999858476382388e-05, | |
| "loss": 1.2074, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 1.9999681572799226e-05, | |
| "loss": 1.1671, | |
| "step": 49 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 1.9999433909535333e-05, | |
| "loss": 1.1289, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 1.9999115488343213e-05, | |
| "loss": 1.1084, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 1.999872631147608e-05, | |
| "loss": 1.1514, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 1.999872631147608e-05, | |
| "loss": 1.2061, | |
| "step": 53 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 1.999826638168783e-05, | |
| "loss": 1.1689, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 1.9997735702233006e-05, | |
| "loss": 1.1228, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 1.99971342768668e-05, | |
| "loss": 1.1259, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 1.999646210984502e-05, | |
| "loss": 1.1042, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 1.999571920592405e-05, | |
| "loss": 1.1362, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 1.9994905570360817e-05, | |
| "loss": 1.1776, | |
| "step": 59 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 1.999402120891276e-05, | |
| "loss": 1.2139, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 1.99930661278378e-05, | |
| "loss": 1.0975, | |
| "step": 61 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 1.9992040333894273e-05, | |
| "loss": 1.1779, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 1.9990943834340893e-05, | |
| "loss": 1.1284, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 1.9989776636936705e-05, | |
| "loss": 1.1707, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 1.9988538749941024e-05, | |
| "loss": 1.2277, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 1.9987230182113374e-05, | |
| "loss": 1.2012, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 1.998585094271344e-05, | |
| "loss": 1.1742, | |
| "step": 67 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 1.998440104150098e-05, | |
| "loss": 1.1874, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 1.998440104150098e-05, | |
| "loss": 1.1938, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 1.998288048873578e-05, | |
| "loss": 1.1144, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 1.9981289295177566e-05, | |
| "loss": 1.2661, | |
| "step": 71 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 1.9979627472085927e-05, | |
| "loss": 1.1696, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 1.997789503122025e-05, | |
| "loss": 1.1022, | |
| "step": 73 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 1.9976091984839616e-05, | |
| "loss": 1.1423, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 1.9974218345702733e-05, | |
| "loss": 1.1428, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 1.9972274127067838e-05, | |
| "loss": 1.1857, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 1.997025934269259e-05, | |
| "loss": 1.1107, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 1.9968174006833996e-05, | |
| "loss": 1.1741, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 1.9966018134248296e-05, | |
| "loss": 1.1125, | |
| "step": 79 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 1.9963791740190863e-05, | |
| "loss": 1.1686, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 1.996149484041609e-05, | |
| "loss": 1.2111, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 1.9959127451177287e-05, | |
| "loss": 1.176, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 1.9956689589226555e-05, | |
| "loss": 1.141, | |
| "step": 83 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 1.9954181271814673e-05, | |
| "loss": 1.2343, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 1.9951602516690988e-05, | |
| "loss": 1.1353, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 1.9948953342103268e-05, | |
| "loss": 1.1547, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 1.994623376679758e-05, | |
| "loss": 1.1914, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 1.9943443810018174e-05, | |
| "loss": 1.1855, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 1.9940583491507314e-05, | |
| "loss": 1.2137, | |
| "step": 89 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 1.993765283150517e-05, | |
| "loss": 1.1967, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 1.9934651850749663e-05, | |
| "loss": 1.1978, | |
| "step": 91 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 1.9934651850749663e-05, | |
| "loss": 1.1091, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 1.9934651850749663e-05, | |
| "loss": 1.1659, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 1.9931580570476306e-05, | |
| "loss": 1.2263, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 1.9931580570476306e-05, | |
| "loss": 1.2255, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 1.9928439012418076e-05, | |
| "loss": 1.1399, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 1.9925227198805247e-05, | |
| "loss": 1.1647, | |
| "step": 97 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 1.9921945152365235e-05, | |
| "loss": 1.1642, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 1.9918592896322432e-05, | |
| "loss": 1.2057, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 1.9918592896322432e-05, | |
| "loss": 1.139, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 1.9915170454398045e-05, | |
| "loss": 1.2032, | |
| "step": 101 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 1.9911677850809943e-05, | |
| "loss": 1.1182, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 1.9908115110272463e-05, | |
| "loss": 1.1721, | |
| "step": 103 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 1.9904482257996244e-05, | |
| "loss": 1.1924, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 1.990077931968805e-05, | |
| "loss": 1.0885, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 1.9897006321550592e-05, | |
| "loss": 1.1357, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 1.9893163290282335e-05, | |
| "loss": 1.17, | |
| "step": 107 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 1.9893163290282335e-05, | |
| "loss": 1.1068, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 1.9889250253077306e-05, | |
| "loss": 1.0895, | |
| "step": 109 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 1.9889250253077306e-05, | |
| "loss": 1.1376, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 1.9885267237624923e-05, | |
| "loss": 1.2073, | |
| "step": 111 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 1.988121427210976e-05, | |
| "loss": 1.2018, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 1.98770913852114e-05, | |
| "loss": 1.0813, | |
| "step": 113 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 1.9872898606104175e-05, | |
| "loss": 1.145, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 1.9868635964457007e-05, | |
| "loss": 1.1122, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 1.986430349043317e-05, | |
| "loss": 1.1345, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 1.9859901214690094e-05, | |
| "loss": 1.0742, | |
| "step": 117 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 1.9855429168379127e-05, | |
| "loss": 1.0388, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 1.9855429168379127e-05, | |
| "loss": 1.1202, | |
| "step": 119 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 1.9855429168379127e-05, | |
| "loss": 1.17, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 1.9855429168379127e-05, | |
| "loss": 1.2388, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 1.985088738314534e-05, | |
| "loss": 1.0845, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 1.9846275891127275e-05, | |
| "loss": 1.1517, | |
| "step": 123 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 1.9841594724956746e-05, | |
| "loss": 1.1195, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 1.9836843917758593e-05, | |
| "loss": 1.1406, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 1.983202350315044e-05, | |
| "loss": 1.1288, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 1.982713351524248e-05, | |
| "loss": 1.2129, | |
| "step": 127 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 1.982713351524248e-05, | |
| "loss": 1.1557, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 1.982217398863721e-05, | |
| "loss": 1.1713, | |
| "step": 129 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 1.98171449584292e-05, | |
| "loss": 1.0706, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 1.9812046460204837e-05, | |
| "loss": 1.1652, | |
| "step": 131 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 1.9806878530042083e-05, | |
| "loss": 1.1527, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 1.9801641204510216e-05, | |
| "loss": 1.0598, | |
| "step": 133 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 1.9796334520669555e-05, | |
| "loss": 1.1816, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 1.9790958516071228e-05, | |
| "loss": 1.1124, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 1.978551322875688e-05, | |
| "loss": 1.0941, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 1.977999869725842e-05, | |
| "loss": 1.1864, | |
| "step": 137 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 1.977441496059774e-05, | |
| "loss": 1.1492, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 1.9768762058286433e-05, | |
| "loss": 1.1783, | |
| "step": 139 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 1.976304003032554e-05, | |
| "loss": 1.0771, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 1.9757248917205228e-05, | |
| "loss": 1.1648, | |
| "step": 141 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 1.975138875990454e-05, | |
| "loss": 1.1304, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 1.974545959989108e-05, | |
| "loss": 1.153, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 1.9739461479120727e-05, | |
| "loss": 1.1888, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 1.9739461479120727e-05, | |
| "loss": 1.109, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 1.973339444003735e-05, | |
| "loss": 1.1372, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 1.9727258525572487e-05, | |
| "loss": 1.1119, | |
| "step": 147 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 1.9721053779145057e-05, | |
| "loss": 1.1979, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 1.9714780244661044e-05, | |
| "loss": 1.1624, | |
| "step": 149 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 1.9708437966513196e-05, | |
| "loss": 1.1357, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 1.9702026989580694e-05, | |
| "loss": 1.0822, | |
| "step": 151 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 1.969554735922885e-05, | |
| "loss": 1.1286, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 1.968899912130879e-05, | |
| "loss": 1.1906, | |
| "step": 153 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 1.968899912130879e-05, | |
| "loss": 1.1154, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 1.9682382322157103e-05, | |
| "loss": 1.1316, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 1.9675697008595545e-05, | |
| "loss": 1.1555, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 1.9668943227930686e-05, | |
| "loss": 1.1378, | |
| "step": 157 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 1.966212102795358e-05, | |
| "loss": 1.0963, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 1.966212102795358e-05, | |
| "loss": 1.1602, | |
| "step": 159 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 1.965523045693944e-05, | |
| "loss": 1.1456, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 1.964827156364728e-05, | |
| "loss": 1.0217, | |
| "step": 161 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 1.964124439731957e-05, | |
| "loss": 1.186, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 1.9634149007681894e-05, | |
| "loss": 1.1621, | |
| "step": 163 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 1.962698544494261e-05, | |
| "loss": 1.198, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 1.962698544494261e-05, | |
| "loss": 1.1155, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 1.9619753759792466e-05, | |
| "loss": 1.2091, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 1.9619753759792466e-05, | |
| "loss": 1.2163, | |
| "step": 167 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 1.961245400340427e-05, | |
| "loss": 1.1876, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 1.961245400340427e-05, | |
| "loss": 1.0896, | |
| "step": 169 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 1.9605086227432512e-05, | |
| "loss": 1.0972, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 1.9597650484012997e-05, | |
| "loss": 1.131, | |
| "step": 171 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 1.9590146825762476e-05, | |
| "loss": 1.1095, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 1.9582575305778297e-05, | |
| "loss": 1.119, | |
| "step": 173 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 1.9582575305778297e-05, | |
| "loss": 1.153, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 1.9574935977637994e-05, | |
| "loss": 1.0994, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 1.9567228895398936e-05, | |
| "loss": 1.1747, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 1.955945411359792e-05, | |
| "loss": 1.1566, | |
| "step": 177 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 1.9551611687250808e-05, | |
| "loss": 1.1407, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 1.9543701671852127e-05, | |
| "loss": 1.2034, | |
| "step": 179 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 1.9535724123374674e-05, | |
| "loss": 1.1647, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 1.952767909826913e-05, | |
| "loss": 1.2311, | |
| "step": 181 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 1.951956665346364e-05, | |
| "loss": 1.1375, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 1.951138684636344e-05, | |
| "loss": 1.2173, | |
| "step": 183 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 1.9503139734850426e-05, | |
| "loss": 1.1083, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 1.9494825377282746e-05, | |
| "loss": 1.1478, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 1.9486443832494414e-05, | |
| "loss": 1.1635, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 1.9477995159794854e-05, | |
| "loss": 1.2343, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 1.9469479418968506e-05, | |
| "loss": 1.1554, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 1.9460896670274408e-05, | |
| "loss": 1.1989, | |
| "step": 189 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 1.9452246974445743e-05, | |
| "loss": 1.1154, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 1.9443530392689434e-05, | |
| "loss": 1.1306, | |
| "step": 191 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 1.94347469866857e-05, | |
| "loss": 1.124, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 1.9425896818587615e-05, | |
| "loss": 1.1231, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 1.941697995102069e-05, | |
| "loss": 1.1638, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 1.9407996447082394e-05, | |
| "loss": 1.0767, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 1.939894637034174e-05, | |
| "loss": 1.0996, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 1.9389829784838833e-05, | |
| "loss": 1.1234, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 1.938064675508438e-05, | |
| "loss": 1.0935, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 1.9371397346059286e-05, | |
| "loss": 1.1513, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 1.936208162321415e-05, | |
| "loss": 1.1359, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 1.9352699652468835e-05, | |
| "loss": 1.2211, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 1.9343251500211977e-05, | |
| "loss": 1.1504, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 1.933373723330053e-05, | |
| "loss": 1.1768, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 1.9324156919059286e-05, | |
| "loss": 1.1934, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 1.93145106252804e-05, | |
| "loss": 1.1678, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 1.93145106252804e-05, | |
| "loss": 1.1403, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 1.9304798420222918e-05, | |
| "loss": 1.2295, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 1.9295020372612276e-05, | |
| "loss": 1.1167, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 1.9285176551639826e-05, | |
| "loss": 1.1411, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 1.9275267026962358e-05, | |
| "loss": 1.1209, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 1.9265291868701584e-05, | |
| "loss": 1.1431, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 1.9255251147443646e-05, | |
| "loss": 1.0942, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 1.924514493423864e-05, | |
| "loss": 1.2042, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 1.9234973300600074e-05, | |
| "loss": 1.2075, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 1.92247363185044e-05, | |
| "loss": 1.1006, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 1.9214434060390484e-05, | |
| "loss": 1.1622, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 1.9204066599159094e-05, | |
| "loss": 1.203, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 1.9193634008172396e-05, | |
| "loss": 1.094, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 1.9183136361253417e-05, | |
| "loss": 1.1161, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 1.917257373268554e-05, | |
| "loss": 1.2007, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 1.916194619721196e-05, | |
| "loss": 1.2133, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 1.915125383003518e-05, | |
| "loss": 1.2108, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 1.914049670681646e-05, | |
| "loss": 1.0996, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 1.912967490367528e-05, | |
| "loss": 1.2501, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 1.9118788497188815e-05, | |
| "loss": 1.1252, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 1.9107837564391376e-05, | |
| "loss": 1.1589, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 1.9096822182773887e-05, | |
| "loss": 1.2532, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 1.9085742430283322e-05, | |
| "loss": 1.1482, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 1.907459838532215e-05, | |
| "loss": 1.1188, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 1.9063390126747778e-05, | |
| "loss": 1.1482, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 1.9052117733872025e-05, | |
| "loss": 1.1335, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 1.904078128646052e-05, | |
| "loss": 1.1648, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 1.902938086473215e-05, | |
| "loss": 1.1477, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.901791654935852e-05, | |
| "loss": 1.1836, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.9006388421463322e-05, | |
| "loss": 1.2014, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.899479656262183e-05, | |
| "loss": 1.1979, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.898314105486028e-05, | |
| "loss": 1.1352, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.8971421980655295e-05, | |
| "loss": 1.1553, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.8959639422933316e-05, | |
| "loss": 1.1289, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.894779346506999e-05, | |
| "loss": 1.1257, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.893588419088962e-05, | |
| "loss": 1.1614, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.892391168466452e-05, | |
| "loss": 1.1656, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.891187603111447e-05, | |
| "loss": 1.1423, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8899777315406073e-05, | |
| "loss": 1.1309, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8887615623152188e-05, | |
| "loss": 1.0227, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.88753910404113e-05, | |
| "loss": 1.1151, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.8863103653686917e-05, | |
| "loss": 1.1514, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.8850753549926967e-05, | |
| "loss": 1.2559, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.8838340816523175e-05, | |
| "loss": 1.2324, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.8838340816523175e-05, | |
| "loss": 1.2488, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.8838340816523175e-05, | |
| "loss": 1.1292, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.8825865541310438e-05, | |
| "loss": 1.1782, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.8813327812566217e-05, | |
| "loss": 1.1617, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.880072771900991e-05, | |
| "loss": 1.1814, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.878806534980221e-05, | |
| "loss": 1.1925, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.8775340794544497e-05, | |
| "loss": 1.1433, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.876255414327818e-05, | |
| "loss": 1.1688, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.8749705486484074e-05, | |
| "loss": 1.1577, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.8736794915081765e-05, | |
| "loss": 1.1985, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.8723822520428954e-05, | |
| "loss": 1.0873, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.8710788394320807e-05, | |
| "loss": 1.1821, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.8697692628989327e-05, | |
| "loss": 1.1282, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.868453531710268e-05, | |
| "loss": 1.1346, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.8671316551764552e-05, | |
| "loss": 1.105, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.865803642651348e-05, | |
| "loss": 1.2267, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.8644695035322203e-05, | |
| "loss": 1.1812, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.8644695035322203e-05, | |
| "loss": 1.2009, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.8631292472596978e-05, | |
| "loss": 1.2195, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.8617828833176935e-05, | |
| "loss": 1.1591, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.860430421233339e-05, | |
| "loss": 1.087, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.859071870576918e-05, | |
| "loss": 1.1992, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.857707240961797e-05, | |
| "loss": 1.1432, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.8563365420443594e-05, | |
| "loss": 1.131, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.854959783523936e-05, | |
| "loss": 1.2095, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.854959783523936e-05, | |
| "loss": 1.1089, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.853576975142736e-05, | |
| "loss": 1.1882, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.852188126685779e-05, | |
| "loss": 1.145, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.8507932479808254e-05, | |
| "loss": 1.1407, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.8493923488983066e-05, | |
| "loss": 1.219, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.847985439351256e-05, | |
| "loss": 1.1448, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.846572529295237e-05, | |
| "loss": 1.1969, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.845153628728274e-05, | |
| "loss": 1.1975, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.8437287476907828e-05, | |
| "loss": 1.1798, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.842297896265497e-05, | |
| "loss": 1.1761, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.8408610845773974e-05, | |
| "loss": 1.1433, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.8394183227936418e-05, | |
| "loss": 1.1364, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.8379696211234918e-05, | |
| "loss": 1.1431, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.8365149898182403e-05, | |
| "loss": 1.1093, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.8350544391711396e-05, | |
| "loss": 1.0717, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.833587979517329e-05, | |
| "loss": 1.1857, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 1.8321156212337604e-05, | |
| "loss": 1.0927, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 1.8321156212337604e-05, | |
| "loss": 1.1124, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 1.830637374739126e-05, | |
| "loss": 1.2296, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 1.829153250493783e-05, | |
| "loss": 1.1223, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 1.827663258999683e-05, | |
| "loss": 1.1712, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 1.8261674108002925e-05, | |
| "loss": 1.1244, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 1.8261674108002925e-05, | |
| "loss": 1.1804, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 1.824665716480524e-05, | |
| "loss": 1.0104, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 1.823158186666656e-05, | |
| "loss": 1.1202, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 1.821644832026261e-05, | |
| "loss": 1.1698, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 1.82012566326813e-05, | |
| "loss": 1.1408, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 1.8186006911421937e-05, | |
| "loss": 1.0759, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 1.817069926439451e-05, | |
| "loss": 1.1292, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 1.8155333799918883e-05, | |
| "loss": 1.201, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 1.8155333799918883e-05, | |
| "loss": 1.1038, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 1.8139910626724058e-05, | |
| "loss": 1.1499, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 1.8124429853947387e-05, | |
| "loss": 1.1365, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 1.8108891591133812e-05, | |
| "loss": 1.1646, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 1.809329594823509e-05, | |
| "loss": 1.1382, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 1.8077643035609006e-05, | |
| "loss": 1.1458, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 1.806193296401859e-05, | |
| "loss": 1.1096, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 1.804616584463136e-05, | |
| "loss": 1.1937, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 1.803034178901849e-05, | |
| "loss": 1.1677, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 1.8014460909154058e-05, | |
| "loss": 1.2202, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 1.799852331741425e-05, | |
| "loss": 1.2097, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 1.799852331741425e-05, | |
| "loss": 1.1376, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 1.7982529126576543e-05, | |
| "loss": 1.1526, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 1.7966478449818925e-05, | |
| "loss": 1.1528, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 1.7950371400719087e-05, | |
| "loss": 1.1735, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 1.7934208093253625e-05, | |
| "loss": 1.1243, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 1.7917988641797227e-05, | |
| "loss": 1.15, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 1.7901713161121873e-05, | |
| "loss": 1.1155, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 1.7885381766396008e-05, | |
| "loss": 1.174, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 1.786899457318374e-05, | |
| "loss": 1.2247, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 1.7852551697444017e-05, | |
| "loss": 1.0521, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 1.783605325552981e-05, | |
| "loss": 1.1616, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 1.7819499364187282e-05, | |
| "loss": 1.1757, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.780289014055497e-05, | |
| "loss": 1.1687, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.7786225702162955e-05, | |
| "loss": 1.1133, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.7769506166932026e-05, | |
| "loss": 1.2429, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.7752731653172847e-05, | |
| "loss": 1.0541, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.7735902279585118e-05, | |
| "loss": 1.1295, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.7719018165256745e-05, | |
| "loss": 1.0616, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.7702079429662986e-05, | |
| "loss": 1.2172, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.7685086192665605e-05, | |
| "loss": 1.0925, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.7668038574512045e-05, | |
| "loss": 1.1621, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.7668038574512045e-05, | |
| "loss": 1.1228, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.7650936695834536e-05, | |
| "loss": 1.2114, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.763378067764929e-05, | |
| "loss": 1.1061, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.7616570641355602e-05, | |
| "loss": 1.1365, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.759930670873502e-05, | |
| "loss": 1.1526, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.758198900195047e-05, | |
| "loss": 1.1304, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.7564617643545395e-05, | |
| "loss": 1.1383, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.7547192756442887e-05, | |
| "loss": 1.1379, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.7547192756442887e-05, | |
| "loss": 1.0709, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.7529714463944815e-05, | |
| "loss": 1.1121, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.751218288973096e-05, | |
| "loss": 1.2317, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.751218288973096e-05, | |
| "loss": 1.1517, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.751218288973096e-05, | |
| "loss": 1.142, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.7494598157858127e-05, | |
| "loss": 1.1774, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.7476960392759284e-05, | |
| "loss": 1.1382, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 1.7459269719242665e-05, | |
| "loss": 1.1377, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 1.74415262624909e-05, | |
| "loss": 1.1379, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 1.742373014806012e-05, | |
| "loss": 1.1947, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 1.740588150187907e-05, | |
| "loss": 1.1375, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.7387980450248222e-05, | |
| "loss": 1.1951, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.7370027119838884e-05, | |
| "loss": 1.1919, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.735202163769229e-05, | |
| "loss": 1.0869, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.7333964131218714e-05, | |
| "loss": 1.1057, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.7315854728196568e-05, | |
| "loss": 1.2241, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.729769355677149e-05, | |
| "loss": 1.0972, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.7279480745455433e-05, | |
| "loss": 1.1912, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.7261216423125782e-05, | |
| "loss": 1.0613, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.724290071902441e-05, | |
| "loss": 1.1275, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.7224533762756775e-05, | |
| "loss": 1.1917, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.720611568429103e-05, | |
| "loss": 1.1288, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.720611568429103e-05, | |
| "loss": 1.1787, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.718764661395704e-05, | |
| "loss": 1.1365, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.716912668244553e-05, | |
| "loss": 1.2281, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.715055602080711e-05, | |
| "loss": 1.1344, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.7131934760451385e-05, | |
| "loss": 1.1644, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 1.7113263033145985e-05, | |
| "loss": 1.13, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 1.7094540971015663e-05, | |
| "loss": 1.1143, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 1.7075768706541355e-05, | |
| "loss": 1.1438, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 1.7056946372559234e-05, | |
| "loss": 1.137, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 1.7056946372559234e-05, | |
| "loss": 1.0825, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 1.7038074102259775e-05, | |
| "loss": 1.1002, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 1.7019152029186817e-05, | |
| "loss": 1.0477, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 1.70001802872366e-05, | |
| "loss": 1.0862, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.6981159010656847e-05, | |
| "loss": 1.2227, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.6962088334045785e-05, | |
| "loss": 1.1665, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.6962088334045785e-05, | |
| "loss": 1.1262, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.694296839235121e-05, | |
| "loss": 1.1451, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.692379932086953e-05, | |
| "loss": 1.1458, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.692379932086953e-05, | |
| "loss": 1.1896, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.6904581255244802e-05, | |
| "loss": 1.1233, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.688531433146777e-05, | |
| "loss": 1.1583, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.6865998685874923e-05, | |
| "loss": 1.2036, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.6865998685874923e-05, | |
| "loss": 1.1578, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.6846634455147498e-05, | |
| "loss": 1.1066, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.6827221776310532e-05, | |
| "loss": 1.1191, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.6807760786731905e-05, | |
| "loss": 1.1265, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.6788251624121335e-05, | |
| "loss": 1.2217, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.6768694426529432e-05, | |
| "loss": 1.1626, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.6749089332346714e-05, | |
| "loss": 1.1616, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.672943648030261e-05, | |
| "loss": 1.1559, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 1.6709736009464504e-05, | |
| "loss": 1.193, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 1.668998805923675e-05, | |
| "loss": 1.1917, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 1.6670192769359643e-05, | |
| "loss": 1.1331, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 1.6670192769359643e-05, | |
| "loss": 1.1127, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.6670192769359643e-05, | |
| "loss": 1.2107, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.6650350279908497e-05, | |
| "loss": 1.0895, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.6630460731292597e-05, | |
| "loss": 1.146, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.661052426425424e-05, | |
| "loss": 1.1694, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 1.6590541019867722e-05, | |
| "loss": 1.1629, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 1.6570511139538348e-05, | |
| "loss": 1.265, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 1.655043476500142e-05, | |
| "loss": 1.0617, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 1.6530312038321247e-05, | |
| "loss": 1.0527, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 1.6510143101890136e-05, | |
| "loss": 1.1541, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 1.6489928098427383e-05, | |
| "loss": 1.1049, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 1.6469667170978258e-05, | |
| "loss": 1.1096, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 1.6449360462913005e-05, | |
| "loss": 1.1301, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 1.642900811792582e-05, | |
| "loss": 1.0167, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 1.640861028003383e-05, | |
| "loss": 1.132, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 1.6388167093576083e-05, | |
| "loss": 1.0682, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 1.6388167093576083e-05, | |
| "loss": 1.0953, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "learning_rate": 1.6367678703212515e-05, | |
| "loss": 1.0795, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "learning_rate": 1.6367678703212515e-05, | |
| "loss": 1.0682, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "learning_rate": 1.6347145253922942e-05, | |
| "loss": 1.2072, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "learning_rate": 1.6347145253922942e-05, | |
| "loss": 1.0967, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "learning_rate": 1.632656689100602e-05, | |
| "loss": 1.0267, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "learning_rate": 1.632656689100602e-05, | |
| "loss": 1.0864, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "learning_rate": 1.6305943760078226e-05, | |
| "loss": 1.0798, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "learning_rate": 1.628527600707283e-05, | |
| "loss": 1.1086, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 1.6264563778238834e-05, | |
| "loss": 1.063, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 1.6243807220139988e-05, | |
| "loss": 1.1038, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 1.6223006479653708e-05, | |
| "loss": 1.0511, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 1.6202161703970057e-05, | |
| "loss": 1.1235, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "learning_rate": 1.6181273040590696e-05, | |
| "loss": 1.1478, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "learning_rate": 1.616034063732785e-05, | |
| "loss": 1.0928, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "learning_rate": 1.613936464230325e-05, | |
| "loss": 1.1063, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "learning_rate": 1.6118345203947093e-05, | |
| "loss": 1.094, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "learning_rate": 1.6118345203947093e-05, | |
| "loss": 1.0798, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "learning_rate": 1.6097282470996997e-05, | |
| "loss": 1.14, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "learning_rate": 1.6076176592496926e-05, | |
| "loss": 1.1188, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "learning_rate": 1.605502771779616e-05, | |
| "loss": 1.1085, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "learning_rate": 1.603383599654823e-05, | |
| "loss": 1.1473, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "learning_rate": 1.603383599654823e-05, | |
| "loss": 1.0793, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "learning_rate": 1.601260157870985e-05, | |
| "loss": 1.0635, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "learning_rate": 1.599132461453987e-05, | |
| "loss": 1.0208, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "learning_rate": 1.5970005254598204e-05, | |
| "loss": 1.0608, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "learning_rate": 1.594864364974476e-05, | |
| "loss": 1.0763, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "learning_rate": 1.592723995113839e-05, | |
| "loss": 1.1966, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "learning_rate": 1.592723995113839e-05, | |
| "loss": 1.1704, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "learning_rate": 1.5905794310235808e-05, | |
| "loss": 1.1497, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "learning_rate": 1.5884306878790512e-05, | |
| "loss": 1.1819, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "learning_rate": 1.586277780885172e-05, | |
| "loss": 1.0923, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "learning_rate": 1.58412072527633e-05, | |
| "loss": 1.0444, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "learning_rate": 1.5819595363162682e-05, | |
| "loss": 1.0953, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 1.5797942292979767e-05, | |
| "loss": 1.119, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 1.577624819543587e-05, | |
| "loss": 1.0497, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 1.5754513224042625e-05, | |
| "loss": 1.1818, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 1.573273753260089e-05, | |
| "loss": 1.1151, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 1.571092127519967e-05, | |
| "loss": 1.063, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 1.568906460621502e-05, | |
| "loss": 1.1638, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 1.566716768030896e-05, | |
| "loss": 1.0469, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 1.5645230652428367e-05, | |
| "loss": 1.1851, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 1.5623253677803897e-05, | |
| "loss": 1.1276, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 1.5601236911948876e-05, | |
| "loss": 1.1357, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 1.5579180510658187e-05, | |
| "loss": 1.1378, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 1.5557084630007206e-05, | |
| "loss": 1.0875, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 1.5534949426350642e-05, | |
| "loss": 1.1506, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 1.551277505632149e-05, | |
| "loss": 1.0844, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 1.549056167682987e-05, | |
| "loss": 1.0969, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 1.546830944506196e-05, | |
| "loss": 1.1575, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 1.544601851847885e-05, | |
| "loss": 1.1057, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 1.542368905481545e-05, | |
| "loss": 1.0743, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 1.5401321212079366e-05, | |
| "loss": 1.126, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 1.5378915148549772e-05, | |
| "loss": 1.1383, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 1.5356471022776315e-05, | |
| "loss": 1.0252, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 1.5333988993577958e-05, | |
| "loss": 1.0526, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 1.5311469220041903e-05, | |
| "loss": 1.092, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 1.5288911861522413e-05, | |
| "loss": 1.1172, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 1.526631707763972e-05, | |
| "loss": 1.1228, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 1.5243685028278888e-05, | |
| "loss": 1.1605, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 1.5221015873588672e-05, | |
| "loss": 1.1598, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 1.5198309773980397e-05, | |
| "loss": 1.0659, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 1.5175566890126812e-05, | |
| "loss": 1.1201, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 1.5152787382960968e-05, | |
| "loss": 1.1096, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 1.5152787382960968e-05, | |
| "loss": 1.1627, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 1.5152787382960968e-05, | |
| "loss": 1.0902, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 1.5129971413675055e-05, | |
| "loss": 1.1086, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 1.510711914371929e-05, | |
| "loss": 1.0882, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 1.510711914371929e-05, | |
| "loss": 1.1937, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 1.5084230734800754e-05, | |
| "loss": 1.1105, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 1.5084230734800754e-05, | |
| "loss": 1.0682, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 1.5084230734800754e-05, | |
| "loss": 1.0745, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 1.5084230734800754e-05, | |
| "loss": 1.1605, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 1.5061306348882252e-05, | |
| "loss": 1.1498, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 1.5038346148181178e-05, | |
| "loss": 1.0581, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 1.5015350295168344e-05, | |
| "loss": 1.152, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 1.4992318952566862e-05, | |
| "loss": 1.1908, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 1.4992318952566862e-05, | |
| "loss": 1.2114, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 1.4969252283350964e-05, | |
| "loss": 1.1433, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 1.4969252283350964e-05, | |
| "loss": 1.1324, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 1.4946150450744859e-05, | |
| "loss": 1.1062, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 1.4923013618221584e-05, | |
| "loss": 1.0513, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 1.4899841949501845e-05, | |
| "loss": 1.1276, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 1.4876635608552845e-05, | |
| "loss": 1.0997, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 1.4876635608552845e-05, | |
| "loss": 1.0966, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 1.4853394759587146e-05, | |
| "loss": 1.0841, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 1.4830119567061484e-05, | |
| "loss": 1.0822, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 1.4806810195675627e-05, | |
| "loss": 1.1052, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 1.4783466810371195e-05, | |
| "loss": 1.1415, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 1.4760089576330493e-05, | |
| "loss": 1.1243, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 1.4736678658975357e-05, | |
| "loss": 1.078, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 1.471323422396596e-05, | |
| "loss": 1.0579, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 1.4689756437199658e-05, | |
| "loss": 1.1387, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 1.4689756437199658e-05, | |
| "loss": 1.0594, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 1.4666245464809818e-05, | |
| "loss": 1.1052, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 1.4666245464809818e-05, | |
| "loss": 1.0599, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 1.4642701473164618e-05, | |
| "loss": 1.1142, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 1.4619124628865904e-05, | |
| "loss": 1.0957, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 1.4619124628865904e-05, | |
| "loss": 1.1183, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 1.459551509874798e-05, | |
| "loss": 1.1294, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 1.4571873049876452e-05, | |
| "loss": 1.1012, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 1.454819864954703e-05, | |
| "loss": 1.0487, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 1.4524492065284344e-05, | |
| "loss": 1.0667, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 1.4500753464840775e-05, | |
| "loss": 1.0578, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 1.4476983016195245e-05, | |
| "loss": 1.0845, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 1.4453180887552052e-05, | |
| "loss": 1.1243, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 1.4429347247339656e-05, | |
| "loss": 0.9873, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 1.4405482264209512e-05, | |
| "loss": 1.1144, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 1.4381586107034849e-05, | |
| "loss": 1.072, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 1.4381586107034849e-05, | |
| "loss": 1.0671, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 1.4357658944909496e-05, | |
| "loss": 1.0966, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 1.4333700947146686e-05, | |
| "loss": 1.0818, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 1.4309712283277839e-05, | |
| "loss": 1.0918, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 1.4309712283277839e-05, | |
| "loss": 1.0109, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 1.4285693123051385e-05, | |
| "loss": 1.0969, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 1.4261643636431539e-05, | |
| "loss": 1.119, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 1.4237563993597133e-05, | |
| "loss": 1.1245, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 1.4237563993597133e-05, | |
| "loss": 1.152, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 1.4213454364940362e-05, | |
| "loss": 1.1732, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 1.4189314921065629e-05, | |
| "loss": 1.062, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 1.4165145832788305e-05, | |
| "loss": 1.1283, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 1.4140947271133536e-05, | |
| "loss": 1.0471, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 1.4116719407335022e-05, | |
| "loss": 1.0625, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 1.4092462412833811e-05, | |
| "loss": 1.1946, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 1.406817645927709e-05, | |
| "loss": 1.0807, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 1.4043861718516964e-05, | |
| "loss": 1.1708, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 1.4019518362609239e-05, | |
| "loss": 1.0088, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 1.399514656381221e-05, | |
| "loss": 1.0797, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 1.3970746494585439e-05, | |
| "loss": 1.0838, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 1.3946318327588534e-05, | |
| "loss": 1.0151, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 1.3946318327588534e-05, | |
| "loss": 1.0555, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 1.3921862235679929e-05, | |
| "loss": 1.1403, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 1.389737839191566e-05, | |
| "loss": 1.0558, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 1.3872866969548143e-05, | |
| "loss": 1.1357, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 1.3872866969548143e-05, | |
| "loss": 1.1351, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 1.384832814202494e-05, | |
| "loss": 1.0569, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 1.3823762082987544e-05, | |
| "loss": 1.0458, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 1.3799168966270139e-05, | |
| "loss": 1.1161, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 1.3774548965898371e-05, | |
| "loss": 1.1268, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 1.3749902256088125e-05, | |
| "loss": 1.0403, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 1.3725229011244294e-05, | |
| "loss": 1.0115, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 1.3700529405959517e-05, | |
| "loss": 1.15, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 1.3675803615012993e-05, | |
| "loss": 1.053, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 1.3651051813369188e-05, | |
| "loss": 1.1024, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 1.3626274176176645e-05, | |
| "loss": 1.0702, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 1.3601470878766714e-05, | |
| "loss": 1.0325, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 1.3601470878766714e-05, | |
| "loss": 1.1449, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 1.3576642096652322e-05, | |
| "loss": 1.1638, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 1.3551788005526738e-05, | |
| "loss": 1.0787, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 1.3526908781262314e-05, | |
| "loss": 1.1074, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 1.3526908781262314e-05, | |
| "loss": 0.9868, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 1.3502004599909255e-05, | |
| "loss": 1.0782, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 1.3477075637694362e-05, | |
| "loss": 1.1022, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 1.3452122071019797e-05, | |
| "loss": 1.1311, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 1.3427144076461818e-05, | |
| "loss": 1.1138, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 1.3402141830769551e-05, | |
| "loss": 1.0972, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 1.3377115510863716e-05, | |
| "loss": 1.0664, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 1.3352065293835399e-05, | |
| "loss": 1.1277, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 1.3326991356944776e-05, | |
| "loss": 1.1859, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 1.3301893877619874e-05, | |
| "loss": 1.0863, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 1.3276773033455312e-05, | |
| "loss": 1.1276, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 1.3251629002211042e-05, | |
| "loss": 1.1127, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 1.322646196181109e-05, | |
| "loss": 1.0815, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 1.3201272090342303e-05, | |
| "loss": 1.0121, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 1.3176059566053083e-05, | |
| "loss": 1.0896, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 1.3150824567352128e-05, | |
| "loss": 1.1186, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 1.3125567272807167e-05, | |
| "loss": 1.1824, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 1.3100287861143703e-05, | |
| "loss": 1.1333, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 1.3074986511243741e-05, | |
| "loss": 1.1035, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 1.3074986511243741e-05, | |
| "loss": 1.0916, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 1.3049663402144528e-05, | |
| "loss": 1.2061, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 1.302431871303728e-05, | |
| "loss": 1.1206, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 1.2998952623265917e-05, | |
| "loss": 1.1941, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 1.2973565312325798e-05, | |
| "loss": 1.1687, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 1.2948156959862446e-05, | |
| "loss": 1.0668, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 1.2922727745670276e-05, | |
| "loss": 1.1493, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 1.2897277849691326e-05, | |
| "loss": 1.0341, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 1.2871807452013977e-05, | |
| "loss": 1.1093, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 1.2871807452013977e-05, | |
| "loss": 1.0038, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 1.28463167328717e-05, | |
| "loss": 1.1001, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 1.2820805872641745e-05, | |
| "loss": 1.0977, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 1.2795275051843893e-05, | |
| "loss": 1.1301, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 1.276972445113917e-05, | |
| "loss": 1.0771, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 1.2744154251328573e-05, | |
| "loss": 1.1446, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 1.2718564633351773e-05, | |
| "loss": 1.0806, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 1.2692955778285865e-05, | |
| "loss": 1.0326, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 1.266732786734405e-05, | |
| "loss": 1.0992, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 1.266732786734405e-05, | |
| "loss": 1.0253, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 1.2641681081874394e-05, | |
| "loss": 1.1312, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 1.2616015603358497e-05, | |
| "loss": 1.1362, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 1.2590331613410261e-05, | |
| "loss": 1.1169, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 1.2564629293774561e-05, | |
| "loss": 1.0657, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 1.253890882632598e-05, | |
| "loss": 1.0873, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 1.2513170393067527e-05, | |
| "loss": 1.1539, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 1.2513170393067527e-05, | |
| "loss": 1.1084, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 1.2513170393067527e-05, | |
| "loss": 1.1232, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 1.2487414176129322e-05, | |
| "loss": 1.0896, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 1.246164035776735e-05, | |
| "loss": 1.1459, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 1.2435849120362123e-05, | |
| "loss": 1.2163, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 1.2410040646417431e-05, | |
| "loss": 1.135, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 1.2384215118559027e-05, | |
| "loss": 1.1274, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 1.235837271953334e-05, | |
| "loss": 1.1326, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 1.2332513632206183e-05, | |
| "loss": 1.1253, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 1.2306638039561455e-05, | |
| "loss": 1.0119, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 1.2280746124699864e-05, | |
| "loss": 1.02, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 1.2280746124699864e-05, | |
| "loss": 1.1543, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 1.2254838070837596e-05, | |
| "loss": 1.0669, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 1.2228914061305059e-05, | |
| "loss": 1.064, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 1.2202974279545554e-05, | |
| "loss": 1.0327, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 1.2177018909113994e-05, | |
| "loss": 1.0624, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 1.21510481336756e-05, | |
| "loss": 1.0961, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 1.2125062137004602e-05, | |
| "loss": 1.1146, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 1.2099061102982939e-05, | |
| "loss": 1.0938, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 1.2073045215598953e-05, | |
| "loss": 1.0607, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 1.2073045215598953e-05, | |
| "loss": 1.1032, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 1.20470146589461e-05, | |
| "loss": 1.1542, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 1.2020969617221627e-05, | |
| "loss": 1.1351, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 1.199491027472529e-05, | |
| "loss": 1.1101, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 1.1968836815858038e-05, | |
| "loss": 1.1252, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 1.1942749425120704e-05, | |
| "loss": 1.0601, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 1.1916648287112714e-05, | |
| "loss": 1.1064, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 1.1890533586530766e-05, | |
| "loss": 1.1221, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 1.1864405508167532e-05, | |
| "loss": 1.0848, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 1.1864405508167532e-05, | |
| "loss": 1.1277, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 1.1838264236910348e-05, | |
| "loss": 1.0979, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 1.1838264236910348e-05, | |
| "loss": 1.0496, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 1.1812109957739907e-05, | |
| "loss": 1.0531, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 1.1785942855728945e-05, | |
| "loss": 1.0413, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 1.1759763116040936e-05, | |
| "loss": 1.1525, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 1.1733570923928785e-05, | |
| "loss": 1.0346, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 1.1733570923928785e-05, | |
| "loss": 1.0079, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 1.1707366464733501e-05, | |
| "loss": 1.1522, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 1.1681149923882913e-05, | |
| "loss": 1.1947, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 1.1654921486890327e-05, | |
| "loss": 1.1049, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 1.1628681339353244e-05, | |
| "loss": 1.1212, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 1.1602429666952015e-05, | |
| "loss": 1.0158, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 1.1576166655448558e-05, | |
| "loss": 1.125, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 1.1549892490685018e-05, | |
| "loss": 1.1191, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 1.1523607358582462e-05, | |
| "loss": 0.9951, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 1.149731144513958e-05, | |
| "loss": 1.0928, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 1.1471004936431327e-05, | |
| "loss": 1.0634, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 1.144468801860766e-05, | |
| "loss": 1.081, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 1.1418360877892165e-05, | |
| "loss": 1.0573, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 1.1392023700580796e-05, | |
| "loss": 1.0894, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 1.1365676673040502e-05, | |
| "loss": 1.0959, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 1.133931998170795e-05, | |
| "loss": 1.116, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 1.1312953813088183e-05, | |
| "loss": 1.0826, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 1.1286578353753313e-05, | |
| "loss": 1.079, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 1.1260193790341186e-05, | |
| "loss": 1.114, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 1.1233800309554083e-05, | |
| "loss": 1.0547, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 1.1207398098157371e-05, | |
| "loss": 1.0623, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 1.1180987342978209e-05, | |
| "loss": 0.9716, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 1.1154568230904204e-05, | |
| "loss": 1.0553, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 1.1128140948882107e-05, | |
| "loss": 1.084, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 1.1101705683916473e-05, | |
| "loss": 1.1569, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 1.1101705683916473e-05, | |
| "loss": 1.1716, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 1.1075262623068352e-05, | |
| "loss": 1.1011, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 1.1048811953453955e-05, | |
| "loss": 1.0421, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 1.1022353862243338e-05, | |
| "loss": 1.1387, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 1.0995888536659067e-05, | |
| "loss": 1.1285, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 1.096941616397491e-05, | |
| "loss": 1.0415, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 1.0942936931514492e-05, | |
| "loss": 1.0013, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 1.0916451026649981e-05, | |
| "loss": 1.0699, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 1.0916451026649981e-05, | |
| "loss": 1.1105, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 1.088995863680077e-05, | |
| "loss": 1.1356, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 1.0863459949432122e-05, | |
| "loss": 1.0638, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 1.0836955152053883e-05, | |
| "loss": 1.0228, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 1.081044443221912e-05, | |
| "loss": 1.0095, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 1.0783927977522819e-05, | |
| "loss": 1.0925, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 1.0757405975600534e-05, | |
| "loss": 1.1006, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 1.0730878614127087e-05, | |
| "loss": 1.1201, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 1.0704346080815218e-05, | |
| "loss": 1.1514, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 1.0677808563414256e-05, | |
| "loss": 1.0657, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 1.0651266249708816e-05, | |
| "loss": 1.0907, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 1.0624719327517434e-05, | |
| "loss": 1.1276, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 1.0598167984691276e-05, | |
| "loss": 1.1104, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 1.057161240911277e-05, | |
| "loss": 1.0683, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 1.0545052788694312e-05, | |
| "loss": 1.1155, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 1.0518489311376905e-05, | |
| "loss": 1.0613, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 1.0491922165128853e-05, | |
| "loss": 1.0657, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 1.0465351537944429e-05, | |
| "loss": 1.0921, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 1.043877761784252e-05, | |
| "loss": 1.143, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 1.043877761784252e-05, | |
| "loss": 1.0966, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 1.0412200592865331e-05, | |
| "loss": 1.0698, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 1.0412200592865331e-05, | |
| "loss": 1.0695, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 1.0385620651077024e-05, | |
| "loss": 1.121, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 1.0359037980562416e-05, | |
| "loss": 1.0819, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 1.0332452769425619e-05, | |
| "loss": 1.134, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 1.0305865205788728e-05, | |
| "loss": 1.0645, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 1.0279275477790487e-05, | |
| "loss": 1.1163, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 1.0252683773584953e-05, | |
| "loss": 1.1289, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 1.0226090281340168e-05, | |
| "loss": 1.0632, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 1.0199495189236828e-05, | |
| "loss": 1.0571, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 1.0172898685466947e-05, | |
| "loss": 1.1067, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 1.0146300958232528e-05, | |
| "loss": 1.1304, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 1.0119702195744236e-05, | |
| "loss": 1.0648, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 1.0093102586220056e-05, | |
| "loss": 1.0753, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 1.0066502317883969e-05, | |
| "loss": 1.1494, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 1.0039901578964619e-05, | |
| "loss": 1.103, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 1.0013300557693981e-05, | |
| "loss": 1.105, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 9.986699442306025e-06, | |
| "loss": 1.1234, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 9.960098421035383e-06, | |
| "loss": 1.1089, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 9.960098421035383e-06, | |
| "loss": 1.1128, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 9.933497682116035e-06, | |
| "loss": 1.0996, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 9.906897413779949e-06, | |
| "loss": 1.121, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 9.88029780425577e-06, | |
| "loss": 1.0723, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 9.853699041767473e-06, | |
| "loss": 1.085, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 9.827101314533056e-06, | |
| "loss": 1.0817, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 9.800504810763176e-06, | |
| "loss": 1.0649, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 9.773909718659831e-06, | |
| "loss": 1.1562, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 9.747316226415052e-06, | |
| "loss": 1.0294, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 9.720724522209518e-06, | |
| "loss": 1.0389, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 9.720724522209518e-06, | |
| "loss": 1.0775, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 9.694134794211277e-06, | |
| "loss": 1.0804, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 9.667547230574386e-06, | |
| "loss": 1.0589, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 9.64096201943759e-06, | |
| "loss": 1.1451, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 9.61437934892298e-06, | |
| "loss": 1.0446, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 9.587799407134672e-06, | |
| "loss": 1.088, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 9.587799407134672e-06, | |
| "loss": 1.1246, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 9.56122238215748e-06, | |
| "loss": 1.0897, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 9.534648462055576e-06, | |
| "loss": 1.1113, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 9.50807783487115e-06, | |
| "loss": 1.1456, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 9.481510688623098e-06, | |
| "loss": 1.0886, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 9.454947211305691e-06, | |
| "loss": 1.1339, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 9.42838759088723e-06, | |
| "loss": 1.1105, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 9.401832015308728e-06, | |
| "loss": 1.098, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 9.375280672482567e-06, | |
| "loss": 1.0945, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 9.348733750291186e-06, | |
| "loss": 1.0507, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 9.322191436585745e-06, | |
| "loss": 1.0621, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 9.295653919184787e-06, | |
| "loss": 1.0752, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 9.269121385872915e-06, | |
| "loss": 1.0413, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 9.242594024399467e-06, | |
| "loss": 1.0219, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 9.216072022477183e-06, | |
| "loss": 1.0307, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 9.189555567780882e-06, | |
| "loss": 1.1017, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 9.189555567780882e-06, | |
| "loss": 1.1019, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 9.16304484794612e-06, | |
| "loss": 1.0908, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 9.13654005056788e-06, | |
| "loss": 1.1263, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 9.110041363199233e-06, | |
| "loss": 1.0468, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "learning_rate": 9.083548973350019e-06, | |
| "loss": 1.113, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "learning_rate": 9.057063068485513e-06, | |
| "loss": 1.0335, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "learning_rate": 9.030583836025093e-06, | |
| "loss": 1.1527, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "learning_rate": 9.004111463340935e-06, | |
| "loss": 1.1044, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 8.977646137756662e-06, | |
| "loss": 1.0624, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 8.951188046546048e-06, | |
| "loss": 1.057, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 8.924737376931651e-06, | |
| "loss": 1.0724, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 8.898294316083529e-06, | |
| "loss": 1.0964, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 8.871859051117896e-06, | |
| "loss": 1.0759, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 8.8454317690958e-06, | |
| "loss": 0.9316, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 8.819012657021794e-06, | |
| "loss": 1.0518, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 8.79260190184263e-06, | |
| "loss": 1.1317, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 8.76619969044592e-06, | |
| "loss": 1.0858, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 8.739806209658812e-06, | |
| "loss": 1.0621, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 8.713421646246692e-06, | |
| "loss": 1.0204, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 8.687046186911819e-06, | |
| "loss": 1.092, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 8.660680018292053e-06, | |
| "loss": 1.0981, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 8.634323326959501e-06, | |
| "loss": 1.1185, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 8.60797629941921e-06, | |
| "loss": 1.0391, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 8.581639122107837e-06, | |
| "loss": 1.0094, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 8.555311981392342e-06, | |
| "loss": 1.0877, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "learning_rate": 8.528995063568673e-06, | |
| "loss": 1.0701, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "learning_rate": 8.502688554860426e-06, | |
| "loss": 1.1157, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "learning_rate": 8.47639264141754e-06, | |
| "loss": 1.1232, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "learning_rate": 8.450107509314983e-06, | |
| "loss": 1.0751, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 8.423833344551443e-06, | |
| "loss": 1.1229, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 8.423833344551443e-06, | |
| "loss": 1.0607, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 8.423833344551443e-06, | |
| "loss": 1.123, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 8.397570333047985e-06, | |
| "loss": 1.1089, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 8.37131866064676e-06, | |
| "loss": 1.114, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 8.345078513109677e-06, | |
| "loss": 1.1008, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 8.31885007611709e-06, | |
| "loss": 1.0697, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 8.2926335352665e-06, | |
| "loss": 1.0889, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 8.2926335352665e-06, | |
| "loss": 1.125, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 8.266429076071221e-06, | |
| "loss": 1.0054, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 8.240236883959067e-06, | |
| "loss": 1.0789, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 8.214057144271058e-06, | |
| "loss": 1.0938, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "learning_rate": 8.187890042260094e-06, | |
| "loss": 1.1036, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "learning_rate": 8.161735763089654e-06, | |
| "loss": 1.1085, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "learning_rate": 8.13559449183247e-06, | |
| "loss": 1.1132, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "learning_rate": 8.109466413469238e-06, | |
| "loss": 1.0786, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 8.083351712887288e-06, | |
| "loss": 1.0557, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 8.057250574879296e-06, | |
| "loss": 1.0675, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 8.031163184141965e-06, | |
| "loss": 1.1093, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 8.005089725274711e-06, | |
| "loss": 1.1161, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "learning_rate": 7.979030382778376e-06, | |
| "loss": 1.06, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "learning_rate": 7.952985341053902e-06, | |
| "loss": 1.0706, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "learning_rate": 7.92695478440105e-06, | |
| "loss": 1.0718, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "learning_rate": 7.900938897017064e-06, | |
| "loss": 1.069, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 7.874937862995401e-06, | |
| "loss": 1.1215, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 7.848951866324402e-06, | |
| "loss": 1.1296, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 7.822981090886011e-06, | |
| "loss": 1.075, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 7.822981090886011e-06, | |
| "loss": 1.0761, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 7.79702572045445e-06, | |
| "loss": 1.0116, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 7.771085938694943e-06, | |
| "loss": 1.0345, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 7.745161929162405e-06, | |
| "loss": 1.026, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 7.719253875300138e-06, | |
| "loss": 1.1431, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "learning_rate": 7.693361960438548e-06, | |
| "loss": 1.1076, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "learning_rate": 7.667486367793822e-06, | |
| "loss": 1.1333, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "learning_rate": 7.641627280466663e-06, | |
| "loss": 1.064, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "learning_rate": 7.615784881440975e-06, | |
| "loss": 0.9942, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "learning_rate": 7.589959353582574e-06, | |
| "loss": 1.0166, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "learning_rate": 7.564150879637882e-06, | |
| "loss": 1.061, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "learning_rate": 7.538359642232654e-06, | |
| "loss": 1.0256, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "learning_rate": 7.5125858238706785e-06, | |
| "loss": 1.0732, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "learning_rate": 7.486829606932478e-06, | |
| "loss": 0.9878, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "learning_rate": 7.486829606932478e-06, | |
| "loss": 1.0634, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "learning_rate": 7.461091173674022e-06, | |
| "loss": 1.086, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "learning_rate": 7.43537070622544e-06, | |
| "loss": 1.1077, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "learning_rate": 7.40966838658974e-06, | |
| "loss": 1.0418, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "learning_rate": 7.383984396641506e-06, | |
| "loss": 1.0398, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "learning_rate": 7.383984396641506e-06, | |
| "loss": 1.1066, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "learning_rate": 7.358318918125613e-06, | |
| "loss": 1.0911, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "learning_rate": 7.332672132655953e-06, | |
| "loss": 1.055, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "learning_rate": 7.307044221714139e-06, | |
| "loss": 1.0988, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "learning_rate": 7.307044221714139e-06, | |
| "loss": 1.03, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "learning_rate": 7.2814353666482276e-06, | |
| "loss": 1.0436, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "learning_rate": 7.2814353666482276e-06, | |
| "loss": 1.099, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "learning_rate": 7.2558457486714316e-06, | |
| "loss": 1.0698, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "learning_rate": 7.230275548860833e-06, | |
| "loss": 1.0925, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "learning_rate": 7.2047249481561125e-06, | |
| "loss": 0.9996, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "learning_rate": 7.179194127358258e-06, | |
| "loss": 0.9369, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "learning_rate": 7.153683267128304e-06, | |
| "loss": 1.0355, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "learning_rate": 7.153683267128304e-06, | |
| "loss": 1.0375, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "learning_rate": 7.128192547986023e-06, | |
| "loss": 1.0004, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "learning_rate": 7.128192547986023e-06, | |
| "loss": 1.041, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "learning_rate": 7.102722150308678e-06, | |
| "loss": 1.0829, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "learning_rate": 7.077272254329726e-06, | |
| "loss": 1.0619, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "learning_rate": 7.051843040137558e-06, | |
| "loss": 1.0293, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "learning_rate": 7.026434687674204e-06, | |
| "loss": 1.0453, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "learning_rate": 7.001047376734087e-06, | |
| "loss": 1.0631, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "learning_rate": 6.975681286962724e-06, | |
| "loss": 1.1154, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "learning_rate": 6.9503365978554735e-06, | |
| "loss": 0.9798, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "learning_rate": 6.925013488756264e-06, | |
| "loss": 0.9492, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "learning_rate": 6.8997121388563e-06, | |
| "loss": 0.9858, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "learning_rate": 6.874432727192837e-06, | |
| "loss": 1.0972, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "learning_rate": 6.849175432647875e-06, | |
| "loss": 1.0957, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "learning_rate": 6.823940433946921e-06, | |
| "loss": 0.964, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "learning_rate": 6.798727909657698e-06, | |
| "loss": 1.0073, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "learning_rate": 6.773538038188912e-06, | |
| "loss": 1.0884, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "learning_rate": 6.773538038188912e-06, | |
| "loss": 1.0126, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "learning_rate": 6.74837099778896e-06, | |
| "loss": 1.1461, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "learning_rate": 6.723226966544691e-06, | |
| "loss": 1.0353, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "learning_rate": 6.69810612238013e-06, | |
| "loss": 1.0648, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "learning_rate": 6.673008643055228e-06, | |
| "loss": 0.9614, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "learning_rate": 6.6479347061646046e-06, | |
| "loss": 0.9475, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "learning_rate": 6.622884489136286e-06, | |
| "loss": 0.9858, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "learning_rate": 6.622884489136286e-06, | |
| "loss": 0.9516, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "learning_rate": 6.597858169230454e-06, | |
| "loss": 1.0605, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "learning_rate": 6.597858169230454e-06, | |
| "loss": 1.0432, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "learning_rate": 6.572855923538186e-06, | |
| "loss": 1.083, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "learning_rate": 6.547877928980206e-06, | |
| "loss": 1.1029, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "learning_rate": 6.522924362305639e-06, | |
| "loss": 1.0126, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "learning_rate": 6.497995400090748e-06, | |
| "loss": 1.0487, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "learning_rate": 6.4730912187376895e-06, | |
| "loss": 1.0179, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "learning_rate": 6.448211994473263e-06, | |
| "loss": 1.1008, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "learning_rate": 6.448211994473263e-06, | |
| "loss": 0.9999, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "learning_rate": 6.42335790334768e-06, | |
| "loss": 1.0426, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 2.15, | |
| "learning_rate": 6.42335790334768e-06, | |
| "loss": 1.0961, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 2.15, | |
| "learning_rate": 6.398529121233291e-06, | |
| "loss": 1.0348, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 2.15, | |
| "learning_rate": 6.373725823823359e-06, | |
| "loss": 1.1038, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 2.15, | |
| "learning_rate": 6.348948186630815e-06, | |
| "loss": 1.0372, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "learning_rate": 6.324196384987009e-06, | |
| "loss": 1.0386, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "learning_rate": 6.2994705940404825e-06, | |
| "loss": 1.0796, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "learning_rate": 6.274770988755712e-06, | |
| "loss": 1.0432, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "learning_rate": 6.250097743911877e-06, | |
| "loss": 1.0826, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "learning_rate": 6.225451034101631e-06, | |
| "loss": 1.0985, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "learning_rate": 6.200831033729864e-06, | |
| "loss": 1.0645, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "learning_rate": 6.176237917012459e-06, | |
| "loss": 1.0353, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "learning_rate": 6.151671857975061e-06, | |
| "loss": 1.0356, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "learning_rate": 6.12713303045186e-06, | |
| "loss": 1.0918, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "learning_rate": 6.10262160808434e-06, | |
| "loss": 1.1154, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "learning_rate": 6.0781377643200765e-06, | |
| "loss": 1.0837, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "learning_rate": 6.053681672411471e-06, | |
| "loss": 0.927, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "learning_rate": 6.029253505414565e-06, | |
| "loss": 0.9846, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "learning_rate": 6.004853436187794e-06, | |
| "loss": 0.9542, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "learning_rate": 5.9804816373907625e-06, | |
| "loss": 1.0098, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "learning_rate": 5.956138281483039e-06, | |
| "loss": 1.0494, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "learning_rate": 5.931823540722912e-06, | |
| "loss": 1.0146, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "learning_rate": 5.907537587166191e-06, | |
| "loss": 1.0438, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "learning_rate": 5.883280592664979e-06, | |
| "loss": 0.999, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "learning_rate": 5.859052728866468e-06, | |
| "loss": 1.0596, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "learning_rate": 5.859052728866468e-06, | |
| "loss": 0.9974, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "learning_rate": 5.859052728866468e-06, | |
| "loss": 1.0249, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "learning_rate": 5.859052728866468e-06, | |
| "loss": 1.0427, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "learning_rate": 5.859052728866468e-06, | |
| "loss": 1.0657, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "learning_rate": 5.834854167211699e-06, | |
| "loss": 1.017, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "learning_rate": 5.810685078934375e-06, | |
| "loss": 1.0231, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "learning_rate": 5.810685078934375e-06, | |
| "loss": 1.1882, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "learning_rate": 5.78654563505964e-06, | |
| "loss": 0.9763, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "learning_rate": 5.762436006402874e-06, | |
| "loss": 1.0037, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "learning_rate": 5.738356363568463e-06, | |
| "loss": 1.0616, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "learning_rate": 5.714306876948621e-06, | |
| "loss": 1.0914, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "learning_rate": 5.69028771672216e-06, | |
| "loss": 0.9882, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "learning_rate": 5.666299052853314e-06, | |
| "loss": 1.0209, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "learning_rate": 5.642341055090508e-06, | |
| "loss": 1.0136, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "learning_rate": 5.618413892965158e-06, | |
| "loss": 0.9574, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "learning_rate": 5.5945177357904935e-06, | |
| "loss": 1.0981, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "learning_rate": 5.570652752660343e-06, | |
| "loss": 1.0165, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "learning_rate": 5.546819112447952e-06, | |
| "loss": 1.02, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "learning_rate": 5.523016983804759e-06, | |
| "loss": 0.9675, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "learning_rate": 5.499246535159231e-06, | |
| "loss": 0.9956, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "learning_rate": 5.47550793471566e-06, | |
| "loss": 0.9971, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 5.451801350452975e-06, | |
| "loss": 1.0175, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 5.428126950123551e-06, | |
| "loss": 0.9816, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 5.404484901252023e-06, | |
| "loss": 1.0217, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 5.3808753711341e-06, | |
| "loss": 0.9744, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "learning_rate": 5.357298526835381e-06, | |
| "loss": 1.0531, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "learning_rate": 5.333754535190186e-06, | |
| "loss": 1.0347, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "learning_rate": 5.3102435628003435e-06, | |
| "loss": 0.9996, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "learning_rate": 5.286765776034044e-06, | |
| "loss": 0.9865, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "learning_rate": 5.263321341024646e-06, | |
| "loss": 1.0154, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "learning_rate": 5.239910423669509e-06, | |
| "loss": 1.0845, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "learning_rate": 5.216533189628808e-06, | |
| "loss": 1.0039, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "learning_rate": 5.193189804324376e-06, | |
| "loss": 1.0424, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "learning_rate": 5.169880432938519e-06, | |
| "loss": 1.0209, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "learning_rate": 5.146605240412859e-06, | |
| "loss": 0.9603, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "learning_rate": 5.123364391447156e-06, | |
| "loss": 0.9627, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "learning_rate": 5.100158050498159e-06, | |
| "loss": 1.1093, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "learning_rate": 5.076986381778417e-06, | |
| "loss": 1.0122, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "learning_rate": 5.053849549255143e-06, | |
| "loss": 1.0515, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "learning_rate": 5.053849549255143e-06, | |
| "loss": 1.0038, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "learning_rate": 5.03074771664904e-06, | |
| "loss": 1.1328, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "learning_rate": 5.03074771664904e-06, | |
| "loss": 1.0808, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "learning_rate": 5.0076810474331395e-06, | |
| "loss": 1.0216, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "learning_rate": 4.984649704831658e-06, | |
| "loss": 0.9545, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "learning_rate": 4.961653851818827e-06, | |
| "loss": 1.0393, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "learning_rate": 4.938693651117751e-06, | |
| "loss": 1.0588, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "learning_rate": 4.9157692651992495e-06, | |
| "loss": 1.0493, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "learning_rate": 4.892880856280713e-06, | |
| "loss": 0.9733, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "learning_rate": 4.892880856280713e-06, | |
| "loss": 0.9834, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "learning_rate": 4.870028586324947e-06, | |
| "loss": 1.007, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "learning_rate": 4.847212617039037e-06, | |
| "loss": 1.0005, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "learning_rate": 4.82443310987319e-06, | |
| "loss": 1.0557, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "learning_rate": 4.801690226019606e-06, | |
| "loss": 1.0515, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "learning_rate": 4.77898412641133e-06, | |
| "loss": 0.9365, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "learning_rate": 4.756314971721115e-06, | |
| "loss": 0.999, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "learning_rate": 4.733682922360282e-06, | |
| "loss": 1.0896, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "learning_rate": 4.71108813847759e-06, | |
| "loss": 1.0972, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "learning_rate": 4.688530779958099e-06, | |
| "loss": 1.0062, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "learning_rate": 4.666011006422041e-06, | |
| "loss": 0.9758, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "learning_rate": 4.643528977223689e-06, | |
| "loss": 1.0303, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "learning_rate": 4.621084851450229e-06, | |
| "loss": 1.145, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "learning_rate": 4.5986787879206375e-06, | |
| "loss": 1.0513, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "learning_rate": 4.5763109451845515e-06, | |
| "loss": 1.0194, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "learning_rate": 4.553981481521156e-06, | |
| "loss": 1.0725, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "learning_rate": 4.531690554938043e-06, | |
| "loss": 1.0504, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "learning_rate": 4.531690554938043e-06, | |
| "loss": 1.0375, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "learning_rate": 4.509438323170131e-06, | |
| "loss": 1.1302, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "learning_rate": 4.487224943678513e-06, | |
| "loss": 1.0631, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "learning_rate": 4.465050573649359e-06, | |
| "loss": 0.9878, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "learning_rate": 4.465050573649359e-06, | |
| "loss": 0.9993, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "learning_rate": 4.442915369992802e-06, | |
| "loss": 0.9874, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "learning_rate": 4.442915369992802e-06, | |
| "loss": 1.0806, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "learning_rate": 4.4208194893418125e-06, | |
| "loss": 1.0668, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "learning_rate": 4.398763088051127e-06, | |
| "loss": 1.1283, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "learning_rate": 4.3767463221961034e-06, | |
| "loss": 1.0712, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "learning_rate": 4.354769347571638e-06, | |
| "loss": 1.0416, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "learning_rate": 4.332832319691044e-06, | |
| "loss": 1.0996, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "learning_rate": 4.3109353937849815e-06, | |
| "loss": 1.0031, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "learning_rate": 4.289078724800331e-06, | |
| "loss": 1.0374, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "learning_rate": 4.267262467399114e-06, | |
| "loss": 1.0049, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "learning_rate": 4.267262467399114e-06, | |
| "loss": 1.0352, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "learning_rate": 4.24548677595738e-06, | |
| "loss": 1.0891, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "learning_rate": 4.22375180456413e-06, | |
| "loss": 1.0007, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "learning_rate": 4.202057707020235e-06, | |
| "loss": 1.0688, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "learning_rate": 4.180404636837321e-06, | |
| "loss": 1.0779, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "learning_rate": 4.158792747236702e-06, | |
| "loss": 1.0325, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "learning_rate": 4.137222191148282e-06, | |
| "loss": 0.9851, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "learning_rate": 4.11569312120949e-06, | |
| "loss": 1.0092, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "learning_rate": 4.0942056897641934e-06, | |
| "loss": 1.0669, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "learning_rate": 4.072760048861614e-06, | |
| "loss": 1.0599, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 4.051356350255246e-06, | |
| "loss": 1.092, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 4.0299947454018e-06, | |
| "loss": 1.1094, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 4.008675385460131e-06, | |
| "loss": 1.0148, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 3.987398421290155e-06, | |
| "loss": 1.0314, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 3.966164003451775e-06, | |
| "loss": 0.9866, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 3.944972282203844e-06, | |
| "loss": 0.9976, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 3.944972282203844e-06, | |
| "loss": 0.964, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 3.923823407503076e-06, | |
| "loss": 1.0865, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "learning_rate": 3.902717529003005e-06, | |
| "loss": 1.0493, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "learning_rate": 3.902717529003005e-06, | |
| "loss": 0.9973, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "learning_rate": 3.88165479605291e-06, | |
| "loss": 1.0446, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "learning_rate": 3.860635357696756e-06, | |
| "loss": 1.0599, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 3.839659362672156e-06, | |
| "loss": 1.0406, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 3.818726959409305e-06, | |
| "loss": 1.0254, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 3.7978382960299476e-06, | |
| "loss": 1.0693, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 3.776993520346295e-06, | |
| "loss": 1.0674, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "learning_rate": 3.756192779860014e-06, | |
| "loss": 1.0298, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "learning_rate": 3.756192779860014e-06, | |
| "loss": 0.9912, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "learning_rate": 3.7354362217611652e-06, | |
| "loss": 1.1106, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "learning_rate": 3.714723992927177e-06, | |
| "loss": 1.0397, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 3.694056239921776e-06, | |
| "loss": 1.1313, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 3.6734331089939835e-06, | |
| "loss": 1.1178, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 3.6528547460770636e-06, | |
| "loss": 1.028, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 3.6323212967874866e-06, | |
| "loss": 0.9894, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 3.6118329064239222e-06, | |
| "loss": 0.9559, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 3.5913897199661716e-06, | |
| "loss": 1.0402, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 3.5709918820741816e-06, | |
| "loss": 1.0309, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 3.5709918820741816e-06, | |
| "loss": 0.9957, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 3.5506395370869963e-06, | |
| "loss": 0.9978, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 3.5303328290217453e-06, | |
| "loss": 1.0179, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 3.5100719015726228e-06, | |
| "loss": 1.0326, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 3.5100719015726228e-06, | |
| "loss": 1.0268, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 3.4898568981098678e-06, | |
| "loss": 1.0515, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 3.469687961678757e-06, | |
| "loss": 1.0936, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 3.4495652349985844e-06, | |
| "loss": 1.0403, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 3.429488860461655e-06, | |
| "loss": 1.0342, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 3.4094589801322773e-06, | |
| "loss": 1.0026, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "learning_rate": 3.389475735745761e-06, | |
| "loss": 1.0009, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "learning_rate": 3.3695392687074045e-06, | |
| "loss": 0.9617, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "learning_rate": 3.3496497200915067e-06, | |
| "loss": 1.0, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "learning_rate": 3.3298072306403595e-06, | |
| "loss": 1.0145, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "learning_rate": 3.3100119407632556e-06, | |
| "loss": 0.9589, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "learning_rate": 3.2902639905354948e-06, | |
| "loss": 1.0562, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "learning_rate": 3.2705635196973927e-06, | |
| "loss": 1.0662, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "learning_rate": 3.2509106676532897e-06, | |
| "loss": 1.0597, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "learning_rate": 3.231305573470569e-06, | |
| "loss": 1.0901, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "learning_rate": 3.231305573470569e-06, | |
| "loss": 0.9323, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "learning_rate": 3.2117483758786683e-06, | |
| "loss": 1.0963, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "learning_rate": 3.2117483758786683e-06, | |
| "loss": 1.0256, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "learning_rate": 3.192239213268099e-06, | |
| "loss": 0.9955, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "learning_rate": 3.17277822368947e-06, | |
| "loss": 1.0681, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "learning_rate": 3.1533655448525057e-06, | |
| "loss": 1.03, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "learning_rate": 3.134001314125079e-06, | |
| "loss": 0.9993, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "learning_rate": 3.114685668532229e-06, | |
| "loss": 1.0574, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "learning_rate": 3.0954187447551996e-06, | |
| "loss": 0.9894, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "learning_rate": 3.076200679130471e-06, | |
| "loss": 0.9975, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "learning_rate": 3.0570316076487918e-06, | |
| "loss": 1.0756, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "learning_rate": 3.0379116659542186e-06, | |
| "loss": 1.0049, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "learning_rate": 3.0188409893431556e-06, | |
| "loss": 1.0116, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "learning_rate": 2.999819712763402e-06, | |
| "loss": 1.0514, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "learning_rate": 2.9808479708131864e-06, | |
| "loss": 1.0405, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 2.9619258977402253e-06, | |
| "loss": 1.0393, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 2.943053627440771e-06, | |
| "loss": 1.0099, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 2.943053627440771e-06, | |
| "loss": 1.0145, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 2.924231293458647e-06, | |
| "loss": 1.0731, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "learning_rate": 2.90545902898434e-06, | |
| "loss": 1.0022, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "learning_rate": 2.886736966854018e-06, | |
| "loss": 1.0274, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "learning_rate": 2.8680652395486198e-06, | |
| "loss": 1.0585, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "learning_rate": 2.849443979192892e-06, | |
| "loss": 1.0792, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 2.8308733175544724e-06, | |
| "loss": 1.0762, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 2.812353386042962e-06, | |
| "loss": 1.0184, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 2.7938843157089734e-06, | |
| "loss": 0.9722, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 2.775466237243226e-06, | |
| "loss": 1.1118, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 2.7570992809755937e-06, | |
| "loss": 1.0514, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 2.73878357687422e-06, | |
| "loss": 1.0908, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 2.720519254544568e-06, | |
| "loss": 1.0791, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 2.702306443228516e-06, | |
| "loss": 1.0933, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 2.702306443228516e-06, | |
| "loss": 1.0118, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 2.6841452718034343e-06, | |
| "loss": 1.0542, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 2.666035868781285e-06, | |
| "loss": 1.0074, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 2.6479783623077105e-06, | |
| "loss": 1.0274, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "learning_rate": 2.62997288016112e-06, | |
| "loss": 1.0626, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "learning_rate": 2.6120195497517818e-06, | |
| "loss": 1.0031, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "learning_rate": 2.5941184981209354e-06, | |
| "loss": 1.0309, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "learning_rate": 2.5941184981209354e-06, | |
| "loss": 1.0782, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 2.5762698519398832e-06, | |
| "loss": 1.0939, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 2.5584737375091016e-06, | |
| "loss": 1.1096, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 2.5407302807573387e-06, | |
| "loss": 1.0452, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 2.5230396072407204e-06, | |
| "loss": 1.0232, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 2.5054018421418737e-06, | |
| "loss": 0.9717, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 2.487817110269042e-06, | |
| "loss": 1.0756, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 2.470285536055188e-06, | |
| "loss": 1.0175, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 2.4528072435571158e-06, | |
| "loss": 1.0394, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 2.4353823564546064e-06, | |
| "loss": 1.0886, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 2.4353823564546064e-06, | |
| "loss": 1.0737, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 2.4353823564546064e-06, | |
| "loss": 0.9719, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 2.4353823564546064e-06, | |
| "loss": 1.0594, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 2.4180109980495293e-06, | |
| "loss": 1.1362, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 2.4006932912649816e-06, | |
| "loss": 1.041, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 2.3834293586444e-06, | |
| "loss": 1.0417, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 2.3662193223507135e-06, | |
| "loss": 0.9742, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 2.349063304165462e-06, | |
| "loss": 1.0592, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 2.331961425487956e-06, | |
| "loss": 1.0067, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 2.331961425487956e-06, | |
| "loss": 1.0051, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 2.3149138073343958e-06, | |
| "loss": 1.0228, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 2.297920570337019e-06, | |
| "loss": 1.066, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "learning_rate": 2.2809818347432598e-06, | |
| "loss": 1.0886, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "learning_rate": 2.2640977204148838e-06, | |
| "loss": 0.9865, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "learning_rate": 2.2640977204148838e-06, | |
| "loss": 0.9916, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "learning_rate": 2.2472683468271584e-06, | |
| "loss": 1.0139, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 2.230493833067977e-06, | |
| "loss": 1.0251, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 2.213774297837047e-06, | |
| "loss": 1.1165, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 2.1971098594450315e-06, | |
| "loss": 1.0507, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 2.1805006358127213e-06, | |
| "loss": 1.0245, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 2.1805006358127213e-06, | |
| "loss": 1.1373, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 2.1639467444701934e-06, | |
| "loss": 1.0135, | |
| "step": 1101 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 2.1474483025559857e-06, | |
| "loss": 1.0316, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 2.1310054268162628e-06, | |
| "loss": 1.0715, | |
| "step": 1103 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 2.114618233603992e-06, | |
| "loss": 1.0695, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 2.0982868388781286e-06, | |
| "loss": 1.0166, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 2.0820113582027734e-06, | |
| "loss": 1.1768, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 2.0657919067463773e-06, | |
| "loss": 1.0106, | |
| "step": 1107 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 2.0496285992809163e-06, | |
| "loss": 1.1257, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 2.033521550181078e-06, | |
| "loss": 1.0428, | |
| "step": 1109 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 2.0174708734234596e-06, | |
| "loss": 1.0674, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 2.0174708734234596e-06, | |
| "loss": 0.9989, | |
| "step": 1111 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 2.0014766825857514e-06, | |
| "loss": 0.9387, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 1.985539090845943e-06, | |
| "loss": 1.0979, | |
| "step": 1113 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 1.9696582109815145e-06, | |
| "loss": 1.0745, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 1.9538341553686446e-06, | |
| "loss": 1.0447, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 1.93806703598141e-06, | |
| "loss": 1.0419, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 1.9223569643909978e-06, | |
| "loss": 1.0366, | |
| "step": 1117 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 1.9067040517649115e-06, | |
| "loss": 0.9998, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 1.8911084088661903e-06, | |
| "loss": 1.0471, | |
| "step": 1119 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 1.8911084088661903e-06, | |
| "loss": 1.0032, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 1.8755701460526166e-06, | |
| "loss": 1.0189, | |
| "step": 1121 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 1.860089373275945e-06, | |
| "loss": 1.0055, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 1.8446662000811177e-06, | |
| "loss": 0.9388, | |
| "step": 1123 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 1.8293007356054903e-06, | |
| "loss": 1.035, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 1.8139930885780621e-06, | |
| "loss": 1.0502, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 1.7987433673187026e-06, | |
| "loss": 0.9969, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 1.7835516797373908e-06, | |
| "loss": 1.0394, | |
| "step": 1127 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 1.7684181333334437e-06, | |
| "loss": 1.0101, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 1.7533428351947634e-06, | |
| "loss": 0.9991, | |
| "step": 1129 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 1.7383258919970746e-06, | |
| "loss": 1.0013, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 1.7233674100031728e-06, | |
| "loss": 1.0078, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 1.7233674100031728e-06, | |
| "loss": 1.0005, | |
| "step": 1132 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 1.7084674950621694e-06, | |
| "loss": 1.0359, | |
| "step": 1133 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 1.6936262526087432e-06, | |
| "loss": 1.0419, | |
| "step": 1134 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 1.6936262526087432e-06, | |
| "loss": 1.0236, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 1.6788437876623963e-06, | |
| "loss": 1.0773, | |
| "step": 1136 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 1.6641202048267102e-06, | |
| "loss": 0.9716, | |
| "step": 1137 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 1.6494556082886038e-06, | |
| "loss": 1.0103, | |
| "step": 1138 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 1.634850101817601e-06, | |
| "loss": 1.09, | |
| "step": 1139 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 1.6203037887650842e-06, | |
| "loss": 1.0327, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 1.6058167720635832e-06, | |
| "loss": 1.0267, | |
| "step": 1141 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 1.5913891542260284e-06, | |
| "loss": 1.0975, | |
| "step": 1142 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 1.5770210373450356e-06, | |
| "loss": 0.949, | |
| "step": 1143 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 1.5627125230921725e-06, | |
| "loss": 1.0099, | |
| "step": 1144 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 1.5484637127172609e-06, | |
| "loss": 0.9873, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 1.5342747070476339e-06, | |
| "loss": 0.9921, | |
| "step": 1146 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 1.520145606487442e-06, | |
| "loss": 1.0358, | |
| "step": 1147 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 1.506076511016935e-06, | |
| "loss": 1.0532, | |
| "step": 1148 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 1.506076511016935e-06, | |
| "loss": 0.9765, | |
| "step": 1149 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 1.506076511016935e-06, | |
| "loss": 1.0929, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 1.4920675201917467e-06, | |
| "loss": 1.002, | |
| "step": 1151 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 1.4781187331422109e-06, | |
| "loss": 0.9677, | |
| "step": 1152 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 1.4642302485726423e-06, | |
| "loss": 1.0863, | |
| "step": 1153 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 1.4642302485726423e-06, | |
| "loss": 1.0279, | |
| "step": 1154 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 1.4504021647606448e-06, | |
| "loss": 0.9456, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 1.4366345795564084e-06, | |
| "loss": 1.0098, | |
| "step": 1156 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 1.4229275903820306e-06, | |
| "loss": 0.9929, | |
| "step": 1157 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 1.409281294230821e-06, | |
| "loss": 1.0472, | |
| "step": 1158 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 1.395695787666611e-06, | |
| "loss": 1.0939, | |
| "step": 1159 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 1.3821711668230675e-06, | |
| "loss": 0.9547, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 1.3687075274030238e-06, | |
| "loss": 0.9937, | |
| "step": 1161 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 1.3553049646777993e-06, | |
| "loss": 1.0674, | |
| "step": 1162 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 1.34196357348652e-06, | |
| "loss": 0.9952, | |
| "step": 1163 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 1.3286834482354506e-06, | |
| "loss": 0.9764, | |
| "step": 1164 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 1.3154646828973217e-06, | |
| "loss": 1.0367, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 1.3023073710106726e-06, | |
| "loss": 1.0518, | |
| "step": 1166 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 1.2892116056791927e-06, | |
| "loss": 1.0889, | |
| "step": 1167 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 1.2761774795710502e-06, | |
| "loss": 1.1367, | |
| "step": 1168 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 1.2632050849182365e-06, | |
| "loss": 1.0241, | |
| "step": 1169 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 1.2502945135159272e-06, | |
| "loss": 1.0073, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 1.2374458567218217e-06, | |
| "loss": 1.0031, | |
| "step": 1171 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 1.2246592054555062e-06, | |
| "loss": 1.0898, | |
| "step": 1172 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 1.2119346501977914e-06, | |
| "loss": 0.9809, | |
| "step": 1173 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 1.1992722809900925e-06, | |
| "loss": 1.0642, | |
| "step": 1174 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 1.1866721874337827e-06, | |
| "loss": 1.0443, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 1.1741344586895642e-06, | |
| "loss": 1.0289, | |
| "step": 1176 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 1.1616591834768299e-06, | |
| "loss": 1.0584, | |
| "step": 1177 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 1.149246450073036e-06, | |
| "loss": 1.0363, | |
| "step": 1178 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 1.1368963463130866e-06, | |
| "loss": 1.0247, | |
| "step": 1179 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 1.1246089595887023e-06, | |
| "loss": 1.0164, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 1.1123843768478148e-06, | |
| "loss": 1.0541, | |
| "step": 1181 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 1.100222684593929e-06, | |
| "loss": 1.0542, | |
| "step": 1182 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 1.088123968885534e-06, | |
| "loss": 0.9276, | |
| "step": 1183 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 1.0760883153354818e-06, | |
| "loss": 1.0496, | |
| "step": 1184 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 1.0641158091103832e-06, | |
| "loss": 1.046, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 1.0522065349300103e-06, | |
| "loss": 0.9732, | |
| "step": 1186 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 1.0522065349300103e-06, | |
| "loss": 0.9684, | |
| "step": 1187 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 1.040360577066688e-06, | |
| "loss": 1.0563, | |
| "step": 1188 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 1.028578019344706e-06, | |
| "loss": 1.035, | |
| "step": 1189 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 1.0168589451397204e-06, | |
| "loss": 1.0778, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 1.0052034373781716e-06, | |
| "loss": 0.955, | |
| "step": 1191 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 1.0052034373781716e-06, | |
| "loss": 1.1, | |
| "step": 1192 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 9.936115785366817e-07, | |
| "loss": 1.0728, | |
| "step": 1193 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 9.820834506414866e-07, | |
| "loss": 1.0634, | |
| "step": 1194 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 9.706191352678495e-07, | |
| "loss": 1.0508, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 9.592187135394826e-07, | |
| "loss": 1.0482, | |
| "step": 1196 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 9.478822661279763e-07, | |
| "loss": 1.1091, | |
| "step": 1197 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 9.366098732522233e-07, | |
| "loss": 1.099, | |
| "step": 1198 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 9.254016146778555e-07, | |
| "loss": 1.049, | |
| "step": 1199 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 9.1425756971668e-07, | |
| "loss": 1.0137, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 9.1425756971668e-07, | |
| "loss": 0.9972, | |
| "step": 1201 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 9.03177817226113e-07, | |
| "loss": 0.9507, | |
| "step": 1202 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 8.921624356086256e-07, | |
| "loss": 1.0585, | |
| "step": 1203 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 8.81211502811189e-07, | |
| "loss": 1.0801, | |
| "step": 1204 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 8.703250963247223e-07, | |
| "loss": 1.0114, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 8.595032931835423e-07, | |
| "loss": 1.0778, | |
| "step": 1206 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 8.595032931835423e-07, | |
| "loss": 1.1023, | |
| "step": 1207 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 8.595032931835423e-07, | |
| "loss": 1.114, | |
| "step": 1208 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 8.487461699648203e-07, | |
| "loss": 0.9095, | |
| "step": 1209 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 8.380538027880425e-07, | |
| "loss": 1.0415, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 8.274262673144651e-07, | |
| "loss": 1.0887, | |
| "step": 1211 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 8.168636387465856e-07, | |
| "loss": 1.1132, | |
| "step": 1212 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 8.063659918276056e-07, | |
| "loss": 1.0403, | |
| "step": 1213 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 7.95933400840907e-07, | |
| "loss": 0.9683, | |
| "step": 1214 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 7.855659396095183e-07, | |
| "loss": 1.1194, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 7.752636814956027e-07, | |
| "loss": 0.9638, | |
| "step": 1216 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 7.6502669939993e-07, | |
| "loss": 0.9801, | |
| "step": 1217 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "learning_rate": 7.548550657613651e-07, | |
| "loss": 1.046, | |
| "step": 1218 | |
| } | |
| ], | |
| "max_steps": 1218, | |
| "num_train_epochs": 3, | |
| "total_flos": 1.2021253265917215e+18, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
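
The block above is the raw `log_history` and closing metadata of what appears to be a Hugging Face `Trainer` run (1218 steps over 3 epochs). As a minimal sketch of how such a log can be inspected, the snippet below assumes the JSON has been saved under the conventional name `trainer_state.json` (that file name, and the 50-step averaging window, are assumptions for illustration, not part of the run) and uses only the standard library to summarize the loss curve.

```python
# Minimal sketch: summarize a Trainer-style state file.
# Assumes the JSON above is stored as "trainer_state.json" (hypothetical path).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]
losses = [entry["loss"] for entry in history if "loss" in entry]
steps = [entry["step"] for entry in history if "loss" in entry]

print(f"steps logged: {len(steps)} (global_step {state['global_step']})")
print(f"first logged loss: {losses[0]:.4f}, last logged loss: {losses[-1]:.4f}")

# Rough convergence indicator: mean loss over the last 50 logged steps
# (window size is an arbitrary choice for this sketch).
window = losses[-50:]
print(f"mean loss over final {len(window)} steps: {sum(window) / len(window):.4f}")
```
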