{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9938461538461538,
  "global_step": 324,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 2.9766,
      "step": 1
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.000000000000001e-06,
      "loss": 2.987,
      "step": 2
    },
    {
      "epoch": 0.02,
      "learning_rate": 6e-06,
      "loss": 2.455,
      "step": 3
    },
    {
      "epoch": 0.02,
      "learning_rate": 8.000000000000001e-06,
      "loss": 2.4113,
      "step": 4
    },
    {
      "epoch": 0.03,
      "learning_rate": 1e-05,
      "loss": 2.2854,
      "step": 5
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.2e-05,
      "loss": 1.8355,
      "step": 6
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.4e-05,
      "loss": 1.916,
      "step": 7
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.8224,
      "step": 8
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.8e-05,
      "loss": 1.7426,
      "step": 9
    },
    {
      "epoch": 0.06,
      "learning_rate": 2e-05,
      "loss": 1.7137,
      "step": 10
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.9999499496831964e-05,
      "loss": 1.7565,
      "step": 11
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.9997998037428528e-05,
      "loss": 1.5721,
      "step": 12
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.9995495772086735e-05,
      "loss": 1.6722,
      "step": 13
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.999199295128493e-05,
      "loss": 1.5741,
      "step": 14
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.99874899256577e-05,
      "loss": 1.5782,
      "step": 15
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.998198714596076e-05,
      "loss": 1.5812,
      "step": 16
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9975485163025837e-05,
      "loss": 1.6109,
      "step": 17
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.996798462770555e-05,
      "loss": 1.8711,
      "step": 18
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.995948629080824e-05,
      "loss": 1.5257,
      "step": 19
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.994999100302281e-05,
      "loss": 1.468,
      "step": 20
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.993949971483358e-05,
      "loss": 1.5347,
      "step": 21
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.992801347642515e-05,
      "loss": 1.5208,
      "step": 22
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9915533437577264e-05,
      "loss": 1.5473,
      "step": 23
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9902060847549716e-05,
      "loss": 1.5837,
      "step": 24
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9887597054957304e-05,
      "loss": 1.5577,
      "step": 25
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.987214350763483e-05,
      "loss": 1.7363,
      "step": 26
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9855701752492174e-05,
      "loss": 1.5417,
      "step": 27
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9838273435359447e-05,
      "loss": 1.427,
      "step": 28
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9819860300822227e-05,
      "loss": 1.6365,
      "step": 29
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9800464192046956e-05,
      "loss": 1.6032,
      "step": 30
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9780087050596407e-05,
      "loss": 1.4046,
      "step": 31
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9758730916235356e-05,
      "loss": 1.253,
      "step": 32
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.973639792672638e-05,
      "loss": 1.4498,
      "step": 33
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9713090317615877e-05,
      "loss": 1.5219,
      "step": 34
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.968881042201029e-05,
      "loss": 1.5883,
      "step": 35
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.966356067034256e-05,
      "loss": 1.5119,
      "step": 36
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.963734359012881e-05,
      "loss": 1.5395,
      "step": 37
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.96101618057154e-05,
      "loss": 1.5608,
      "step": 38
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9582018038016156e-05,
      "loss": 1.6791,
      "step": 39
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9552915104240067e-05,
      "loss": 1.4483,
      "step": 40
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9522855917609243e-05,
      "loss": 1.5176,
      "step": 41
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9491843487067305e-05,
      "loss": 1.7341,
      "step": 42
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.945988091697821e-05,
      "loss": 1.7081,
      "step": 43
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9426971406815464e-05,
      "loss": 1.5174,
      "step": 44
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9393118250841897e-05,
      "loss": 1.7841,
      "step": 45
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9358324837779864e-05,
      "loss": 1.6924,
      "step": 46
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.932259465047206e-05,
      "loss": 1.6845,
      "step": 47
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9285931265532875e-05,
      "loss": 1.5164,
      "step": 48
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.924833835299037e-05,
      "loss": 1.4916,
      "step": 49
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.920981967591891e-05,
      "loss": 1.5541,
      "step": 50
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.917037909006248e-05,
      "loss": 1.5278,
      "step": 51
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9130020543448705e-05,
      "loss": 1.5412,
      "step": 52
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.908874807599368e-05,
      "loss": 1.5861,
      "step": 53
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9046565819097546e-05,
      "loss": 1.4084,
      "step": 54
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9003477995230942e-05,
      "loss": 1.6763,
      "step": 55
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.895948891751234e-05,
      "loss": 1.357,
      "step": 56
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.8914602989276294e-05,
      "loss": 1.5576,
      "step": 57
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.8868824703632658e-05,
      "loss": 1.551,
      "step": 58
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.882215864301683e-05,
      "loss": 1.6156,
      "step": 59
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.8774609478731048e-05,
      "loss": 1.541,
      "step": 60
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.872618197047678e-05,
      "loss": 1.392,
      "step": 61
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.8676880965878293e-05,
      "loss": 1.427,
      "step": 62
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.862671139999738e-05,
      "loss": 1.5492,
      "step": 63
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.857567829483937e-05,
      "loss": 1.6885,
      "step": 64
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.8523786758850436e-05,
      "loss": 1.4042,
      "step": 65
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.84710419864062e-05,
      "loss": 1.5082,
      "step": 66
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.8417449257291802e-05,
      "loss": 1.58,
      "step": 67
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.8363013936173393e-05,
      "loss": 1.465,
      "step": 68
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.8307741472061097e-05,
      "loss": 1.4981,
      "step": 69
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.8251637397763597e-05,
      "loss": 1.6048,
      "step": 70
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.8194707329334277e-05,
      "loss": 1.5923,
      "step": 71
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.8136956965509064e-05,
      "loss": 1.5466,
      "step": 72
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.8078392087135957e-05,
      "loss": 1.5844,
      "step": 73
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.8019018556596402e-05,
      "loss": 1.4136,
      "step": 74
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.7958842317218413e-05,
      "loss": 1.5178,
      "step": 75
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.7897869392681685e-05,
      "loss": 1.3765,
      "step": 76
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.7836105886414596e-05,
      "loss": 1.8089,
      "step": 77
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.7773557980983264e-05,
      "loss": 1.3932,
      "step": 78
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.771023193747264e-05,
      "loss": 1.539,
      "step": 79
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.7646134094859816e-05,
      "loss": 1.6678,
      "step": 80
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.7581270869379443e-05,
      "loss": 1.5492,
      "step": 81
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.7515648753881495e-05,
      "loss": 1.4473,
      "step": 82
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.7449274317181304e-05,
      "loss": 1.5742,
      "step": 83
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.738215420340205e-05,
      "loss": 1.3161,
      "step": 84
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.731429513130964e-05,
      "loss": 1.4037,
      "step": 85
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.7245703893640188e-05,
      "loss": 1.602,
      "step": 86
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.717638735642005e-05,
      "loss": 1.3847,
      "step": 87
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.7106352458278524e-05,
      "loss": 1.5577,
      "step": 88
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.7035606209753276e-05,
      "loss": 1.4845,
      "step": 89
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.696415569258862e-05,
      "loss": 1.6808,
      "step": 90
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.6892008059026587e-05,
      "loss": 1.5455,
      "step": 91
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.6819170531091018e-05,
      "loss": 1.623,
      "step": 92
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.6745650399864608e-05,
      "loss": 1.6224,
      "step": 93
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.667145502475907e-05,
      "loss": 1.3582,
      "step": 94
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.659659183277847e-05,
      "loss": 1.3149,
      "step": 95
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.6521068317775756e-05,
      "loss": 1.6216,
      "step": 96
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.644489203970263e-05,
      "loss": 1.5645,
      "step": 97
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.6368070623852794e-05,
      "loss": 1.3277,
      "step": 98
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.6290611760098655e-05,
      "loss": 1.6269,
      "step": 99
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.6212523202121547e-05,
      "loss": 1.4384,
      "step": 100
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.6133812766635603e-05,
      "loss": 1.4306,
      "step": 101
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.6054488332605282e-05,
      "loss": 1.4768,
      "step": 102
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.59745578404567e-05,
      "loss": 1.8864,
      "step": 103
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.589402929128276e-05,
      "loss": 1.3486,
      "step": 104
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.581291074604226e-05,
      "loss": 1.613,
      "step": 105
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.573121032475297e-05,
      "loss": 1.4764,
      "step": 106
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.564893620567884e-05,
      "loss": 1.358,
      "step": 107
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.5566096624511306e-05,
      "loss": 1.4771,
      "step": 108
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.5482699873544937e-05,
      "loss": 1.3671,
      "step": 109
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.5398754300847346e-05,
      "loss": 1.5569,
      "step": 110
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.531426830942354e-05,
      "loss": 1.4741,
      "step": 111
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.5229250356374804e-05,
      "loss": 1.6195,
      "step": 112
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.5143708952052099e-05,
      "loss": 1.4781,
      "step": 113
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.5057652659204198e-05,
      "loss": 1.5379,
      "step": 114
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.4971090092120544e-05,
      "loss": 1.3646,
      "step": 115
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.4884029915768945e-05,
      "loss": 1.4879,
      "step": 116
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.4796480844928218e-05,
      "loss": 1.5311,
      "step": 117
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.4708451643315827e-05,
      "loss": 1.527,
      "step": 118
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.4619951122710624e-05,
      "loss": 1.4042,
      "step": 119
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.4530988142070802e-05,
      "loss": 1.4992,
      "step": 120
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.4441571606647089e-05,
      "loss": 1.6264,
      "step": 121
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.4351710467091337e-05,
      "loss": 1.6355,
      "step": 122
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.4261413718560549e-05,
      "loss": 1.5735,
      "step": 123
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.4170690399816469e-05,
      "loss": 1.5545,
      "step": 124
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.4079549592320782e-05,
      "loss": 1.3931,
      "step": 125
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.3988000419326073e-05,
      "loss": 1.5475,
      "step": 126
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.3896052044962558e-05,
      "loss": 1.4149,
      "step": 127
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.3803713673320773e-05,
      "loss": 1.7302,
      "step": 128
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.3710994547530225e-05,
      "loss": 1.5059,
      "step": 129
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.3617903948834155e-05,
      "loss": 1.5142,
      "step": 130
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.3524451195660472e-05,
      "loss": 1.6166,
      "step": 131
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.343064564268899e-05,
      "loss": 1.4794,
      "step": 132
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.3336496679914982e-05,
      "loss": 1.3817,
      "step": 133
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.324201373170929e-05,
      "loss": 1.448,
      "step": 134
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.3147206255874886e-05,
      "loss": 1.5142,
      "step": 135
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.3052083742700172e-05,
      "loss": 1.4328,
      "step": 136
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.2956655714008992e-05,
      "loss": 1.3995,
      "step": 137
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.286093172220748e-05,
      "loss": 1.5674,
      "step": 138
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.2764921349327864e-05,
      "loss": 1.4618,
      "step": 139
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.2668634206069305e-05,
      "loss": 1.5822,
      "step": 140
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.257207993083585e-05,
      "loss": 1.5195,
      "step": 141
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.2475268188771628e-05,
      "loss": 1.391,
      "step": 142
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.2378208670793361e-05,
      "loss": 1.302,
      "step": 143
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.2280911092620298e-05,
      "loss": 1.4355,
      "step": 144
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.2183385193801655e-05,
      "loss": 1.5066,
      "step": 145
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.2085640736741708e-05,
      "loss": 1.3998,
      "step": 146
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.1987687505722532e-05,
      "loss": 1.5438,
      "step": 147
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.1889535305924619e-05,
      "loss": 1.4369,
      "step": 148
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.1791193962445358e-05,
      "loss": 1.3723,
      "step": 149
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.1692673319315541e-05,
      "loss": 1.319,
      "step": 150
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.1593983238513971e-05,
      "loss": 1.5461,
      "step": 151
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.1495133598980263e-05,
      "loss": 1.2542,
      "step": 152
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.1396134295625971e-05,
      "loss": 1.5088,
      "step": 153
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.1296995238344084e-05,
      "loss": 1.4485,
      "step": 154
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.1197726351017052e-05,
      "loss": 1.6495,
      "step": 155
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.1098337570523397e-05,
      "loss": 1.3967,
      "step": 156
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.0998838845743012e-05,
      "loss": 1.3436,
      "step": 157
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.08992401365613e-05,
      "loss": 1.441,
      "step": 158
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.0799551412872151e-05,
      "loss": 1.3192,
      "step": 159
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.0699782653579973e-05,
      "loss": 1.6255,
      "step": 160
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.0599943845600781e-05,
      "loss": 1.4091,
      "step": 161
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.0500044982862519e-05,
      "loss": 1.3174,
      "step": 162
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.0400096065304637e-05,
      "loss": 1.3448,
      "step": 163
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.0300107097877114e-05,
      "loss": 1.1215,
      "step": 164
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.0200088089538944e-05,
      "loss": 1.3024,
      "step": 165
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.0100049052256236e-05,
      "loss": 1.0719,
      "step": 166
    },
    {
      "epoch": 1.03,
      "learning_rate": 1e-05,
      "loss": 1.4881,
      "step": 167
    },
    {
      "epoch": 1.03,
      "learning_rate": 9.899950947743767e-06,
      "loss": 1.3868,
      "step": 168
    },
    {
      "epoch": 1.04,
      "learning_rate": 9.799911910461059e-06,
      "loss": 1.176,
      "step": 169
    },
    {
      "epoch": 1.05,
      "learning_rate": 9.699892902122887e-06,
      "loss": 1.3391,
      "step": 170
    },
    {
      "epoch": 1.05,
      "learning_rate": 9.599903934695366e-06,
      "loss": 1.2758,
      "step": 171
    },
    {
      "epoch": 1.06,
      "learning_rate": 9.499955017137485e-06,
      "loss": 1.2091,
      "step": 172
    },
    {
      "epoch": 1.06,
      "learning_rate": 9.40005615439922e-06,
      "loss": 1.7195,
      "step": 173
    },
    {
      "epoch": 1.07,
      "learning_rate": 9.300217346420029e-06,
      "loss": 1.2776,
      "step": 174
    },
    {
      "epoch": 1.08,
      "learning_rate": 9.200448587127852e-06,
      "loss": 1.5049,
      "step": 175
    },
    {
      "epoch": 1.08,
      "learning_rate": 9.100759863438702e-06,
      "loss": 1.3742,
      "step": 176
    },
    {
      "epoch": 1.09,
      "learning_rate": 9.00116115425699e-06,
      "loss": 1.3311,
      "step": 177
    },
    {
      "epoch": 1.1,
      "learning_rate": 8.901662429476607e-06,
      "loss": 1.2521,
      "step": 178
    },
    {
      "epoch": 1.1,
      "learning_rate": 8.802273648982951e-06,
      "loss": 1.201,
      "step": 179
    },
    {
      "epoch": 1.11,
      "learning_rate": 8.703004761655918e-06,
      "loss": 1.221,
      "step": 180
    },
    {
      "epoch": 1.11,
      "learning_rate": 8.603865704374032e-06,
      "loss": 1.1296,
      "step": 181
    },
    {
      "epoch": 1.12,
      "learning_rate": 8.504866401019738e-06,
      "loss": 1.2867,
      "step": 182
    },
    {
      "epoch": 1.13,
      "learning_rate": 8.406016761486034e-06,
      "loss": 1.2172,
      "step": 183
    },
    {
      "epoch": 1.13,
      "learning_rate": 8.30732668068446e-06,
      "loss": 1.3546,
      "step": 184
    },
    {
      "epoch": 1.14,
      "learning_rate": 8.208806037554645e-06,
      "loss": 1.3152,
      "step": 185
    },
    {
      "epoch": 1.14,
      "learning_rate": 8.110464694075383e-06,
      "loss": 1.2396,
      "step": 186
    },
    {
      "epoch": 1.15,
      "learning_rate": 8.012312494277473e-06,
      "loss": 1.2363,
      "step": 187
    },
    {
      "epoch": 1.16,
      "learning_rate": 7.914359263258295e-06,
      "loss": 1.269,
      "step": 188
    },
    {
      "epoch": 1.16,
      "learning_rate": 7.816614806198347e-06,
      "loss": 1.117,
      "step": 189
    },
    {
      "epoch": 1.17,
      "learning_rate": 7.719088907379705e-06,
      "loss": 1.052,
      "step": 190
    },
    {
      "epoch": 1.18,
      "learning_rate": 7.621791329206643e-06,
      "loss": 1.1074,
      "step": 191
    },
    {
      "epoch": 1.18,
      "learning_rate": 7.524731811228374e-06,
      "loss": 1.1173,
      "step": 192
    },
    {
      "epoch": 1.19,
      "learning_rate": 7.427920069164154e-06,
      "loss": 1.4199,
      "step": 193
    },
    {
      "epoch": 1.19,
      "learning_rate": 7.331365793930698e-06,
      "loss": 1.1899,
      "step": 194
    },
    {
      "epoch": 1.2,
      "learning_rate": 7.235078650672141e-06,
      "loss": 1.1529,
      "step": 195
    },
    {
      "epoch": 1.21,
      "learning_rate": 7.139068277792524e-06,
      "loss": 1.0999,
      "step": 196
    },
    {
      "epoch": 1.21,
      "learning_rate": 7.043344285991012e-06,
      "loss": 1.173,
      "step": 197
    },
    {
      "epoch": 1.22,
      "learning_rate": 6.947916257299829e-06,
      "loss": 1.0733,
      "step": 198
    },
    {
      "epoch": 1.22,
      "learning_rate": 6.8527937441251195e-06,
      "loss": 1.1061,
      "step": 199
    },
    {
      "epoch": 1.23,
      "learning_rate": 6.757986268290713e-06,
      "loss": 1.458,
      "step": 200
    },
    {
      "epoch": 1.24,
      "learning_rate": 6.663503320085021e-06,
      "loss": 1.8228,
      "step": 201
    },
    {
      "epoch": 1.24,
      "learning_rate": 6.569354357311015e-06,
      "loss": 1.207,
      "step": 202
    },
    {
      "epoch": 1.25,
      "learning_rate": 6.475548804339529e-06,
      "loss": 1.2792,
      "step": 203
    },
    {
      "epoch": 1.26,
      "learning_rate": 6.382096051165847e-06,
      "loss": 1.1533,
      "step": 204
    },
    {
      "epoch": 1.26,
      "learning_rate": 6.289005452469778e-06,
      "loss": 1.0791,
      "step": 205
    },
    {
      "epoch": 1.27,
      "learning_rate": 6.196286326679231e-06,
      "loss": 1.266,
      "step": 206
    },
    {
      "epoch": 1.27,
      "learning_rate": 6.103947955037447e-06,
      "loss": 1.1793,
      "step": 207
    },
    {
      "epoch": 1.28,
      "learning_rate": 6.0119995806739316e-06,
      "loss": 1.2426,
      "step": 208
    },
    {
      "epoch": 1.29,
      "learning_rate": 5.920450407679219e-06,
      "loss": 1.2104,
      "step": 209
    },
    {
      "epoch": 1.29,
      "learning_rate": 5.829309600183536e-06,
      "loss": 1.162,
      "step": 210
    },
    {
      "epoch": 1.3,
      "learning_rate": 5.738586281439455e-06,
      "loss": 1.2641,
      "step": 211
    },
    {
      "epoch": 1.3,
      "learning_rate": 5.648289532908666e-06,
      "loss": 1.7289,
      "step": 212
    },
    {
      "epoch": 1.31,
      "learning_rate": 5.558428393352914e-06,
      "loss": 1.1043,
      "step": 213
    },
    {
      "epoch": 1.32,
      "learning_rate": 5.469011857929202e-06,
      "loss": 1.1279,
      "step": 214
    },
    {
      "epoch": 1.32,
      "learning_rate": 5.380048877289381e-06,
      "loss": 0.9973,
      "step": 215
    },
    {
      "epoch": 1.33,
      "learning_rate": 5.291548356684177e-06,
      "loss": 2.1676,
      "step": 216
    },
    {
      "epoch": 1.34,
      "learning_rate": 5.2035191550717856e-06,
      "loss": 1.3953,
      "step": 217
    },
    {
      "epoch": 1.34,
      "learning_rate": 5.115970084231059e-06,
      "loss": 1.2132,
      "step": 218
    },
    {
      "epoch": 1.35,
      "learning_rate": 5.028909907879462e-06,
      "loss": 1.1612,
      "step": 219
    },
    {
      "epoch": 1.35,
      "learning_rate": 4.942347340795803e-06,
      "loss": 1.3607,
      "step": 220
    },
    {
      "epoch": 1.36,
      "learning_rate": 4.856291047947904e-06,
      "loss": 1.3732,
      "step": 221
    },
    {
      "epoch": 1.37,
      "learning_rate": 4.7707496436252e-06,
      "loss": 1.6733,
      "step": 222
    },
    {
      "epoch": 1.37,
      "learning_rate": 4.685731690576464e-06,
      "loss": 1.2424,
      "step": 223
    },
    {
      "epoch": 1.38,
      "learning_rate": 4.601245699152659e-06,
      "loss": 1.299,
      "step": 224
    },
    {
      "epoch": 1.38,
      "learning_rate": 4.5173001264550665e-06,
      "loss": 1.3776,
      "step": 225
    },
    {
      "epoch": 1.39,
      "learning_rate": 4.4339033754886974e-06,
      "loss": 1.1441,
      "step": 226
    },
    {
      "epoch": 1.4,
      "learning_rate": 4.3510637943211656e-06,
      "loss": 1.4207,
      "step": 227
    },
    {
      "epoch": 1.4,
      "learning_rate": 4.268789675247029e-06,
      "loss": 1.4656,
      "step": 228
    },
    {
      "epoch": 1.41,
      "learning_rate": 4.1870892539577435e-06,
      "loss": 1.3888,
      "step": 229
    },
    {
      "epoch": 1.42,
      "learning_rate": 4.105970708717244e-06,
      "loss": 1.3829,
      "step": 230
    },
    {
      "epoch": 1.42,
      "learning_rate": 4.0254421595433045e-06,
      "loss": 1.2,
      "step": 231
    },
    {
      "epoch": 1.43,
      "learning_rate": 3.945511667394719e-06,
      "loss": 1.2289,
      "step": 232
    },
    {
      "epoch": 1.43,
      "learning_rate": 3.866187233364402e-06,
      "loss": 1.1711,
      "step": 233
    },
    {
      "epoch": 1.44,
      "learning_rate": 3.787476797878459e-06,
      "loss": 1.5137,
      "step": 234
    },
    {
      "epoch": 1.45,
      "learning_rate": 3.7093882399013504e-06,
      "loss": 1.1758,
      "step": 235
    },
    {
      "epoch": 1.45,
      "learning_rate": 3.6319293761472073e-06,
      "loss": 1.1653,
      "step": 236
    },
    {
      "epoch": 1.46,
      "learning_rate": 3.5551079602973735e-06,
      "loss": 1.404,
      "step": 237
    },
    {
      "epoch": 1.46,
      "learning_rate": 3.47893168222425e-06,
      "loss": 1.0932,
      "step": 238
    },
    {
      "epoch": 1.47,
      "learning_rate": 3.403408167221536e-06,
      "loss": 1.321,
      "step": 239
    },
    {
      "epoch": 1.48,
      "learning_rate": 3.3285449752409315e-06,
      "loss": 1.1774,
      "step": 240
    },
    {
      "epoch": 1.48,
      "learning_rate": 3.2543496001353968e-06,
      "loss": 1.166,
      "step": 241
    },
    {
      "epoch": 1.49,
      "learning_rate": 3.1808294689089856e-06,
      "loss": 1.239,
      "step": 242
    },
    {
      "epoch": 1.5,
      "learning_rate": 3.107991940973417e-06,
      "loss": 1.3065,
      "step": 243
    },
    {
      "epoch": 1.5,
      "learning_rate": 3.035844307411384e-06,
      "loss": 1.1926,
      "step": 244
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.964393790246728e-06,
      "loss": 1.3487,
      "step": 245
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.8936475417214795e-06,
      "loss": 1.1353,
      "step": 246
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.8236126435799492e-06,
      "loss": 1.0904,
      "step": 247
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.754296106359811e-06,
      "loss": 1.2677,
      "step": 248
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.6857048686903618e-06,
      "loss": 1.2408,
      "step": 249
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.6178457965979543e-06,
      "loss": 1.4542,
      "step": 250
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.5507256828186978e-06,
      "loss": 1.2505,
      "step": 251
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.484351246118507e-06,
      "loss": 1.4077,
      "step": 252
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.4187291306205573e-06,
      "loss": 1.1162,
      "step": 253
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.353865905140187e-06,
      "loss": 1.3291,
      "step": 254
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.2897680625273623e-06,
      "loss": 1.1045,
      "step": 255
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.226442019016739e-06,
      "loss": 1.1402,
      "step": 256
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.1638941135854042e-06,
      "loss": 1.1621,
      "step": 257
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.1021306073183166e-06,
      "loss": 1.2284,
      "step": 258
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.0411576827815904e-06,
      "loss": 1.0387,
      "step": 259
    },
    {
      "epoch": 1.6,
      "learning_rate": 1.9809814434036e-06,
      "loss": 1.1022,
      "step": 260
    },
    {
      "epoch": 1.61,
      "learning_rate": 1.921607912864042e-06,
      "loss": 1.2595,
      "step": 261
    },
    {
      "epoch": 1.61,
      "learning_rate": 1.8630430344909378e-06,
      "loss": 1.1431,
      "step": 262
    },
    {
      "epoch": 1.62,
      "learning_rate": 1.8052926706657226e-06,
      "loss": 1.2936,
      "step": 263
    },
    {
      "epoch": 1.62,
      "learning_rate": 1.748362602236403e-06,
      "loss": 1.4389,
      "step": 264
    },
    {
      "epoch": 1.63,
      "learning_rate": 1.6922585279389037e-06,
      "loss": 1.0685,
      "step": 265
    },
    {
      "epoch": 1.64,
      "learning_rate": 1.63698606382661e-06,
      "loss": 1.0782,
      "step": 266
    },
    {
      "epoch": 1.64,
      "learning_rate": 1.5825507427081976e-06,
      "loss": 1.0724,
      "step": 267
    },
    {
      "epoch": 1.65,
      "learning_rate": 1.528958013593801e-06,
      "loss": 0.98,
      "step": 268
    },
    {
      "epoch": 1.66,
      "learning_rate": 1.4762132411495644e-06,
      "loss": 1.1179,
      "step": 269
    },
    {
      "epoch": 1.66,
      "learning_rate": 1.4243217051606285e-06,
      "loss": 1.1229,
      "step": 270
    },
    {
      "epoch": 1.67,
      "learning_rate": 1.3732886000026235e-06,
      "loss": 1.2608,
      "step": 271
    },
    {
      "epoch": 1.67,
      "learning_rate": 1.3231190341217081e-06,
      "loss": 1.1365,
      "step": 272
    },
    {
      "epoch": 1.68,
      "learning_rate": 1.2738180295232206e-06,
      "loss": 1.7409,
      "step": 273
    },
    {
      "epoch": 1.69,
      "learning_rate": 1.2253905212689554e-06,
      "loss": 1.1186,
      "step": 274
    },
    {
      "epoch": 1.69,
      "learning_rate": 1.1778413569831726e-06,
      "loss": 1.121,
      "step": 275
    },
    {
      "epoch": 1.7,
      "learning_rate": 1.1311752963673438e-06,
      "loss": 1.1812,
      "step": 276
    },
    {
      "epoch": 1.7,
      "learning_rate": 1.085397010723709e-06,
      "loss": 1.3303,
      "step": 277
    },
    {
      "epoch": 1.71,
      "learning_rate": 1.0405110824876619e-06,
      "loss": 1.1579,
      "step": 278
    },
    {
      "epoch": 1.72,
      "learning_rate": 9.965220047690626e-07,
      "loss": 1.4992,
      "step": 279
    },
    {
      "epoch": 1.72,
      "learning_rate": 9.534341809024583e-07,
      "loss": 2.0781,
      "step": 280
    },
    {
      "epoch": 1.73,
      "learning_rate": 9.112519240063234e-07,
      "loss": 1.1184,
      "step": 281
    },
    {
      "epoch": 1.74,
      "learning_rate": 8.699794565512976e-07,
      "loss": 1.1405,
      "step": 282
    },
    {
      "epoch": 1.74,
      "learning_rate": 8.296209099375252e-07,
      "loss": 1.1669,
      "step": 283
    },
    {
      "epoch": 1.75,
      "learning_rate": 7.901803240810901e-07,
      "loss": 1.362,
      "step": 284
    },
    {
      "epoch": 1.75,
      "learning_rate": 7.516616470096317e-07,
      "loss": 1.2286,
      "step": 285
    },
    {
      "epoch": 1.76,
      "learning_rate": 7.140687344671282e-07,
      "loss": 1.0405,
      "step": 286
    },
    {
      "epoch": 1.77,
      "learning_rate": 6.77405349527942e-07,
      "loss": 1.2192,
      "step": 287
    },
    {
      "epoch": 1.77,
      "learning_rate": 6.416751622201389e-07,
      "loss": 1.4261,
      "step": 288
    },
    {
      "epoch": 1.78,
      "learning_rate": 6.068817491581069e-07,
      "loss": 1.1794,
      "step": 289
    },
    {
      "epoch": 1.78,
      "learning_rate": 5.730285931845381e-07,
      "loss": 1.2879,
      "step": 290
    },
    {
      "epoch": 1.79,
      "learning_rate": 5.40119083021794e-07,
      "loss": 1.0215,
      "step": 291
    },
    {
      "epoch": 1.8,
      "learning_rate": 5.08156512932696e-07,
      "loss": 1.1408,
      "step": 292
    },
    {
      "epoch": 1.8,
      "learning_rate": 4.771440823907603e-07,
      "loss": 1.4883,
      "step": 293
    },
    {
      "epoch": 1.81,
      "learning_rate": 4.4708489575993496e-07,
      "loss": 1.0723,
      "step": 294
    },
    {
      "epoch": 1.82,
      "learning_rate": 4.1798196198384545e-07,
      "loss": 1.5278,
      "step": 295
    },
    {
      "epoch": 1.82,
      "learning_rate": 3.8983819428460414e-07,
      "loss": 1.4491,
      "step": 296
    },
    {
      "epoch": 1.83,
      "learning_rate": 3.626564098711904e-07,
      "loss": 1.2908,
      "step": 297
    },
    {
      "epoch": 1.83,
      "learning_rate": 3.364393296574453e-07,
      "loss": 1.2405,
      "step": 298
    },
    {
      "epoch": 1.84,
      "learning_rate": 3.1118957798970895e-07,
      "loss": 1.2314,
      "step": 299
    },
    {
      "epoch": 1.85,
      "learning_rate": 2.8690968238412444e-07,
      "loss": 1.2061,
      "step": 300
    },
    {
      "epoch": 1.85,
      "learning_rate": 2.636020732736233e-07,
      "loss": 1.7261,
      "step": 301
    },
    {
      "epoch": 1.86,
      "learning_rate": 2.41269083764647e-07,
      "loss": 1.154,
      "step": 302
    },
    {
      "epoch": 1.86,
      "learning_rate": 2.1991294940359343e-07,
      "loss": 1.1905,
      "step": 303
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.9953580795304628e-07,
      "loss": 1.3603,
      "step": 304
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.8013969917777484e-07,
      "loss": 1.2766,
      "step": 305
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.6172656464055748e-07,
      "loss": 1.0368,
      "step": 306
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.4429824750782583e-07,
      "loss": 1.2633,
      "step": 307
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.278564923651704e-07,
      "loss": 1.6303,
      "step": 308
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.1240294504269777e-07,
      "loss": 1.2937,
      "step": 309
    },
    {
      "epoch": 1.91,
      "learning_rate": 9.793915245028595e-08,
      "loss": 1.7401,
      "step": 310
    },
    {
      "epoch": 1.91,
      "learning_rate": 8.446656242273699e-08,
      "loss": 1.1894,
      "step": 311
    },
    {
      "epoch": 1.92,
      "learning_rate": 7.19865235748507e-08,
      "loss": 1.1576,
      "step": 312
    },
    {
      "epoch": 1.93,
      "learning_rate": 6.05002851664227e-08,
      "loss": 1.0123,
      "step": 313
    },
    {
      "epoch": 1.93,
      "learning_rate": 5.000899697719552e-08,
      "loss": 1.2243,
      "step": 314
    },
    {
      "epoch": 1.94,
      "learning_rate": 4.05137091917629e-08,
      "loss": 1.1567,
      "step": 315
    },
    {
      "epoch": 1.94,
      "learning_rate": 3.201537229445051e-08,
      "loss": 1.0936,
      "step": 316
    },
    {
      "epoch": 1.95,
      "learning_rate": 2.4514836974165458e-08,
      "loss": 1.2039,
      "step": 317
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.8012854039244354e-08,
      "loss": 1.2848,
      "step": 318
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.2510074342301225e-08,
      "loss": 2.3523,
      "step": 319
    },
    {
      "epoch": 1.97,
      "learning_rate": 8.007048715068522e-09,
      "loss": 1.1037,
      "step": 320
    },
    {
      "epoch": 1.98,
      "learning_rate": 4.504227913265658e-09,
      "loss": 1.09,
      "step": 321
    },
    {
      "epoch": 1.98,
      "learning_rate": 2.0019625714740032e-09,
      "loss": 1.1748,
      "step": 322
    },
    {
      "epoch": 1.99,
      "learning_rate": 5.005031680394012e-10,
      "loss": 0.9517,
      "step": 323
    },
    {
      "epoch": 1.99,
      "learning_rate": 0.0,
      "loss": 1.4235,
      "step": 324
    },
    {
      "epoch": 1.99,
      "step": 324,
      "total_flos": 4.296843871230362e+16,
      "train_loss": 1.4149539908509197,
      "train_runtime": 5540.1348,
      "train_samples_per_second": 1.877,
      "train_steps_per_second": 0.058
    }
  ],
  "max_steps": 324,
  "num_train_epochs": 2,
  "total_flos": 4.296843871230362e+16,
  "trial_name": null,
  "trial_params": null
}