{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9879518072289155,
  "global_step": 186,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.1166,
      "step": 1
    },
    {
      "epoch": 0.03,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.1079,
      "step": 2
    },
    {
      "epoch": 0.05,
      "learning_rate": 1e-05,
      "loss": 1.0037,
      "step": 3
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.9462,
      "step": 4
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.8892,
      "step": 5
    },
    {
      "epoch": 0.1,
      "learning_rate": 2e-05,
      "loss": 0.9079,
      "step": 6
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9998476951563914e-05,
      "loss": 0.8614,
      "step": 7
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.999390827019096e-05,
      "loss": 0.8644,
      "step": 8
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9986295347545738e-05,
      "loss": 0.8468,
      "step": 9
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9975640502598243e-05,
      "loss": 0.8693,
      "step": 10
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9961946980917457e-05,
      "loss": 0.839,
      "step": 11
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9945218953682736e-05,
      "loss": 0.8395,
      "step": 12
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9925461516413224e-05,
      "loss": 0.8389,
      "step": 13
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9902680687415704e-05,
      "loss": 0.83,
      "step": 14
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9876883405951378e-05,
      "loss": 0.8273,
      "step": 15
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9848077530122083e-05,
      "loss": 0.8489,
      "step": 16
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9816271834476642e-05,
      "loss": 0.7885,
      "step": 17
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9781476007338058e-05,
      "loss": 0.8397,
      "step": 18
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9743700647852356e-05,
      "loss": 0.7727,
      "step": 19
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9702957262759964e-05,
      "loss": 0.7558,
      "step": 20
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9659258262890683e-05,
      "loss": 0.7638,
      "step": 21
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.961261695938319e-05,
      "loss": 0.8176,
      "step": 22
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9563047559630356e-05,
      "loss": 0.8318,
      "step": 23
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9510565162951538e-05,
      "loss": 0.8329,
      "step": 24
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.945518575599317e-05,
      "loss": 0.8085,
      "step": 25
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9396926207859085e-05,
      "loss": 0.7387,
      "step": 26
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9335804264972018e-05,
      "loss": 0.8065,
      "step": 27
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.9271838545667876e-05,
      "loss": 0.789,
      "step": 28
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.9205048534524405e-05,
      "loss": 0.8126,
      "step": 29
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.913545457642601e-05,
      "loss": 0.7798,
      "step": 30
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.9063077870366504e-05,
      "loss": 0.7797,
      "step": 31
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.8987940462991673e-05,
      "loss": 0.7673,
      "step": 32
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.891006524188368e-05,
      "loss": 0.8111,
      "step": 33
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.8829475928589272e-05,
      "loss": 0.7703,
      "step": 34
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.874619707139396e-05,
      "loss": 0.7802,
      "step": 35
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.866025403784439e-05,
      "loss": 0.7463,
      "step": 36
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.8571673007021124e-05,
      "loss": 0.7685,
      "step": 37
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.848048096156426e-05,
      "loss": 0.7878,
      "step": 38
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.8386705679454243e-05,
      "loss": 0.7804,
      "step": 39
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8290375725550417e-05,
      "loss": 0.7496,
      "step": 40
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.819152044288992e-05,
      "loss": 0.7291,
      "step": 41
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.8090169943749477e-05,
      "loss": 0.719,
      "step": 42
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.798635510047293e-05,
      "loss": 0.7418,
      "step": 43
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.788010753606722e-05,
      "loss": 0.7863,
      "step": 44
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.777145961456971e-05,
      "loss": 0.7877,
      "step": 45
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.766044443118978e-05,
      "loss": 0.7172,
      "step": 46
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.7547095802227723e-05,
      "loss": 0.8055,
      "step": 47
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.7431448254773943e-05,
      "loss": 0.7943,
      "step": 48
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.7313537016191706e-05,
      "loss": 0.7359,
      "step": 49
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.7193398003386514e-05,
      "loss": 0.7287,
      "step": 50
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.7071067811865477e-05,
      "loss": 0.7704,
      "step": 51
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.6946583704589973e-05,
      "loss": 0.7463,
      "step": 52
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.6819983600624986e-05,
      "loss": 0.7505,
      "step": 53
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.6691306063588583e-05,
      "loss": 0.7528,
      "step": 54
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.6560590289905074e-05,
      "loss": 0.7547,
      "step": 55
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.6427876096865394e-05,
      "loss": 0.6967,
      "step": 56
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.6293203910498375e-05,
      "loss": 0.7067,
      "step": 57
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.6156614753256583e-05,
      "loss": 0.7201,
      "step": 58
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.6018150231520486e-05,
      "loss": 0.7541,
      "step": 59
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.5877852522924733e-05,
      "loss": 0.7247,
      "step": 60
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.573576436351046e-05,
      "loss": 0.7124,
      "step": 61
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.5591929034707468e-05,
      "loss": 0.7564,
      "step": 62
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.5446390350150272e-05,
      "loss": 0.5717,
      "step": 63
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.529919264233205e-05,
      "loss": 0.5542,
      "step": 64
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.5150380749100545e-05,
      "loss": 0.5303,
      "step": 65
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.4975,
      "step": 66
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.4848096202463373e-05,
      "loss": 0.4998,
      "step": 67
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.469471562785891e-05,
      "loss": 0.5106,
      "step": 68
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.4539904997395468e-05,
      "loss": 0.4744,
      "step": 69
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.4383711467890776e-05,
      "loss": 0.4874,
      "step": 70
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.4226182617406996e-05,
      "loss": 0.4865,
      "step": 71
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.4067366430758004e-05,
      "loss": 0.5166,
      "step": 72
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.3907311284892737e-05,
      "loss": 0.5052,
      "step": 73
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.3746065934159123e-05,
      "loss": 0.477,
      "step": 74
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.3583679495453e-05,
      "loss": 0.4927,
      "step": 75
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.342020143325669e-05,
      "loss": 0.4834,
      "step": 76
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.3255681544571568e-05,
      "loss": 0.4601,
      "step": 77
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.3090169943749475e-05,
      "loss": 0.5014,
      "step": 78
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.2923717047227368e-05,
      "loss": 0.4874,
      "step": 79
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.2756373558169992e-05,
      "loss": 0.4659,
      "step": 80
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.2588190451025209e-05,
      "loss": 0.4712,
      "step": 81
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.2419218955996677e-05,
      "loss": 0.4778,
      "step": 82
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.2249510543438652e-05,
      "loss": 0.4893,
      "step": 83
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.2079116908177592e-05,
      "loss": 0.4945,
      "step": 84
    },
    {
      "epoch": 1.37,
      "learning_rate": 1.190808995376545e-05,
      "loss": 0.4899,
      "step": 85
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.1736481776669307e-05,
      "loss": 0.4667,
      "step": 86
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.156434465040231e-05,
      "loss": 0.4696,
      "step": 87
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.1391731009600655e-05,
      "loss": 0.461,
      "step": 88
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.1218693434051475e-05,
      "loss": 0.4387,
      "step": 89
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.1045284632676535e-05,
      "loss": 0.4862,
      "step": 90
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.0871557427476585e-05,
      "loss": 0.4836,
      "step": 91
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.0697564737441254e-05,
      "loss": 0.475,
      "step": 92
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.0523359562429441e-05,
      "loss": 0.5063,
      "step": 93
    },
    {
      "epoch": 1.51,
      "learning_rate": 1.0348994967025012e-05,
      "loss": 0.4836,
      "step": 94
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.0174524064372837e-05,
      "loss": 0.4892,
      "step": 95
    },
    {
      "epoch": 1.54,
      "learning_rate": 1e-05,
      "loss": 0.449,
      "step": 96
    },
    {
      "epoch": 1.56,
      "learning_rate": 9.825475935627165e-06,
      "loss": 0.453,
      "step": 97
    },
    {
      "epoch": 1.57,
      "learning_rate": 9.651005032974994e-06,
      "loss": 0.4785,
      "step": 98
    },
    {
      "epoch": 1.59,
      "learning_rate": 9.476640437570562e-06,
      "loss": 0.4835,
      "step": 99
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.302435262558748e-06,
      "loss": 0.4778,
      "step": 100
    },
    {
      "epoch": 1.62,
      "learning_rate": 9.128442572523418e-06,
      "loss": 0.4749,
      "step": 101
    },
    {
      "epoch": 1.64,
      "learning_rate": 8.954715367323468e-06,
      "loss": 0.4832,
      "step": 102
    },
    {
      "epoch": 1.65,
      "learning_rate": 8.781306565948528e-06,
      "loss": 0.5055,
      "step": 103
    },
    {
      "epoch": 1.67,
      "learning_rate": 8.60826899039935e-06,
      "loss": 0.4938,
      "step": 104
    },
    {
      "epoch": 1.69,
      "learning_rate": 8.43565534959769e-06,
      "loss": 0.4953,
      "step": 105
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.263518223330698e-06,
      "loss": 0.452,
      "step": 106
    },
    {
      "epoch": 1.72,
      "learning_rate": 8.091910046234552e-06,
      "loss": 0.4631,
      "step": 107
    },
    {
      "epoch": 1.73,
      "learning_rate": 7.92088309182241e-06,
      "loss": 0.452,
      "step": 108
    },
    {
      "epoch": 1.75,
      "learning_rate": 7.750489456561351e-06,
      "loss": 0.5013,
      "step": 109
    },
    {
      "epoch": 1.77,
      "learning_rate": 7.580781044003324e-06,
      "loss": 0.4987,
      "step": 110
    },
    {
      "epoch": 1.78,
      "learning_rate": 7.411809548974792e-06,
      "loss": 0.4759,
      "step": 111
    },
    {
      "epoch": 1.8,
      "learning_rate": 7.243626441830009e-06,
      "loss": 0.4932,
      "step": 112
    },
    {
      "epoch": 1.82,
      "learning_rate": 7.076282952772634e-06,
      "loss": 0.4847,
      "step": 113
    },
    {
      "epoch": 1.83,
      "learning_rate": 6.909830056250527e-06,
      "loss": 0.437,
      "step": 114
    },
    {
      "epoch": 1.85,
      "learning_rate": 6.744318455428436e-06,
      "loss": 0.4861,
      "step": 115
    },
    {
      "epoch": 1.86,
      "learning_rate": 6.579798566743314e-06,
      "loss": 0.4599,
      "step": 116
    },
    {
      "epoch": 1.88,
      "learning_rate": 6.4163205045469975e-06,
      "loss": 0.4861,
      "step": 117
    },
    {
      "epoch": 1.9,
      "learning_rate": 6.25393406584088e-06,
      "loss": 0.4565,
      "step": 118
    },
    {
      "epoch": 1.91,
      "learning_rate": 6.092688715107265e-06,
      "loss": 0.4403,
      "step": 119
    },
    {
      "epoch": 1.93,
      "learning_rate": 5.932633569242e-06,
      "loss": 0.4631,
      "step": 120
    },
    {
      "epoch": 1.94,
      "learning_rate": 5.773817382593008e-06,
      "loss": 0.4572,
      "step": 121
    },
    {
      "epoch": 1.96,
      "learning_rate": 5.616288532109225e-06,
      "loss": 0.4597,
      "step": 122
    },
    {
      "epoch": 1.98,
      "learning_rate": 5.460095002604533e-06,
      "loss": 0.4731,
      "step": 123
    },
    {
      "epoch": 1.99,
      "learning_rate": 5.305284372141095e-06,
      "loss": 0.4798,
      "step": 124
    },
    {
      "epoch": 2.01,
      "learning_rate": 5.151903797536631e-06,
      "loss": 0.4207,
      "step": 125
    },
    {
      "epoch": 2.02,
      "learning_rate": 5.000000000000003e-06,
      "loss": 0.3084,
      "step": 126
    },
    {
      "epoch": 2.04,
      "learning_rate": 4.849619250899458e-06,
      "loss": 0.3211,
      "step": 127
    },
    {
      "epoch": 2.06,
      "learning_rate": 4.700807357667953e-06,
      "loss": 0.3206,
      "step": 128
    },
    {
      "epoch": 2.07,
      "learning_rate": 4.5536096498497295e-06,
      "loss": 0.2936,
      "step": 129
    },
    {
      "epoch": 2.09,
      "learning_rate": 4.408070965292534e-06,
      "loss": 0.2846,
      "step": 130
    },
    {
      "epoch": 2.1,
      "learning_rate": 4.264235636489542e-06,
      "loss": 0.3028,
      "step": 131
    },
    {
      "epoch": 2.12,
      "learning_rate": 4.12214747707527e-06,
      "loss": 0.309,
      "step": 132
    },
    {
      "epoch": 2.14,
      "learning_rate": 3.981849768479516e-06,
      "loss": 0.2914,
      "step": 133
    },
    {
      "epoch": 2.15,
      "learning_rate": 3.8433852467434175e-06,
      "loss": 0.2821,
      "step": 134
    },
    {
      "epoch": 2.17,
      "learning_rate": 3.7067960895016277e-06,
      "loss": 0.2961,
      "step": 135
    },
    {
      "epoch": 2.18,
      "learning_rate": 3.5721239031346067e-06,
      "loss": 0.3004,
      "step": 136
    },
    {
      "epoch": 2.2,
      "learning_rate": 3.4394097100949286e-06,
      "loss": 0.281,
      "step": 137
    },
    {
      "epoch": 2.22,
      "learning_rate": 3.308693936411421e-06,
      "loss": 0.2975,
      "step": 138
    },
    {
      "epoch": 2.23,
      "learning_rate": 3.1800163993750166e-06,
      "loss": 0.2784,
      "step": 139
    },
    {
      "epoch": 2.25,
      "learning_rate": 3.0534162954100264e-06,
      "loss": 0.2797,
      "step": 140
    },
    {
      "epoch": 2.27,
      "learning_rate": 2.9289321881345257e-06,
      "loss": 0.2755,
      "step": 141
    },
    {
      "epoch": 2.28,
      "learning_rate": 2.8066019966134907e-06,
      "loss": 0.3029,
      "step": 142
    },
    {
      "epoch": 2.3,
      "learning_rate": 2.6864629838082957e-06,
      "loss": 0.2718,
      "step": 143
    },
    {
      "epoch": 2.31,
      "learning_rate": 2.5685517452260566e-06,
      "loss": 0.2636,
      "step": 144
    },
    {
      "epoch": 2.33,
      "learning_rate": 2.45290419777228e-06,
      "loss": 0.294,
      "step": 145
    },
    {
      "epoch": 2.35,
      "learning_rate": 2.339555568810221e-06,
      "loss": 0.2779,
      "step": 146
    },
    {
      "epoch": 2.36,
      "learning_rate": 2.2285403854302912e-06,
      "loss": 0.3099,
      "step": 147
    },
    {
      "epoch": 2.38,
      "learning_rate": 2.119892463932781e-06,
      "loss": 0.2679,
      "step": 148
    },
    {
      "epoch": 2.39,
      "learning_rate": 2.013644899527074e-06,
      "loss": 0.2911,
      "step": 149
    },
    {
      "epoch": 2.41,
      "learning_rate": 1.9098300562505266e-06,
      "loss": 0.2653,
      "step": 150
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.808479557110081e-06,
      "loss": 0.2732,
      "step": 151
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.709624274449584e-06,
      "loss": 0.2805,
      "step": 152
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.6132943205457607e-06,
      "loss": 0.2853,
      "step": 153
    },
    {
      "epoch": 2.47,
      "learning_rate": 1.5195190384357405e-06,
      "loss": 0.3033,
      "step": 154
    },
    {
      "epoch": 2.49,
      "learning_rate": 1.4283269929788779e-06,
      "loss": 0.2935,
      "step": 155
    },
    {
      "epoch": 2.51,
      "learning_rate": 1.339745962155613e-06,
      "loss": 0.2713,
      "step": 156
    },
    {
      "epoch": 2.52,
      "learning_rate": 1.2538029286060428e-06,
      "loss": 0.2641,
      "step": 157
    },
    {
      "epoch": 2.54,
      "learning_rate": 1.1705240714107301e-06,
      "loss": 0.2892,
      "step": 158
    },
    {
      "epoch": 2.55,
      "learning_rate": 1.0899347581163222e-06,
      "loss": 0.2799,
      "step": 159
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.012059537008332e-06,
      "loss": 0.2715,
      "step": 160
    },
    {
      "epoch": 2.59,
      "learning_rate": 9.369221296335007e-07,
      "loss": 0.297,
      "step": 161
    },
    {
      "epoch": 2.6,
      "learning_rate": 8.645454235739903e-07,
      "loss": 0.2717,
      "step": 162
    },
    {
      "epoch": 2.62,
      "learning_rate": 7.949514654755963e-07,
      "loss": 0.2658,
      "step": 163
    },
    {
      "epoch": 2.63,
      "learning_rate": 7.281614543321269e-07,
      "loss": 0.2821,
      "step": 164
    },
    {
      "epoch": 2.65,
      "learning_rate": 6.641957350279838e-07,
      "loss": 0.2912,
      "step": 165
    },
    {
      "epoch": 2.67,
      "learning_rate": 6.030737921409169e-07,
      "loss": 0.2728,
      "step": 166
    },
    {
      "epoch": 2.68,
      "learning_rate": 5.448142440068316e-07,
      "loss": 0.2915,
      "step": 167
    },
    {
      "epoch": 2.7,
      "learning_rate": 4.894348370484648e-07,
      "loss": 0.3042,
      "step": 168
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.3695244036964567e-07,
      "loss": 0.2685,
      "step": 169
    },
    {
      "epoch": 2.73,
      "learning_rate": 3.8738304061681107e-07,
      "loss": 0.3022,
      "step": 170
    },
    {
      "epoch": 2.75,
      "learning_rate": 3.4074173710931804e-07,
      "loss": 0.2707,
      "step": 171
    },
    {
      "epoch": 2.76,
      "learning_rate": 2.970427372400353e-07,
      "loss": 0.3072,
      "step": 172
    },
    {
      "epoch": 2.78,
      "learning_rate": 2.5629935214764866e-07,
      "loss": 0.2731,
      "step": 173
    },
    {
      "epoch": 2.8,
      "learning_rate": 2.1852399266194312e-07,
      "loss": 0.2907,
      "step": 174
    },
    {
      "epoch": 2.81,
      "learning_rate": 1.8372816552336025e-07,
      "loss": 0.2757,
      "step": 175
    },
    {
      "epoch": 2.83,
      "learning_rate": 1.519224698779198e-07,
      "loss": 0.2658,
      "step": 176
    },
    {
      "epoch": 2.84,
      "learning_rate": 1.231165940486234e-07,
      "loss": 0.2864,
      "step": 177
    },
    {
      "epoch": 2.86,
      "learning_rate": 9.731931258429638e-08,
      "loss": 0.2767,
      "step": 178
    },
    {
      "epoch": 2.88,
      "learning_rate": 7.453848358678018e-08,
      "loss": 0.2858,
      "step": 179
    },
    {
      "epoch": 2.89,
      "learning_rate": 5.4781046317267103e-08,
      "loss": 0.2683,
      "step": 180
    },
    {
      "epoch": 2.91,
      "learning_rate": 3.805301908254455e-08,
      "loss": 0.2911,
      "step": 181
    },
    {
      "epoch": 2.92,
      "learning_rate": 2.4359497401758026e-08,
      "loss": 0.2775,
      "step": 182
    },
    {
      "epoch": 2.94,
      "learning_rate": 1.370465245426167e-08,
      "loss": 0.2777,
      "step": 183
    },
    {
      "epoch": 2.96,
      "learning_rate": 6.091729809042379e-09,
      "loss": 0.2737,
      "step": 184
    },
    {
      "epoch": 2.97,
      "learning_rate": 1.5230484360873043e-09,
      "loss": 0.2778,
      "step": 185
    },
    {
      "epoch": 2.99,
      "learning_rate": 0.0,
      "loss": 0.2891,
      "step": 186
    },
    {
      "epoch": 2.99,
      "step": 186,
      "total_flos": 1.5592942139028275e+17,
      "train_loss": 0.5238676683236194,
      "train_runtime": 2078.491,
      "train_samples_per_second": 11.491,
      "train_steps_per_second": 0.089
    }
  ],
  "max_steps": 186,
  "num_train_epochs": 3,
  "total_flos": 1.5592942139028275e+17,
  "trial_name": null,
  "trial_params": null
}
|
|