| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 1.1605415860735009, |
| "eval_steps": 500, |
| "global_step": 3000, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "learning_rate": 2e-06, |
| "loss": 1.2989, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 5.157651144475182e-06, |
| "loss": 1.2283, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 7.004758654352406e-06, |
| "loss": 1.2452, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 8.315302288950362e-06, |
| "loss": 1.2448, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 9.331838906210157e-06, |
| "loss": 1.257, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.0162409798827585e-05, |
| "loss": 1.2302, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.0864647482583226e-05, |
| "loss": 1.212, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.1472953433425543e-05, |
| "loss": 1.2297, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.200951730870481e-05, |
| "loss": 1.2718, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.248949005068534e-05, |
| "loss": 1.25, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.2923678209823691e-05, |
| "loss": 1.284, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.3320060943302767e-05, |
| "loss": 1.2596, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.3684697711049637e-05, |
| "loss": 1.212, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.4022298627058407e-05, |
| "loss": 1.1908, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.4336597560562562e-05, |
| "loss": 1.2259, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.4630604577900725e-05, |
| "loss": 1.2106, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.4906781718673912e-05, |
| "loss": 1.2671, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.516716845317999e-05, |
| "loss": 1.2532, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.5413473174472748e-05, |
| "loss": 1.2591, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.564714119516052e-05, |
| "loss": 1.2781, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.5869406136935633e-05, |
| "loss": 1.2656, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.6081329354298873e-05, |
| "loss": 1.2559, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.6283830587647817e-05, |
| "loss": 1.2312, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.647771208777795e-05, |
| "loss": 1.2369, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.6663677812420316e-05, |
| "loss": 1.243, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.684234885552482e-05, |
| "loss": 1.2245, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.7014275963057212e-05, |
| "loss": 1.2699, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.717994977153359e-05, |
| "loss": 1.2328, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.7339809249103263e-05, |
| "loss": 1.1813, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.7494248705037744e-05, |
| "loss": 1.2576, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.764362364944784e-05, |
| "loss": 1.2681, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.778825572237591e-05, |
| "loss": 1.2397, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.7928436864176096e-05, |
| "loss": 1.2427, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8064432863149094e-05, |
| "loss": 1.2252, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8196486388793384e-05, |
| "loss": 1.2142, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.832481959765517e-05, |
| "loss": 1.2665, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.844963638206834e-05, |
| "loss": 1.2361, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8571124318947933e-05, |
| "loss": 1.173, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8689456365402042e-05, |
| "loss": 1.2225, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8804792339635704e-05, |
| "loss": 1.2681, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.8917280218967596e-05, |
| "loss": 1.2564, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9027057281410815e-05, |
| "loss": 1.2393, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.913425111291042e-05, |
| "loss": 1.2745, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9238980498774055e-05, |
| "loss": 1.2593, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9341356214914966e-05, |
| "loss": 1.2387, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9441481732123002e-05, |
| "loss": 1.2835, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9539453844588976e-05, |
| "loss": 1.2224, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.963536323225313e-05, |
| "loss": 1.2948, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9729294965166455e-05, |
| "loss": 1.2309, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.98213289568955e-05, |
| "loss": 1.2903, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9911540373026317e-05, |
| "loss": 1.2801, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2e-05, |
| "loss": 1.2455, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2e-05, |
| "loss": 1.2622, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9996483001172335e-05, |
| "loss": 1.2051, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9992966002344665e-05, |
| "loss": 1.2206, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9989449003517e-05, |
| "loss": 1.2498, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9985932004689332e-05, |
| "loss": 1.2692, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9982415005861666e-05, |
| "loss": 1.2869, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9978898007034e-05, |
| "loss": 1.2664, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9975381008206333e-05, |
| "loss": 1.2224, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9971864009378663e-05, |
| "loss": 1.3077, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9968347010550997e-05, |
| "loss": 1.2306, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.996483001172333e-05, |
| "loss": 1.2494, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.9961313012895664e-05, |
| "loss": 1.2461, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.9957796014067997e-05, |
| "loss": 1.2963, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.995427901524033e-05, |
| "loss": 1.2357, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.9950762016412664e-05, |
| "loss": 1.2395, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.9947245017584994e-05, |
| "loss": 1.1918, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.9943728018757328e-05, |
| "loss": 1.2161, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.994021101992966e-05, |
| "loss": 1.2473, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.9936694021101995e-05, |
| "loss": 1.2574, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.993317702227433e-05, |
| "loss": 1.2367, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.9929660023446662e-05, |
| "loss": 1.2481, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.9926143024618992e-05, |
| "loss": 1.3223, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.9922626025791326e-05, |
| "loss": 1.259, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.991910902696366e-05, |
| "loss": 1.2232, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.9915592028135993e-05, |
| "loss": 1.2023, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.9912075029308326e-05, |
| "loss": 1.2357, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.990855803048066e-05, |
| "loss": 1.2542, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.9905041031652993e-05, |
| "loss": 1.2384, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.9901524032825323e-05, |
| "loss": 1.2711, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.9898007033997657e-05, |
| "loss": 1.222, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.989449003516999e-05, |
| "loss": 1.2772, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.9890973036342324e-05, |
| "loss": 1.28, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.9887456037514657e-05, |
| "loss": 1.2511, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.988393903868699e-05, |
| "loss": 1.2441, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.988042203985932e-05, |
| "loss": 1.1894, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.9876905041031655e-05, |
| "loss": 1.2739, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.9873388042203988e-05, |
| "loss": 1.2734, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.986987104337632e-05, |
| "loss": 1.2593, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.9866354044548652e-05, |
| "loss": 1.222, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.9862837045720985e-05, |
| "loss": 1.2288, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.985932004689332e-05, |
| "loss": 1.2263, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.9855803048065652e-05, |
| "loss": 1.2753, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.9852286049237986e-05, |
| "loss": 1.2921, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.9848769050410316e-05, |
| "loss": 1.1903, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.984525205158265e-05, |
| "loss": 1.2407, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.9841735052754983e-05, |
| "loss": 1.2721, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.9838218053927317e-05, |
| "loss": 1.2731, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.983470105509965e-05, |
| "loss": 1.2303, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.9831184056271984e-05, |
| "loss": 1.2578, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.9827667057444317e-05, |
| "loss": 1.2874, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.9824150058616647e-05, |
| "loss": 1.2545, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.982063305978898e-05, |
| "loss": 1.2261, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.9817116060961314e-05, |
| "loss": 1.2481, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.9813599062133648e-05, |
| "loss": 1.2261, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.981008206330598e-05, |
| "loss": 1.2741, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.9806565064478315e-05, |
| "loss": 1.2026, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.9803048065650645e-05, |
| "loss": 1.2126, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.979953106682298e-05, |
| "loss": 1.2648, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.9796014067995312e-05, |
| "loss": 1.2309, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.9792497069167646e-05, |
| "loss": 1.2655, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.978898007033998e-05, |
| "loss": 1.2746, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.9785463071512313e-05, |
| "loss": 1.2686, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.9781946072684643e-05, |
| "loss": 1.2459, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 1.9778429073856976e-05, |
| "loss": 1.2854, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.977491207502931e-05, |
| "loss": 1.2334, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.9771395076201643e-05, |
| "loss": 1.2291, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.9767878077373977e-05, |
| "loss": 1.2283, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.976436107854631e-05, |
| "loss": 1.2449, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.9760844079718644e-05, |
| "loss": 1.2809, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.9757327080890974e-05, |
| "loss": 1.1972, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.9753810082063308e-05, |
| "loss": 1.272, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.975029308323564e-05, |
| "loss": 1.2032, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.974677608440797e-05, |
| "loss": 1.1844, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.9743259085580305e-05, |
| "loss": 1.2546, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.973974208675264e-05, |
| "loss": 1.2803, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.9736225087924972e-05, |
| "loss": 1.2484, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.9732708089097305e-05, |
| "loss": 1.2881, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.972919109026964e-05, |
| "loss": 1.25, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.972567409144197e-05, |
| "loss": 1.2427, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.9722157092614303e-05, |
| "loss": 1.2256, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.9718640093786636e-05, |
| "loss": 1.2608, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.971512309495897e-05, |
| "loss": 1.2447, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.9711606096131303e-05, |
| "loss": 1.2333, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.9708089097303637e-05, |
| "loss": 1.2404, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.970457209847597e-05, |
| "loss": 1.2485, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.97010550996483e-05, |
| "loss": 1.281, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.9697538100820634e-05, |
| "loss": 1.2533, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.9694021101992967e-05, |
| "loss": 1.2072, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.96905041031653e-05, |
| "loss": 1.2717, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 1.9686987104337634e-05, |
| "loss": 1.3016, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.9683470105509968e-05, |
| "loss": 1.2499, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.9679953106682298e-05, |
| "loss": 1.2427, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.967643610785463e-05, |
| "loss": 1.2591, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.9672919109026965e-05, |
| "loss": 1.255, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.96694021101993e-05, |
| "loss": 1.2626, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.9665885111371632e-05, |
| "loss": 1.2348, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.9662368112543966e-05, |
| "loss": 1.2486, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.9658851113716296e-05, |
| "loss": 1.2569, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.965533411488863e-05, |
| "loss": 1.2986, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.9651817116060963e-05, |
| "loss": 1.2388, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.9648300117233296e-05, |
| "loss": 1.2505, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.964478311840563e-05, |
| "loss": 1.2587, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.9641266119577964e-05, |
| "loss": 1.2578, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.9637749120750297e-05, |
| "loss": 1.1769, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.9634232121922627e-05, |
| "loss": 1.236, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.963071512309496e-05, |
| "loss": 1.2021, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.9627198124267294e-05, |
| "loss": 1.2279, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.9623681125439624e-05, |
| "loss": 1.2757, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.9620164126611958e-05, |
| "loss": 1.2061, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.961664712778429e-05, |
| "loss": 1.2574, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.9613130128956625e-05, |
| "loss": 1.2492, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.960961313012896e-05, |
| "loss": 1.2149, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.9606096131301292e-05, |
| "loss": 1.2302, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.9602579132473622e-05, |
| "loss": 1.217, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.9599062133645956e-05, |
| "loss": 1.2454, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 1.959554513481829e-05, |
| "loss": 1.2599, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.9592028135990623e-05, |
| "loss": 1.2616, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.9588511137162956e-05, |
| "loss": 1.213, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.958499413833529e-05, |
| "loss": 1.2617, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.9581477139507623e-05, |
| "loss": 1.2285, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.9577960140679953e-05, |
| "loss": 1.2745, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.9574443141852287e-05, |
| "loss": 1.2792, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.957092614302462e-05, |
| "loss": 1.2086, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.9567409144196954e-05, |
| "loss": 1.2539, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.9563892145369288e-05, |
| "loss": 1.2103, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.956037514654162e-05, |
| "loss": 1.203, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.955685814771395e-05, |
| "loss": 1.2195, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.9553341148886285e-05, |
| "loss": 1.2237, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.9549824150058618e-05, |
| "loss": 1.2178, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.9546307151230952e-05, |
| "loss": 1.2866, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.9542790152403285e-05, |
| "loss": 1.254, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.953927315357562e-05, |
| "loss": 1.184, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.953575615474795e-05, |
| "loss": 1.2311, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.9532239155920282e-05, |
| "loss": 1.2549, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.9528722157092616e-05, |
| "loss": 1.2641, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.952520515826495e-05, |
| "loss": 1.2265, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.9521688159437283e-05, |
| "loss": 1.2519, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.9518171160609617e-05, |
| "loss": 1.2031, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.951465416178195e-05, |
| "loss": 1.2373, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.951113716295428e-05, |
| "loss": 1.2621, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 1.9507620164126614e-05, |
| "loss": 1.2545, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9504103165298947e-05, |
| "loss": 1.2649, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9500586166471277e-05, |
| "loss": 1.2677, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.949706916764361e-05, |
| "loss": 1.2643, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9493552168815944e-05, |
| "loss": 1.2076, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9490035169988278e-05, |
| "loss": 1.261, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.948651817116061e-05, |
| "loss": 1.276, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9483001172332945e-05, |
| "loss": 1.2272, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9479484173505275e-05, |
| "loss": 1.2521, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.947596717467761e-05, |
| "loss": 1.3199, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9472450175849942e-05, |
| "loss": 1.234, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9468933177022276e-05, |
| "loss": 1.2335, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.946541617819461e-05, |
| "loss": 1.2568, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9461899179366943e-05, |
| "loss": 1.2296, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9458382180539276e-05, |
| "loss": 1.243, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9454865181711606e-05, |
| "loss": 1.2448, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.945134818288394e-05, |
| "loss": 1.2355, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9447831184056273e-05, |
| "loss": 1.2777, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9444314185228607e-05, |
| "loss": 1.2366, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.944079718640094e-05, |
| "loss": 1.169, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9437280187573274e-05, |
| "loss": 1.1827, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9433763188745604e-05, |
| "loss": 1.2845, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9430246189917938e-05, |
| "loss": 1.2342, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.942672919109027e-05, |
| "loss": 1.1907, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9423212192262605e-05, |
| "loss": 1.2877, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9419695193434938e-05, |
| "loss": 1.258, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 1.9416178194607272e-05, |
| "loss": 1.2525, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9412661195779602e-05, |
| "loss": 1.2251, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9409144196951935e-05, |
| "loss": 1.2556, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.940562719812427e-05, |
| "loss": 1.2242, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9402110199296603e-05, |
| "loss": 1.2639, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9398593200468936e-05, |
| "loss": 1.258, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.939507620164127e-05, |
| "loss": 1.2326, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9391559202813603e-05, |
| "loss": 1.256, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9388042203985933e-05, |
| "loss": 1.2565, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9384525205158267e-05, |
| "loss": 1.2284, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.93810082063306e-05, |
| "loss": 1.2535, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.937749120750293e-05, |
| "loss": 1.2684, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9373974208675264e-05, |
| "loss": 1.2659, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9370457209847597e-05, |
| "loss": 1.3303, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.936694021101993e-05, |
| "loss": 1.2513, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9363423212192265e-05, |
| "loss": 1.2688, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9359906213364598e-05, |
| "loss": 1.2292, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9356389214536928e-05, |
| "loss": 1.2336, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9352872215709262e-05, |
| "loss": 1.2747, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9349355216881595e-05, |
| "loss": 1.2719, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.934583821805393e-05, |
| "loss": 1.2507, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9342321219226262e-05, |
| "loss": 1.2515, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9338804220398596e-05, |
| "loss": 1.251, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.933528722157093e-05, |
| "loss": 1.1902, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.933177022274326e-05, |
| "loss": 1.2442, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9328253223915593e-05, |
| "loss": 1.2722, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 1.9324736225087927e-05, |
| "loss": 1.2312, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.932121922626026e-05, |
| "loss": 1.2382, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9317702227432594e-05, |
| "loss": 1.2218, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9314185228604927e-05, |
| "loss": 1.2638, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9310668229777257e-05, |
| "loss": 1.2155, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.930715123094959e-05, |
| "loss": 1.2615, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9303634232121924e-05, |
| "loss": 1.194, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9300117233294258e-05, |
| "loss": 1.2642, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.929660023446659e-05, |
| "loss": 1.2044, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9293083235638925e-05, |
| "loss": 1.2729, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9289566236811255e-05, |
| "loss": 1.2696, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.928604923798359e-05, |
| "loss": 1.2028, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9282532239155922e-05, |
| "loss": 1.2785, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9279015240328256e-05, |
| "loss": 1.2503, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.927549824150059e-05, |
| "loss": 1.2359, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9271981242672923e-05, |
| "loss": 1.2321, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9268464243845256e-05, |
| "loss": 1.2836, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9264947245017586e-05, |
| "loss": 1.3116, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.926143024618992e-05, |
| "loss": 1.3002, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9257913247362253e-05, |
| "loss": 1.2434, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9254396248534583e-05, |
| "loss": 1.2622, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9250879249706917e-05, |
| "loss": 1.2534, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.924736225087925e-05, |
| "loss": 1.2206, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9243845252051584e-05, |
| "loss": 1.2562, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.9240328253223918e-05, |
| "loss": 1.2212, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.923681125439625e-05, |
| "loss": 1.2322, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 1.923329425556858e-05, |
| "loss": 1.2502, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9229777256740915e-05, |
| "loss": 1.2332, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9226260257913248e-05, |
| "loss": 1.2911, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9222743259085582e-05, |
| "loss": 1.2281, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9219226260257915e-05, |
| "loss": 1.1978, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.921570926143025e-05, |
| "loss": 1.1962, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9212192262602582e-05, |
| "loss": 1.3173, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9208675263774912e-05, |
| "loss": 1.2807, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9205158264947246e-05, |
| "loss": 1.2426, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.920164126611958e-05, |
| "loss": 1.2626, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9198124267291913e-05, |
| "loss": 1.1852, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9194607268464247e-05, |
| "loss": 1.2201, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.919109026963658e-05, |
| "loss": 1.2632, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.918757327080891e-05, |
| "loss": 1.2871, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9184056271981244e-05, |
| "loss": 1.2534, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9180539273153577e-05, |
| "loss": 1.254, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.917702227432591e-05, |
| "loss": 1.1965, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9173505275498244e-05, |
| "loss": 1.2502, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9169988276670578e-05, |
| "loss": 1.301, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9166471277842908e-05, |
| "loss": 1.2426, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.916295427901524e-05, |
| "loss": 1.2193, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9159437280187575e-05, |
| "loss": 1.2496, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.915592028135991e-05, |
| "loss": 1.274, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.915240328253224e-05, |
| "loss": 1.2228, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.9148886283704576e-05, |
| "loss": 1.215, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.914536928487691e-05, |
| "loss": 1.2546, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 1.914185228604924e-05, |
| "loss": 1.266, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9138335287221573e-05, |
| "loss": 1.2338, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9134818288393906e-05, |
| "loss": 1.3236, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9131301289566236e-05, |
| "loss": 1.2147, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.912778429073857e-05, |
| "loss": 1.2386, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9124267291910904e-05, |
| "loss": 1.2463, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9120750293083237e-05, |
| "loss": 1.1392, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.911723329425557e-05, |
| "loss": 1.2836, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9113716295427904e-05, |
| "loss": 1.2874, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9110199296600234e-05, |
| "loss": 1.2657, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9106682297772568e-05, |
| "loss": 1.2173, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.91031652989449e-05, |
| "loss": 1.1987, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9099648300117235e-05, |
| "loss": 1.2584, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.909613130128957e-05, |
| "loss": 1.2424, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9092614302461902e-05, |
| "loss": 1.2306, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9089097303634235e-05, |
| "loss": 1.2586, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9085580304806566e-05, |
| "loss": 1.2662, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.90820633059789e-05, |
| "loss": 1.2534, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9078546307151233e-05, |
| "loss": 1.2206, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9075029308323566e-05, |
| "loss": 1.2373, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.90715123094959e-05, |
| "loss": 1.2332, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9067995310668233e-05, |
| "loss": 1.2517, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9064478311840563e-05, |
| "loss": 1.3123, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9060961313012897e-05, |
| "loss": 1.2091, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.905744431418523e-05, |
| "loss": 1.2734, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9053927315357564e-05, |
| "loss": 1.2511, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 1.9050410316529897e-05, |
| "loss": 1.2356, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.904689331770223e-05, |
| "loss": 1.2232, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.904337631887456e-05, |
| "loss": 1.2511, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9039859320046895e-05, |
| "loss": 1.2447, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9036342321219228e-05, |
| "loss": 1.1925, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9032825322391558e-05, |
| "loss": 1.2303, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9029308323563892e-05, |
| "loss": 1.2047, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.902579132473623e-05, |
| "loss": 1.2533, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9022274325908562e-05, |
| "loss": 1.298, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9018757327080892e-05, |
| "loss": 1.2282, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9015240328253226e-05, |
| "loss": 1.2214, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.901172332942556e-05, |
| "loss": 1.2398, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.900820633059789e-05, |
| "loss": 1.2506, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9004689331770223e-05, |
| "loss": 1.2901, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.9001172332942557e-05, |
| "loss": 1.2468, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.899765533411489e-05, |
| "loss": 1.2553, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.8994138335287224e-05, |
| "loss": 1.194, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.8990621336459557e-05, |
| "loss": 1.2191, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.8987104337631887e-05, |
| "loss": 1.2174, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.898358733880422e-05, |
| "loss": 1.2463, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.8980070339976554e-05, |
| "loss": 1.2878, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.8976553341148888e-05, |
| "loss": 1.2729, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.897303634232122e-05, |
| "loss": 1.2497, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.8969519343493555e-05, |
| "loss": 1.2301, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.896600234466589e-05, |
| "loss": 1.2085, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 1.896248534583822e-05, |
| "loss": 1.2658, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.8958968347010552e-05, |
| "loss": 1.2373, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.8955451348182886e-05, |
| "loss": 1.2445, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.895193434935522e-05, |
| "loss": 1.2832, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.8948417350527553e-05, |
| "loss": 1.2441, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.8944900351699886e-05, |
| "loss": 1.2838, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.8941383352872216e-05, |
| "loss": 1.2053, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.893786635404455e-05, |
| "loss": 1.2471, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.8934349355216883e-05, |
| "loss": 1.2457, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.8930832356389217e-05, |
| "loss": 1.2298, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.892731535756155e-05, |
| "loss": 1.2036, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.8923798358733884e-05, |
| "loss": 1.2505, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.8920281359906214e-05, |
| "loss": 1.2387, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.8916764361078548e-05, |
| "loss": 1.2571, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.891324736225088e-05, |
| "loss": 1.2161, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.890973036342321e-05, |
| "loss": 1.2129, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.8906213364595545e-05, |
| "loss": 1.2685, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.890269636576788e-05, |
| "loss": 1.2546, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.8899179366940215e-05, |
| "loss": 1.2081, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.8895662368112545e-05, |
| "loss": 1.2629, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.889214536928488e-05, |
| "loss": 1.2167, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.8888628370457212e-05, |
| "loss": 1.3007, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.8885111371629543e-05, |
| "loss": 1.2934, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.8881594372801876e-05, |
| "loss": 1.2195, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.887807737397421e-05, |
| "loss": 1.2124, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.8874560375146543e-05, |
| "loss": 1.1977, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 1.8871043376318877e-05, |
| "loss": 1.2349, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.886752637749121e-05, |
| "loss": 1.1921, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.886400937866354e-05, |
| "loss": 1.2403, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.8860492379835874e-05, |
| "loss": 1.1933, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.8856975381008207e-05, |
| "loss": 1.216, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.885345838218054e-05, |
| "loss": 1.232, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.8849941383352874e-05, |
| "loss": 1.2421, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.8846424384525208e-05, |
| "loss": 1.2422, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.884290738569754e-05, |
| "loss": 1.266, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.883939038686987e-05, |
| "loss": 1.2018, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.8835873388042205e-05, |
| "loss": 1.2345, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.883235638921454e-05, |
| "loss": 1.263, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.8828839390386872e-05, |
| "loss": 1.2526, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.8825322391559206e-05, |
| "loss": 1.2809, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.882180539273154e-05, |
| "loss": 1.2513, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.881828839390387e-05, |
| "loss": 1.2032, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.8814771395076203e-05, |
| "loss": 1.2062, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.8811254396248536e-05, |
| "loss": 1.2834, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.880773739742087e-05, |
| "loss": 1.2314, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.8804220398593203e-05, |
| "loss": 1.2579, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.8800703399765537e-05, |
| "loss": 1.2137, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.8797186400937867e-05, |
| "loss": 1.2784, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.87936694021102e-05, |
| "loss": 1.2697, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.8790152403282534e-05, |
| "loss": 1.2865, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.8786635404454864e-05, |
| "loss": 1.2826, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.8783118405627198e-05, |
| "loss": 1.2356, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 1.877960140679953e-05, |
| "loss": 1.2545, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.8776084407971868e-05, |
| "loss": 1.2572, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.87725674091442e-05, |
| "loss": 1.233, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.8769050410316532e-05, |
| "loss": 1.2959, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.8765533411488865e-05, |
| "loss": 1.2132, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.8762016412661196e-05, |
| "loss": 1.2736, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.875849941383353e-05, |
| "loss": 1.2537, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.8754982415005863e-05, |
| "loss": 1.2195, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.8751465416178196e-05, |
| "loss": 1.2707, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.874794841735053e-05, |
| "loss": 1.2505, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.8744431418522863e-05, |
| "loss": 1.2184, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.8740914419695193e-05, |
| "loss": 1.2849, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.8737397420867527e-05, |
| "loss": 1.2546, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.873388042203986e-05, |
| "loss": 1.2703, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.8730363423212194e-05, |
| "loss": 1.2413, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.8726846424384527e-05, |
| "loss": 1.2217, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.872332942555686e-05, |
| "loss": 1.2984, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.871981242672919e-05, |
| "loss": 1.2561, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.8716295427901525e-05, |
| "loss": 1.1561, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.8712778429073858e-05, |
| "loss": 1.1898, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.870926143024619e-05, |
| "loss": 1.2337, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.8705744431418525e-05, |
| "loss": 1.2401, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.870222743259086e-05, |
| "loss": 1.2386, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.8698710433763192e-05, |
| "loss": 1.2284, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.8695193434935522e-05, |
| "loss": 1.2402, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.8691676436107856e-05, |
| "loss": 1.2609, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 1.868815943728019e-05, |
| "loss": 1.242, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.8684642438452523e-05, |
| "loss": 1.2398, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.8681125439624856e-05, |
| "loss": 1.2528, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.867760844079719e-05, |
| "loss": 1.2041, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.867409144196952e-05, |
| "loss": 1.2623, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.8670574443141854e-05, |
| "loss": 1.1978, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.8667057444314187e-05, |
| "loss": 1.2116, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.8663540445486517e-05, |
| "loss": 1.2481, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.866002344665885e-05, |
| "loss": 1.2083, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.8656506447831184e-05, |
| "loss": 1.2342, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.865298944900352e-05, |
| "loss": 1.2854, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.864947245017585e-05, |
| "loss": 1.299, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.8645955451348185e-05, |
| "loss": 1.2212, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.864243845252052e-05, |
| "loss": 1.2349, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.863892145369285e-05, |
| "loss": 1.2428, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.8635404454865182e-05, |
| "loss": 1.2691, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.8631887456037516e-05, |
| "loss": 1.2134, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.862837045720985e-05, |
| "loss": 1.2127, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.8624853458382183e-05, |
| "loss": 1.2266, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.8621336459554516e-05, |
| "loss": 1.1664, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.8617819460726846e-05, |
| "loss": 1.2076, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.861430246189918e-05, |
| "loss": 1.2489, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.8610785463071513e-05, |
| "loss": 1.2366, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.8607268464243847e-05, |
| "loss": 1.2396, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.860375146541618e-05, |
| "loss": 1.2524, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.8600234466588514e-05, |
| "loss": 1.2876, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 1.8596717467760844e-05, |
| "loss": 1.2603, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8593200468933178e-05, |
| "loss": 1.2268, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.858968347010551e-05, |
| "loss": 1.2598, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8586166471277845e-05, |
| "loss": 1.2346, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8582649472450178e-05, |
| "loss": 1.2151, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8579132473622512e-05, |
| "loss": 1.2147, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8575615474794845e-05, |
| "loss": 1.239, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8572098475967175e-05, |
| "loss": 1.2722, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.856858147713951e-05, |
| "loss": 1.258, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8565064478311842e-05, |
| "loss": 1.1988, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8561547479484176e-05, |
| "loss": 1.2802, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.855803048065651e-05, |
| "loss": 1.2147, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8554513481828843e-05, |
| "loss": 1.2312, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8550996483001173e-05, |
| "loss": 1.2385, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8547479484173507e-05, |
| "loss": 1.2842, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.854396248534584e-05, |
| "loss": 1.2587, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.854044548651817e-05, |
| "loss": 1.2441, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8536928487690504e-05, |
| "loss": 1.2699, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8533411488862837e-05, |
| "loss": 1.2533, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8529894490035174e-05, |
| "loss": 1.2665, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8526377491207504e-05, |
| "loss": 1.2384, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8522860492379838e-05, |
| "loss": 1.233, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.851934349355217e-05, |
| "loss": 1.2217, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.85158264947245e-05, |
| "loss": 1.2426, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8512309495896835e-05, |
| "loss": 1.2631, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.850879249706917e-05, |
| "loss": 1.2111, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 1.8505275498241502e-05, |
| "loss": 1.245, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8501758499413836e-05, |
| "loss": 1.2829, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.849824150058617e-05, |
| "loss": 1.2734, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.84947245017585e-05, |
| "loss": 1.2196, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8491207502930833e-05, |
| "loss": 1.222, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8487690504103166e-05, |
| "loss": 1.2464, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.84841735052755e-05, |
| "loss": 1.211, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8480656506447833e-05, |
| "loss": 1.2223, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8477139507620167e-05, |
| "loss": 1.2354, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8473622508792497e-05, |
| "loss": 1.2661, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.847010550996483e-05, |
| "loss": 1.296, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8466588511137164e-05, |
| "loss": 1.2618, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8463071512309498e-05, |
| "loss": 1.2463, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.845955451348183e-05, |
| "loss": 1.2325, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8456037514654165e-05, |
| "loss": 1.296, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8452520515826498e-05, |
| "loss": 1.2115, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.844900351699883e-05, |
| "loss": 1.2071, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8445486518171162e-05, |
| "loss": 1.208, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8441969519343495e-05, |
| "loss": 1.2644, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.843845252051583e-05, |
| "loss": 1.2288, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8434935521688162e-05, |
| "loss": 1.3065, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8431418522860496e-05, |
| "loss": 1.2427, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8427901524032826e-05, |
| "loss": 1.2271, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.842438452520516e-05, |
| "loss": 1.2964, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8420867526377493e-05, |
| "loss": 1.2289, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8417350527549823e-05, |
| "loss": 1.2448, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.8413833528722157e-05, |
| "loss": 1.2549, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.841031652989449e-05, |
| "loss": 1.2112, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.8406799531066824e-05, |
| "loss": 1.2396, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.8403282532239157e-05, |
| "loss": 1.2185, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.839976553341149e-05, |
| "loss": 1.275, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.8396248534583824e-05, |
| "loss": 1.2335, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.8392731535756155e-05, |
| "loss": 1.2339, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.8389214536928488e-05, |
| "loss": 1.2888, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.838569753810082e-05, |
| "loss": 1.2569, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.8382180539273155e-05, |
| "loss": 1.241, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.837866354044549e-05, |
| "loss": 1.205, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.8375146541617822e-05, |
| "loss": 1.2415, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.8371629542790152e-05, |
| "loss": 1.2244, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.8368112543962486e-05, |
| "loss": 1.2431, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.836459554513482e-05, |
| "loss": 1.2255, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.8361078546307153e-05, |
| "loss": 1.2609, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.8357561547479486e-05, |
| "loss": 1.2486, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.835404454865182e-05, |
| "loss": 1.221, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.835052754982415e-05, |
| "loss": 1.2345, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.8347010550996484e-05, |
| "loss": 1.2269, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.8343493552168817e-05, |
| "loss": 1.315, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.833997655334115e-05, |
| "loss": 1.2436, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.8336459554513484e-05, |
| "loss": 1.2165, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.8332942555685818e-05, |
| "loss": 1.1952, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.832942555685815e-05, |
| "loss": 1.2762, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.832590855803048e-05, |
| "loss": 1.2362, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8322391559202815e-05, |
| "loss": 1.2126, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.831887456037515e-05, |
| "loss": 1.2915, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8315357561547482e-05, |
| "loss": 1.2324, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8311840562719816e-05, |
| "loss": 1.2356, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.830832356389215e-05, |
| "loss": 1.29, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.830480656506448e-05, |
| "loss": 1.2675, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8301289566236813e-05, |
| "loss": 1.2509, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8297772567409146e-05, |
| "loss": 1.1942, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8294255568581476e-05, |
| "loss": 1.2482, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.829073856975381e-05, |
| "loss": 1.222, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8287221570926143e-05, |
| "loss": 1.1825, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8283704572098477e-05, |
| "loss": 1.2132, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.828018757327081e-05, |
| "loss": 1.2065, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8276670574443144e-05, |
| "loss": 1.2542, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8273153575615477e-05, |
| "loss": 1.2368, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8269636576787808e-05, |
| "loss": 1.2427, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.826611957796014e-05, |
| "loss": 1.2293, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8262602579132475e-05, |
| "loss": 1.2208, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8259085580304808e-05, |
| "loss": 1.2099, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8255568581477142e-05, |
| "loss": 1.2062, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8252051582649475e-05, |
| "loss": 1.2628, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8248534583821805e-05, |
| "loss": 1.2173, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.824501758499414e-05, |
| "loss": 1.2376, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8241500586166472e-05, |
| "loss": 1.2251, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.8237983587338806e-05, |
| "loss": 1.2173, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.823446658851114e-05, |
| "loss": 1.2122, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.8230949589683473e-05, |
| "loss": 1.2551, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.8227432590855803e-05, |
| "loss": 1.1602, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.8223915592028137e-05, |
| "loss": 1.2016, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.822039859320047e-05, |
| "loss": 1.2688, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.8216881594372804e-05, |
| "loss": 1.2415, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.8213364595545137e-05, |
| "loss": 1.2547, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.820984759671747e-05, |
| "loss": 1.1908, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.8206330597889804e-05, |
| "loss": 1.2692, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.8202813599062134e-05, |
| "loss": 1.1732, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.8199296600234468e-05, |
| "loss": 1.1626, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.81957796014068e-05, |
| "loss": 1.2377, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.8192262602579135e-05, |
| "loss": 1.2539, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.818874560375147e-05, |
| "loss": 1.2281, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.8185228604923802e-05, |
| "loss": 1.2752, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.8181711606096132e-05, |
| "loss": 1.2484, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.8178194607268466e-05, |
| "loss": 1.2656, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.81746776084408e-05, |
| "loss": 1.2171, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.817116060961313e-05, |
| "loss": 1.2076, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.8167643610785463e-05, |
| "loss": 1.1949, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.8164126611957796e-05, |
| "loss": 1.2109, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.816060961313013e-05, |
| "loss": 1.2777, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.8157092614302463e-05, |
| "loss": 1.257, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.8153575615474797e-05, |
| "loss": 1.2477, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.815005861664713e-05, |
| "loss": 1.3005, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.814654161781946e-05, |
| "loss": 1.2617, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.8143024618991794e-05, |
| "loss": 1.192, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.8139507620164128e-05, |
| "loss": 1.2363, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.813599062133646e-05, |
| "loss": 1.2189, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.8132473622508795e-05, |
| "loss": 1.2423, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.8128956623681128e-05, |
| "loss": 1.2451, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.812543962485346e-05, |
| "loss": 1.2502, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.8121922626025792e-05, |
| "loss": 1.2545, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.8118405627198125e-05, |
| "loss": 1.2605, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.811488862837046e-05, |
| "loss": 1.2476, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.8111371629542793e-05, |
| "loss": 1.2166, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.8107854630715126e-05, |
| "loss": 1.2685, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.8104337631887456e-05, |
| "loss": 1.2278, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.810082063305979e-05, |
| "loss": 1.2384, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.8097303634232123e-05, |
| "loss": 1.2454, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.8093786635404457e-05, |
| "loss": 1.2411, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.809026963657679e-05, |
| "loss": 1.2019, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.8086752637749124e-05, |
| "loss": 1.2121, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.8083235638921457e-05, |
| "loss": 1.2337, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.8079718640093787e-05, |
| "loss": 1.2402, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.807620164126612e-05, |
| "loss": 1.2291, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.8072684642438455e-05, |
| "loss": 1.192, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.8069167643610788e-05, |
| "loss": 1.2837, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.806565064478312e-05, |
| "loss": 1.2488, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.8062133645955455e-05, |
| "loss": 1.2531, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.8058616647127785e-05, |
| "loss": 1.2352, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.805509964830012e-05, |
| "loss": 1.2934, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.8051582649472452e-05, |
| "loss": 1.2085, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.8048065650644782e-05, |
| "loss": 1.1819, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.8044548651817116e-05, |
| "loss": 1.2495, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.804103165298945e-05, |
| "loss": 1.2448, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.8037514654161783e-05, |
| "loss": 1.2192, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.8033997655334116e-05, |
| "loss": 1.1966, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.803048065650645e-05, |
| "loss": 1.2223, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.8026963657678784e-05, |
| "loss": 1.268, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.8023446658851114e-05, |
| "loss": 1.2605, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.8019929660023447e-05, |
| "loss": 1.2399, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.801641266119578e-05, |
| "loss": 1.2502, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.8012895662368114e-05, |
| "loss": 1.2445, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.8009378663540448e-05, |
| "loss": 1.2567, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.800586166471278e-05, |
| "loss": 1.2741, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.800234466588511e-05, |
| "loss": 1.1864, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.7998827667057445e-05, |
| "loss": 1.2727, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.799531066822978e-05, |
| "loss": 1.2955, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.7991793669402112e-05, |
| "loss": 1.2457, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.7988276670574446e-05, |
| "loss": 1.2304, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.798475967174678e-05, |
| "loss": 1.1873, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.798124267291911e-05, |
| "loss": 1.2198, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.7977725674091443e-05, |
| "loss": 1.2332, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.7974208675263776e-05, |
| "loss": 1.2748, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.797069167643611e-05, |
| "loss": 1.2596, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.7967174677608443e-05, |
| "loss": 1.1932, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.7963657678780777e-05, |
| "loss": 1.2337, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.796014067995311e-05, |
| "loss": 1.2026, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.795662368112544e-05, |
| "loss": 1.2139, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7953106682297774e-05, |
| "loss": 1.212, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7949589683470108e-05, |
| "loss": 1.211, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.794607268464244e-05, |
| "loss": 1.1995, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7942555685814775e-05, |
| "loss": 1.2814, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7939038686987108e-05, |
| "loss": 1.2527, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7935521688159438e-05, |
| "loss": 1.2638, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7932004689331772e-05, |
| "loss": 1.2279, |
| "step": 641 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7928487690504105e-05, |
| "loss": 1.2442, |
| "step": 642 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7924970691676435e-05, |
| "loss": 1.2455, |
| "step": 643 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.792145369284877e-05, |
| "loss": 1.2153, |
| "step": 644 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7917936694021102e-05, |
| "loss": 1.2211, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7914419695193436e-05, |
| "loss": 1.2745, |
| "step": 646 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.791090269636577e-05, |
| "loss": 1.2477, |
| "step": 647 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7907385697538103e-05, |
| "loss": 1.2115, |
| "step": 648 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7903868698710437e-05, |
| "loss": 1.2344, |
| "step": 649 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7900351699882767e-05, |
| "loss": 1.2249, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.78968347010551e-05, |
| "loss": 1.2709, |
| "step": 651 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7893317702227434e-05, |
| "loss": 1.2584, |
| "step": 652 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7889800703399767e-05, |
| "loss": 1.2109, |
| "step": 653 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.78862837045721e-05, |
| "loss": 1.2237, |
| "step": 654 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7882766705744434e-05, |
| "loss": 1.2189, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7879249706916764e-05, |
| "loss": 1.2217, |
| "step": 656 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7875732708089098e-05, |
| "loss": 1.1956, |
| "step": 657 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.787221570926143e-05, |
| "loss": 1.2118, |
| "step": 658 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.7868698710433765e-05, |
| "loss": 1.1965, |
| "step": 659 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.78651817116061e-05, |
| "loss": 1.2348, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.7861664712778432e-05, |
| "loss": 1.2818, |
| "step": 661 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.7858147713950762e-05, |
| "loss": 1.211, |
| "step": 662 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.7854630715123096e-05, |
| "loss": 1.186, |
| "step": 663 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.785111371629543e-05, |
| "loss": 1.2011, |
| "step": 664 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.7847596717467763e-05, |
| "loss": 1.2441, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.7844079718640096e-05, |
| "loss": 1.2067, |
| "step": 666 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.784056271981243e-05, |
| "loss": 1.2504, |
| "step": 667 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.7837045720984763e-05, |
| "loss": 1.2821, |
| "step": 668 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.7833528722157094e-05, |
| "loss": 1.207, |
| "step": 669 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.7830011723329427e-05, |
| "loss": 1.2287, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.782649472450176e-05, |
| "loss": 1.2221, |
| "step": 671 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.7822977725674094e-05, |
| "loss": 1.2432, |
| "step": 672 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.7819460726846428e-05, |
| "loss": 1.2513, |
| "step": 673 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.781594372801876e-05, |
| "loss": 1.2423, |
| "step": 674 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.781242672919109e-05, |
| "loss": 1.2265, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.7808909730363425e-05, |
| "loss": 1.1916, |
| "step": 676 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.780539273153576e-05, |
| "loss": 1.2143, |
| "step": 677 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.780187573270809e-05, |
| "loss": 1.2345, |
| "step": 678 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.7798358733880422e-05, |
| "loss": 1.2405, |
| "step": 679 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.7794841735052755e-05, |
| "loss": 1.24, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.779132473622509e-05, |
| "loss": 1.2473, |
| "step": 681 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.7787807737397423e-05, |
| "loss": 1.2167, |
| "step": 682 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.7784290738569756e-05, |
| "loss": 1.2578, |
| "step": 683 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.778077373974209e-05, |
| "loss": 1.1861, |
| "step": 684 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.777725674091442e-05, |
| "loss": 1.2148, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.7773739742086753e-05, |
| "loss": 1.2427, |
| "step": 686 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.7770222743259087e-05, |
| "loss": 1.2382, |
| "step": 687 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.776670574443142e-05, |
| "loss": 1.2037, |
| "step": 688 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.7763188745603754e-05, |
| "loss": 1.2089, |
| "step": 689 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.7759671746776087e-05, |
| "loss": 1.1841, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.7756154747948417e-05, |
| "loss": 1.1952, |
| "step": 691 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.775263774912075e-05, |
| "loss": 1.2935, |
| "step": 692 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.7749120750293085e-05, |
| "loss": 1.2569, |
| "step": 693 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.7745603751465418e-05, |
| "loss": 1.2097, |
| "step": 694 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.774208675263775e-05, |
| "loss": 1.2585, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.7738569753810085e-05, |
| "loss": 1.2303, |
| "step": 696 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.7735052754982415e-05, |
| "loss": 1.2454, |
| "step": 697 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.773153575615475e-05, |
| "loss": 1.2296, |
| "step": 698 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.7728018757327082e-05, |
| "loss": 1.2827, |
| "step": 699 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.7724501758499416e-05, |
| "loss": 1.2717, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.772098475967175e-05, |
| "loss": 1.2351, |
| "step": 701 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.7717467760844083e-05, |
| "loss": 1.2394, |
| "step": 702 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.7713950762016416e-05, |
| "loss": 1.2127, |
| "step": 703 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.7710433763188747e-05, |
| "loss": 1.2397, |
| "step": 704 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.770691676436108e-05, |
| "loss": 1.2192, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.7703399765533414e-05, |
| "loss": 1.1646, |
| "step": 706 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.7699882766705747e-05, |
| "loss": 1.2378, |
| "step": 707 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.769636576787808e-05, |
| "loss": 1.2601, |
| "step": 708 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.7692848769050414e-05, |
| "loss": 1.2738, |
| "step": 709 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.7689331770222744e-05, |
| "loss": 1.1934, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.7685814771395078e-05, |
| "loss": 1.2263, |
| "step": 711 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.768229777256741e-05, |
| "loss": 1.2583, |
| "step": 712 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.767878077373974e-05, |
| "loss": 1.2254, |
| "step": 713 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.7675263774912075e-05, |
| "loss": 1.2206, |
| "step": 714 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.767174677608441e-05, |
| "loss": 1.2699, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.7668229777256742e-05, |
| "loss": 1.2783, |
| "step": 716 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.7664712778429076e-05, |
| "loss": 1.2326, |
| "step": 717 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.766119577960141e-05, |
| "loss": 1.2422, |
| "step": 718 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.765767878077374e-05, |
| "loss": 1.2516, |
| "step": 719 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.7654161781946073e-05, |
| "loss": 1.2351, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.7650644783118406e-05, |
| "loss": 1.2112, |
| "step": 721 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.764712778429074e-05, |
| "loss": 1.2395, |
| "step": 722 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.7643610785463073e-05, |
| "loss": 1.2236, |
| "step": 723 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.7640093786635407e-05, |
| "loss": 1.2018, |
| "step": 724 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.763657678780774e-05, |
| "loss": 1.2212, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.763305978898007e-05, |
| "loss": 1.2594, |
| "step": 726 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.7629542790152404e-05, |
| "loss": 1.2411, |
| "step": 727 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.7626025791324738e-05, |
| "loss": 1.2374, |
| "step": 728 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.762250879249707e-05, |
| "loss": 1.2619, |
| "step": 729 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.7618991793669405e-05, |
| "loss": 1.2418, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.7615474794841738e-05, |
| "loss": 1.1731, |
| "step": 731 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.7611957796014068e-05, |
| "loss": 1.2345, |
| "step": 732 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.7608440797186402e-05, |
| "loss": 1.2609, |
| "step": 733 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.7604923798358735e-05, |
| "loss": 1.2324, |
| "step": 734 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.760140679953107e-05, |
| "loss": 1.2019, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.7597889800703402e-05, |
| "loss": 1.2101, |
| "step": 736 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.7594372801875736e-05, |
| "loss": 1.2622, |
| "step": 737 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.759085580304807e-05, |
| "loss": 1.2468, |
| "step": 738 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.75873388042204e-05, |
| "loss": 1.2087, |
| "step": 739 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.7583821805392733e-05, |
| "loss": 1.1822, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.7580304806565067e-05, |
| "loss": 1.2544, |
| "step": 741 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.75767878077374e-05, |
| "loss": 1.2456, |
| "step": 742 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.7573270808909734e-05, |
| "loss": 1.2101, |
| "step": 743 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.7569753810082067e-05, |
| "loss": 1.2187, |
| "step": 744 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.7566236811254397e-05, |
| "loss": 1.1712, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.756271981242673e-05, |
| "loss": 1.2286, |
| "step": 746 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.7559202813599064e-05, |
| "loss": 1.2493, |
| "step": 747 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.7555685814771394e-05, |
| "loss": 1.2653, |
| "step": 748 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.7552168815943728e-05, |
| "loss": 1.2754, |
| "step": 749 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.754865181711606e-05, |
| "loss": 1.2786, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.7545134818288395e-05, |
| "loss": 1.2543, |
| "step": 751 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.754161781946073e-05, |
| "loss": 1.255, |
| "step": 752 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.7538100820633062e-05, |
| "loss": 1.2517, |
| "step": 753 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.7534583821805392e-05, |
| "loss": 1.2621, |
| "step": 754 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.7531066822977726e-05, |
| "loss": 1.2596, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.752754982415006e-05, |
| "loss": 1.2099, |
| "step": 756 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.7524032825322393e-05, |
| "loss": 1.2378, |
| "step": 757 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.7520515826494726e-05, |
| "loss": 1.2029, |
| "step": 758 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.751699882766706e-05, |
| "loss": 1.2686, |
| "step": 759 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.7513481828839393e-05, |
| "loss": 1.1735, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.7509964830011724e-05, |
| "loss": 1.2531, |
| "step": 761 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.7506447831184057e-05, |
| "loss": 1.2284, |
| "step": 762 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.750293083235639e-05, |
| "loss": 1.2207, |
| "step": 763 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.7499413833528724e-05, |
| "loss": 1.2462, |
| "step": 764 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.7495896834701058e-05, |
| "loss": 1.2542, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.749237983587339e-05, |
| "loss": 1.1681, |
| "step": 766 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.748886283704572e-05, |
| "loss": 1.2399, |
| "step": 767 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.7485345838218055e-05, |
| "loss": 1.2274, |
| "step": 768 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.748182883939039e-05, |
| "loss": 1.1959, |
| "step": 769 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.7478311840562722e-05, |
| "loss": 1.2274, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.7474794841735055e-05, |
| "loss": 1.2078, |
| "step": 771 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.747127784290739e-05, |
| "loss": 1.1842, |
| "step": 772 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.7467760844079722e-05, |
| "loss": 1.1887, |
| "step": 773 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.7464243845252053e-05, |
| "loss": 1.2633, |
| "step": 774 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.7460726846424386e-05, |
| "loss": 1.2388, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.745720984759672e-05, |
| "loss": 1.2259, |
| "step": 776 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.7453692848769053e-05, |
| "loss": 1.2455, |
| "step": 777 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.7450175849941387e-05, |
| "loss": 1.2247, |
| "step": 778 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.744665885111372e-05, |
| "loss": 1.2343, |
| "step": 779 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.744314185228605e-05, |
| "loss": 1.1888, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.7439624853458384e-05, |
| "loss": 1.2337, |
| "step": 781 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.7436107854630717e-05, |
| "loss": 1.2296, |
| "step": 782 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.7432590855803048e-05, |
| "loss": 1.1954, |
| "step": 783 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.742907385697538e-05, |
| "loss": 1.2417, |
| "step": 784 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.7425556858147715e-05, |
| "loss": 1.2881, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.7422039859320048e-05, |
| "loss": 1.2129, |
| "step": 786 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.741852286049238e-05, |
| "loss": 1.2015, |
| "step": 787 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.7415005861664715e-05, |
| "loss": 1.1959, |
| "step": 788 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.7411488862837045e-05, |
| "loss": 1.2211, |
| "step": 789 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.740797186400938e-05, |
| "loss": 1.2806, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.7404454865181712e-05, |
| "loss": 1.1938, |
| "step": 791 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.7400937866354046e-05, |
| "loss": 1.2055, |
| "step": 792 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.739742086752638e-05, |
| "loss": 1.2078, |
| "step": 793 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.7393903868698713e-05, |
| "loss": 1.2384, |
| "step": 794 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.7390386869871046e-05, |
| "loss": 1.2032, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.7386869871043377e-05, |
| "loss": 1.2282, |
| "step": 796 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.738335287221571e-05, |
| "loss": 1.2322, |
| "step": 797 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.7379835873388044e-05, |
| "loss": 1.2009, |
| "step": 798 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.7376318874560377e-05, |
| "loss": 1.2523, |
| "step": 799 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.737280187573271e-05, |
| "loss": 1.2445, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.7369284876905044e-05, |
| "loss": 1.2236, |
| "step": 801 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.7365767878077374e-05, |
| "loss": 1.2332, |
| "step": 802 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.7362250879249708e-05, |
| "loss": 1.232, |
| "step": 803 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.735873388042204e-05, |
| "loss": 1.2675, |
| "step": 804 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.7355216881594375e-05, |
| "loss": 1.2252, |
| "step": 805 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.735169988276671e-05, |
| "loss": 1.2194, |
| "step": 806 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.7348182883939042e-05, |
| "loss": 1.2355, |
| "step": 807 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.7344665885111372e-05, |
| "loss": 1.2025, |
| "step": 808 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.7341148886283706e-05, |
| "loss": 1.2816, |
| "step": 809 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.733763188745604e-05, |
| "loss": 1.1988, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.7334114888628373e-05, |
| "loss": 1.2066, |
| "step": 811 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.7330597889800706e-05, |
| "loss": 1.1953, |
| "step": 812 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.732708089097304e-05, |
| "loss": 1.2049, |
| "step": 813 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.7323563892145373e-05, |
| "loss": 1.2298, |
| "step": 814 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.7320046893317703e-05, |
| "loss": 1.1559, |
| "step": 815 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.7316529894490037e-05, |
| "loss": 1.1855, |
| "step": 816 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.731301289566237e-05, |
| "loss": 1.2667, |
| "step": 817 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.73094958968347e-05, |
| "loss": 1.2067, |
| "step": 818 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.7305978898007034e-05, |
| "loss": 1.2441, |
| "step": 819 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.7302461899179368e-05, |
| "loss": 1.2182, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.72989449003517e-05, |
| "loss": 1.241, |
| "step": 821 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.7295427901524035e-05, |
| "loss": 1.214, |
| "step": 822 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.7291910902696368e-05, |
| "loss": 1.2641, |
| "step": 823 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.7288393903868698e-05, |
| "loss": 1.2176, |
| "step": 824 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.7284876905041032e-05, |
| "loss": 1.2682, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.7281359906213365e-05, |
| "loss": 1.2761, |
| "step": 826 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.72778429073857e-05, |
| "loss": 1.2357, |
| "step": 827 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.7274325908558032e-05, |
| "loss": 1.1662, |
| "step": 828 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.7270808909730366e-05, |
| "loss": 1.2373, |
| "step": 829 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.72672919109027e-05, |
| "loss": 1.2073, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.726377491207503e-05, |
| "loss": 1.2382, |
| "step": 831 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.7260257913247363e-05, |
| "loss": 1.2274, |
| "step": 832 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.7256740914419697e-05, |
| "loss": 1.2585, |
| "step": 833 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.7253223915592027e-05, |
| "loss": 1.1934, |
| "step": 834 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.724970691676436e-05, |
| "loss": 1.2315, |
| "step": 835 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.7246189917936694e-05, |
| "loss": 1.2209, |
| "step": 836 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.7242672919109027e-05, |
| "loss": 1.2044, |
| "step": 837 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.723915592028136e-05, |
| "loss": 1.187, |
| "step": 838 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.7235638921453694e-05, |
| "loss": 1.177, |
| "step": 839 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.7232121922626025e-05, |
| "loss": 1.2788, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.7228604923798358e-05, |
| "loss": 1.2469, |
| "step": 841 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.722508792497069e-05, |
| "loss": 1.2583, |
| "step": 842 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.7221570926143025e-05, |
| "loss": 1.2793, |
| "step": 843 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.721805392731536e-05, |
| "loss": 1.2584, |
| "step": 844 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.7214536928487692e-05, |
| "loss": 1.2586, |
| "step": 845 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.7211019929660026e-05, |
| "loss": 1.2582, |
| "step": 846 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.7207502930832356e-05, |
| "loss": 1.2139, |
| "step": 847 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.720398593200469e-05, |
| "loss": 1.2106, |
| "step": 848 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.7200468933177023e-05, |
| "loss": 1.2584, |
| "step": 849 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.7196951934349356e-05, |
| "loss": 1.2691, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.719343493552169e-05, |
| "loss": 1.2351, |
| "step": 851 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.7189917936694023e-05, |
| "loss": 1.2683, |
| "step": 852 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.7186400937866354e-05, |
| "loss": 1.2158, |
| "step": 853 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.7182883939038687e-05, |
| "loss": 1.2656, |
| "step": 854 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.717936694021102e-05, |
| "loss": 1.1984, |
| "step": 855 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.7175849941383354e-05, |
| "loss": 1.2199, |
| "step": 856 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.7172332942555688e-05, |
| "loss": 1.2354, |
| "step": 857 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.716881594372802e-05, |
| "loss": 1.1896, |
| "step": 858 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.716529894490035e-05, |
| "loss": 1.2445, |
| "step": 859 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.7161781946072685e-05, |
| "loss": 1.2579, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.715826494724502e-05, |
| "loss": 1.2018, |
| "step": 861 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.7154747948417352e-05, |
| "loss": 1.2197, |
| "step": 862 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.7151230949589685e-05, |
| "loss": 1.2314, |
| "step": 863 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.714771395076202e-05, |
| "loss": 1.2651, |
| "step": 864 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.7144196951934352e-05, |
| "loss": 1.2571, |
| "step": 865 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.7140679953106683e-05, |
| "loss": 1.2455, |
| "step": 866 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.7137162954279016e-05, |
| "loss": 1.2112, |
| "step": 867 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.713364595545135e-05, |
| "loss": 1.2315, |
| "step": 868 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.713012895662368e-05, |
| "loss": 1.2485, |
| "step": 869 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.7126611957796013e-05, |
| "loss": 1.2644, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.7123094958968347e-05, |
| "loss": 1.2575, |
| "step": 871 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.711957796014068e-05, |
| "loss": 1.2346, |
| "step": 872 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.7116060961313014e-05, |
| "loss": 1.2422, |
| "step": 873 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.7112543962485347e-05, |
| "loss": 1.3049, |
| "step": 874 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.7109026963657678e-05, |
| "loss": 1.242, |
| "step": 875 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.710550996483001e-05, |
| "loss": 1.2649, |
| "step": 876 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.7101992966002345e-05, |
| "loss": 1.1927, |
| "step": 877 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.7098475967174678e-05, |
| "loss": 1.2343, |
| "step": 878 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.709495896834701e-05, |
| "loss": 1.2425, |
| "step": 879 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.7091441969519345e-05, |
| "loss": 1.2489, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.708792497069168e-05, |
| "loss": 1.2252, |
| "step": 881 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.708440797186401e-05, |
| "loss": 1.2066, |
| "step": 882 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.7080890973036342e-05, |
| "loss": 1.2299, |
| "step": 883 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.7077373974208676e-05, |
| "loss": 1.1831, |
| "step": 884 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.707385697538101e-05, |
| "loss": 1.1892, |
| "step": 885 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.7070339976553343e-05, |
| "loss": 1.193, |
| "step": 886 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.7066822977725676e-05, |
| "loss": 1.2375, |
| "step": 887 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.7063305978898007e-05, |
| "loss": 1.2058, |
| "step": 888 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.705978898007034e-05, |
| "loss": 1.2276, |
| "step": 889 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.7056271981242674e-05, |
| "loss": 1.233, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.7052754982415007e-05, |
| "loss": 1.2335, |
| "step": 891 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.704923798358734e-05, |
| "loss": 1.2326, |
| "step": 892 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.7045720984759674e-05, |
| "loss": 1.2496, |
| "step": 893 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.7042203985932004e-05, |
| "loss": 1.2282, |
| "step": 894 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.7038686987104338e-05, |
| "loss": 1.2639, |
| "step": 895 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.703516998827667e-05, |
| "loss": 1.2308, |
| "step": 896 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.7031652989449005e-05, |
| "loss": 1.2324, |
| "step": 897 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.702813599062134e-05, |
| "loss": 1.254, |
| "step": 898 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.7024618991793672e-05, |
| "loss": 1.1891, |
| "step": 899 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.7021101992966005e-05, |
| "loss": 1.2352, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.7017584994138336e-05, |
| "loss": 1.2331, |
| "step": 901 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.701406799531067e-05, |
| "loss": 1.2237, |
| "step": 902 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.7010550996483003e-05, |
| "loss": 1.226, |
| "step": 903 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.7007033997655333e-05, |
| "loss": 1.2707, |
| "step": 904 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.7003516998827666e-05, |
| "loss": 1.2357, |
| "step": 905 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.7e-05, |
| "loss": 1.2435, |
| "step": 906 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.6996483001172333e-05, |
| "loss": 1.2302, |
| "step": 907 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.6992966002344667e-05, |
| "loss": 1.2818, |
| "step": 908 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.6989449003517e-05, |
| "loss": 1.2869, |
| "step": 909 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.698593200468933e-05, |
| "loss": 1.2461, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.6982415005861664e-05, |
| "loss": 1.2377, |
| "step": 911 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.6978898007033998e-05, |
| "loss": 1.2034, |
| "step": 912 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.697538100820633e-05, |
| "loss": 1.2708, |
| "step": 913 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.6971864009378665e-05, |
| "loss": 1.234, |
| "step": 914 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.6968347010550998e-05, |
| "loss": 1.2279, |
| "step": 915 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.6964830011723332e-05, |
| "loss": 1.2383, |
| "step": 916 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.6961313012895662e-05, |
| "loss": 1.18, |
| "step": 917 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.6957796014067995e-05, |
| "loss": 1.2041, |
| "step": 918 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.695427901524033e-05, |
| "loss": 1.2745, |
| "step": 919 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.6950762016412662e-05, |
| "loss": 1.2055, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.6947245017584996e-05, |
| "loss": 1.2346, |
| "step": 921 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.694372801875733e-05, |
| "loss": 1.221, |
| "step": 922 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.694021101992966e-05, |
| "loss": 1.2078, |
| "step": 923 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.6936694021101993e-05, |
| "loss": 1.2249, |
| "step": 924 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.6933177022274327e-05, |
| "loss": 1.2538, |
| "step": 925 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.692966002344666e-05, |
| "loss": 1.224, |
| "step": 926 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.6926143024618994e-05, |
| "loss": 1.1912, |
| "step": 927 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.6922626025791327e-05, |
| "loss": 1.2678, |
| "step": 928 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.6919109026963657e-05, |
| "loss": 1.2229, |
| "step": 929 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.691559202813599e-05, |
| "loss": 1.2504, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.6912075029308324e-05, |
| "loss": 1.1941, |
| "step": 931 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.6908558030480655e-05, |
| "loss": 1.2699, |
| "step": 932 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.690504103165299e-05, |
| "loss": 1.1976, |
| "step": 933 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.6901524032825325e-05, |
| "loss": 1.2477, |
| "step": 934 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.689800703399766e-05, |
| "loss": 1.2239, |
| "step": 935 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.689449003516999e-05, |
| "loss": 1.2252, |
| "step": 936 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.6890973036342322e-05, |
| "loss": 1.2528, |
| "step": 937 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.6887456037514656e-05, |
| "loss": 1.2248, |
| "step": 938 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.6883939038686986e-05, |
| "loss": 1.1717, |
| "step": 939 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.688042203985932e-05, |
| "loss": 1.2232, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.6876905041031653e-05, |
| "loss": 1.2437, |
| "step": 941 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.6873388042203986e-05, |
| "loss": 1.2202, |
| "step": 942 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.686987104337632e-05, |
| "loss": 1.2124, |
| "step": 943 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.6866354044548653e-05, |
| "loss": 1.1595, |
| "step": 944 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.6862837045720984e-05, |
| "loss": 1.2464, |
| "step": 945 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.6859320046893317e-05, |
| "loss": 1.2345, |
| "step": 946 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.685580304806565e-05, |
| "loss": 1.2488, |
| "step": 947 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.6852286049237984e-05, |
| "loss": 1.198, |
| "step": 948 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.6848769050410318e-05, |
| "loss": 1.2446, |
| "step": 949 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.684525205158265e-05, |
| "loss": 1.1555, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.6841735052754985e-05, |
| "loss": 1.2188, |
| "step": 951 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.6838218053927315e-05, |
| "loss": 1.2191, |
| "step": 952 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.683470105509965e-05, |
| "loss": 1.258, |
| "step": 953 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.6831184056271982e-05, |
| "loss": 1.2403, |
| "step": 954 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.6827667057444315e-05, |
| "loss": 1.1876, |
| "step": 955 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.682415005861665e-05, |
| "loss": 1.2353, |
| "step": 956 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.6820633059788983e-05, |
| "loss": 1.24, |
| "step": 957 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.6817116060961313e-05, |
| "loss": 1.22, |
| "step": 958 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.6813599062133646e-05, |
| "loss": 1.1702, |
| "step": 959 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.681008206330598e-05, |
| "loss": 1.2506, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.6806565064478313e-05, |
| "loss": 1.2703, |
| "step": 961 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.6803048065650647e-05, |
| "loss": 1.2122, |
| "step": 962 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.679953106682298e-05, |
| "loss": 1.2401, |
| "step": 963 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.679601406799531e-05, |
| "loss": 1.2534, |
| "step": 964 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.6792497069167644e-05, |
| "loss": 1.1836, |
| "step": 965 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.6788980070339977e-05, |
| "loss": 1.2292, |
| "step": 966 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.6785463071512308e-05, |
| "loss": 1.2939, |
| "step": 967 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.6781946072684644e-05, |
| "loss": 1.2228, |
| "step": 968 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.6778429073856978e-05, |
| "loss": 1.2332, |
| "step": 969 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.677491207502931e-05, |
| "loss": 1.2071, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.677139507620164e-05, |
| "loss": 1.2094, |
| "step": 971 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.6767878077373975e-05, |
| "loss": 1.2221, |
| "step": 972 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.676436107854631e-05, |
| "loss": 1.2475, |
| "step": 973 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.676084407971864e-05, |
| "loss": 1.1795, |
| "step": 974 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.6757327080890972e-05, |
| "loss": 1.2568, |
| "step": 975 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.6753810082063306e-05, |
| "loss": 1.258, |
| "step": 976 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.675029308323564e-05, |
| "loss": 1.2449, |
| "step": 977 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.6746776084407973e-05, |
| "loss": 1.2751, |
| "step": 978 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.6743259085580306e-05, |
| "loss": 1.2129, |
| "step": 979 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.6739742086752637e-05, |
| "loss": 1.2767, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.673622508792497e-05, |
| "loss": 1.2405, |
| "step": 981 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.6732708089097304e-05, |
| "loss": 1.2269, |
| "step": 982 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.6729191090269637e-05, |
| "loss": 1.2185, |
| "step": 983 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.672567409144197e-05, |
| "loss": 1.213, |
| "step": 984 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.6722157092614304e-05, |
| "loss": 1.241, |
| "step": 985 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.6718640093786638e-05, |
| "loss": 1.192, |
| "step": 986 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.6715123094958968e-05, |
| "loss": 1.1963, |
| "step": 987 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.67116060961313e-05, |
| "loss": 1.2643, |
| "step": 988 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.6708089097303635e-05, |
| "loss": 1.1732, |
| "step": 989 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.670457209847597e-05, |
| "loss": 1.2412, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.6701055099648302e-05, |
| "loss": 1.2445, |
| "step": 991 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.6697538100820636e-05, |
| "loss": 1.2235, |
| "step": 992 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.6694021101992966e-05, |
| "loss": 1.2661, |
| "step": 993 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.66905041031653e-05, |
| "loss": 1.2322, |
| "step": 994 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 1.6686987104337633e-05, |
| "loss": 1.2718, |
| "step": 995 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.6683470105509966e-05, |
| "loss": 1.266, |
| "step": 996 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.66799531066823e-05, |
| "loss": 1.2123, |
| "step": 997 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.6676436107854633e-05, |
| "loss": 1.2409, |
| "step": 998 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.6672919109026963e-05, |
| "loss": 1.2367, |
| "step": 999 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.6669402110199297e-05, |
| "loss": 1.1988, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.666588511137163e-05, |
| "loss": 1.2741, |
| "step": 1001 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.666236811254396e-05, |
| "loss": 1.2287, |
| "step": 1002 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.6658851113716298e-05, |
| "loss": 1.2512, |
| "step": 1003 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.665533411488863e-05, |
| "loss": 1.2263, |
| "step": 1004 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.6651817116060965e-05, |
| "loss": 1.2249, |
| "step": 1005 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.6648300117233295e-05, |
| "loss": 1.2559, |
| "step": 1006 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.6644783118405628e-05, |
| "loss": 1.2055, |
| "step": 1007 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.6641266119577962e-05, |
| "loss": 1.262, |
| "step": 1008 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.6637749120750292e-05, |
| "loss": 1.2156, |
| "step": 1009 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.6634232121922625e-05, |
| "loss": 1.2881, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.663071512309496e-05, |
| "loss": 1.2044, |
| "step": 1011 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.6627198124267292e-05, |
| "loss": 1.2553, |
| "step": 1012 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.6623681125439626e-05, |
| "loss": 1.1956, |
| "step": 1013 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.662016412661196e-05, |
| "loss": 1.2455, |
| "step": 1014 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.661664712778429e-05, |
| "loss": 1.2303, |
| "step": 1015 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.6613130128956623e-05, |
| "loss": 1.1659, |
| "step": 1016 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.6609613130128957e-05, |
| "loss": 1.2483, |
| "step": 1017 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.660609613130129e-05, |
| "loss": 1.2604, |
| "step": 1018 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.6602579132473624e-05, |
| "loss": 1.1778, |
| "step": 1019 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.6599062133645957e-05, |
| "loss": 1.2196, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 1.6595545134818287e-05, |
| "loss": 1.2368, |
| "step": 1021 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.659202813599062e-05, |
| "loss": 1.2605, |
| "step": 1022 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.6588511137162954e-05, |
| "loss": 1.2068, |
| "step": 1023 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.6584994138335288e-05, |
| "loss": 1.2491, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.658147713950762e-05, |
| "loss": 1.2746, |
| "step": 1025 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.6577960140679955e-05, |
| "loss": 1.244, |
| "step": 1026 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.657444314185229e-05, |
| "loss": 1.2401, |
| "step": 1027 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.657092614302462e-05, |
| "loss": 1.2352, |
| "step": 1028 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.6567409144196952e-05, |
| "loss": 1.2439, |
| "step": 1029 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.6563892145369286e-05, |
| "loss": 1.2281, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.656037514654162e-05, |
| "loss": 1.2215, |
| "step": 1031 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.6556858147713953e-05, |
| "loss": 1.198, |
| "step": 1032 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.6553341148886286e-05, |
| "loss": 1.2122, |
| "step": 1033 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.6549824150058616e-05, |
| "loss": 1.2016, |
| "step": 1034 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.654630715123095e-05, |
| "loss": 1.232, |
| "step": 1035 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.6542790152403283e-05, |
| "loss": 1.2464, |
| "step": 1036 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.6539273153575614e-05, |
| "loss": 1.2095, |
| "step": 1037 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.6535756154747947e-05, |
| "loss": 1.2248, |
| "step": 1038 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.6532239155920284e-05, |
| "loss": 1.2665, |
| "step": 1039 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.6528722157092618e-05, |
| "loss": 1.2255, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.6525205158264948e-05, |
| "loss": 1.2349, |
| "step": 1041 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.652168815943728e-05, |
| "loss": 1.1908, |
| "step": 1042 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.6518171160609615e-05, |
| "loss": 1.2259, |
| "step": 1043 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.6514654161781945e-05, |
| "loss": 1.2231, |
| "step": 1044 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.651113716295428e-05, |
| "loss": 1.2542, |
| "step": 1045 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 1.6507620164126612e-05, |
| "loss": 1.2049, |
| "step": 1046 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.6504103165298945e-05, |
| "loss": 1.2247, |
| "step": 1047 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.650058616647128e-05, |
| "loss": 1.216, |
| "step": 1048 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.6497069167643613e-05, |
| "loss": 1.2658, |
| "step": 1049 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.6493552168815943e-05, |
| "loss": 1.2034, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.6490035169988276e-05, |
| "loss": 1.2629, |
| "step": 1051 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.648651817116061e-05, |
| "loss": 1.2326, |
| "step": 1052 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.6483001172332943e-05, |
| "loss": 1.2205, |
| "step": 1053 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.6479484173505277e-05, |
| "loss": 1.2312, |
| "step": 1054 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.647596717467761e-05, |
| "loss": 1.2595, |
| "step": 1055 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.647245017584994e-05, |
| "loss": 1.2246, |
| "step": 1056 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.6468933177022274e-05, |
| "loss": 1.222, |
| "step": 1057 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.6465416178194607e-05, |
| "loss": 1.2485, |
| "step": 1058 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.646189917936694e-05, |
| "loss": 1.2199, |
| "step": 1059 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.6458382180539275e-05, |
| "loss": 1.2202, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.6454865181711608e-05, |
| "loss": 1.2504, |
| "step": 1061 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.645134818288394e-05, |
| "loss": 1.2358, |
| "step": 1062 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.6447831184056272e-05, |
| "loss": 1.2815, |
| "step": 1063 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.6444314185228605e-05, |
| "loss": 1.2333, |
| "step": 1064 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.644079718640094e-05, |
| "loss": 1.2197, |
| "step": 1065 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.6437280187573272e-05, |
| "loss": 1.2426, |
| "step": 1066 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.6433763188745606e-05, |
| "loss": 1.1726, |
| "step": 1067 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.643024618991794e-05, |
| "loss": 1.2121, |
| "step": 1068 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.642672919109027e-05, |
| "loss": 1.2481, |
| "step": 1069 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.6423212192262603e-05, |
| "loss": 1.2059, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.6419695193434937e-05, |
| "loss": 1.2021, |
| "step": 1071 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 1.6416178194607267e-05, |
| "loss": 1.2403, |
| "step": 1072 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.64126611957796e-05, |
| "loss": 1.2439, |
| "step": 1073 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.6409144196951937e-05, |
| "loss": 1.2033, |
| "step": 1074 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.640562719812427e-05, |
| "loss": 1.2458, |
| "step": 1075 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.64021101992966e-05, |
| "loss": 1.2772, |
| "step": 1076 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.6398593200468934e-05, |
| "loss": 1.245, |
| "step": 1077 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.6395076201641268e-05, |
| "loss": 1.2211, |
| "step": 1078 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.6391559202813598e-05, |
| "loss": 1.2307, |
| "step": 1079 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.638804220398593e-05, |
| "loss": 1.2453, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.6384525205158265e-05, |
| "loss": 1.2306, |
| "step": 1081 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.63810082063306e-05, |
| "loss": 1.2008, |
| "step": 1082 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.6377491207502932e-05, |
| "loss": 1.2114, |
| "step": 1083 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.6373974208675266e-05, |
| "loss": 1.2651, |
| "step": 1084 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.6370457209847596e-05, |
| "loss": 1.2106, |
| "step": 1085 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.636694021101993e-05, |
| "loss": 1.1907, |
| "step": 1086 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.6363423212192263e-05, |
| "loss": 1.2141, |
| "step": 1087 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.6359906213364596e-05, |
| "loss": 1.2011, |
| "step": 1088 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.635638921453693e-05, |
| "loss": 1.2121, |
| "step": 1089 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.6352872215709263e-05, |
| "loss": 1.2389, |
| "step": 1090 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.6349355216881593e-05, |
| "loss": 1.2557, |
| "step": 1091 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.6345838218053927e-05, |
| "loss": 1.2157, |
| "step": 1092 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.634232121922626e-05, |
| "loss": 1.239, |
| "step": 1093 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.6338804220398594e-05, |
| "loss": 1.2122, |
| "step": 1094 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.6335287221570928e-05, |
| "loss": 1.1996, |
| "step": 1095 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.633177022274326e-05, |
| "loss": 1.2346, |
| "step": 1096 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.6328253223915595e-05, |
| "loss": 1.2145, |
| "step": 1097 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 1.6324736225087925e-05, |
| "loss": 1.2174, |
| "step": 1098 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.6321219226260258e-05, |
| "loss": 1.2082, |
| "step": 1099 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.6317702227432592e-05, |
| "loss": 1.2199, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.6314185228604925e-05, |
| "loss": 1.2299, |
| "step": 1101 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.631066822977726e-05, |
| "loss": 1.191, |
| "step": 1102 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.6307151230949592e-05, |
| "loss": 1.2279, |
| "step": 1103 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.6303634232121922e-05, |
| "loss": 1.1994, |
| "step": 1104 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.6300117233294256e-05, |
| "loss": 1.2622, |
| "step": 1105 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.629660023446659e-05, |
| "loss": 1.2549, |
| "step": 1106 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.629308323563892e-05, |
| "loss": 1.2179, |
| "step": 1107 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.6289566236811253e-05, |
| "loss": 1.2341, |
| "step": 1108 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.6286049237983587e-05, |
| "loss": 1.2697, |
| "step": 1109 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.628253223915592e-05, |
| "loss": 1.2299, |
| "step": 1110 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.6279015240328254e-05, |
| "loss": 1.2508, |
| "step": 1111 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.6275498241500587e-05, |
| "loss": 1.2275, |
| "step": 1112 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.627198124267292e-05, |
| "loss": 1.2379, |
| "step": 1113 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.626846424384525e-05, |
| "loss": 1.2442, |
| "step": 1114 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.6264947245017584e-05, |
| "loss": 1.2146, |
| "step": 1115 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.6261430246189918e-05, |
| "loss": 1.2144, |
| "step": 1116 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.625791324736225e-05, |
| "loss": 1.257, |
| "step": 1117 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.6254396248534585e-05, |
| "loss": 1.2365, |
| "step": 1118 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.625087924970692e-05, |
| "loss": 1.2534, |
| "step": 1119 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.624736225087925e-05, |
| "loss": 1.2337, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.6243845252051582e-05, |
| "loss": 1.1922, |
| "step": 1121 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.6240328253223916e-05, |
| "loss": 1.2206, |
| "step": 1122 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.623681125439625e-05, |
| "loss": 1.2103, |
| "step": 1123 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 1.6233294255568583e-05, |
| "loss": 1.2365, |
| "step": 1124 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.6229777256740916e-05, |
| "loss": 1.2553, |
| "step": 1125 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.6226260257913246e-05, |
| "loss": 1.2673, |
| "step": 1126 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.622274325908558e-05, |
| "loss": 1.2005, |
| "step": 1127 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.6219226260257914e-05, |
| "loss": 1.2383, |
| "step": 1128 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.6215709261430247e-05, |
| "loss": 1.1619, |
| "step": 1129 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.621219226260258e-05, |
| "loss": 1.2334, |
| "step": 1130 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.6208675263774914e-05, |
| "loss": 1.2069, |
| "step": 1131 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.6205158264947248e-05, |
| "loss": 1.2439, |
| "step": 1132 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.6201641266119578e-05, |
| "loss": 1.2191, |
| "step": 1133 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.619812426729191e-05, |
| "loss": 1.2768, |
| "step": 1134 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.6194607268464245e-05, |
| "loss": 1.1972, |
| "step": 1135 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.619109026963658e-05, |
| "loss": 1.1716, |
| "step": 1136 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.6187573270808912e-05, |
| "loss": 1.2209, |
| "step": 1137 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.6184056271981245e-05, |
| "loss": 1.2019, |
| "step": 1138 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.6180539273153576e-05, |
| "loss": 1.1963, |
| "step": 1139 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.617702227432591e-05, |
| "loss": 1.2076, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.6173505275498243e-05, |
| "loss": 1.2119, |
| "step": 1141 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.6169988276670573e-05, |
| "loss": 1.2739, |
| "step": 1142 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.6166471277842906e-05, |
| "loss": 1.2496, |
| "step": 1143 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.616295427901524e-05, |
| "loss": 1.2581, |
| "step": 1144 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.6159437280187573e-05, |
| "loss": 1.2636, |
| "step": 1145 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.6155920281359907e-05, |
| "loss": 1.2384, |
| "step": 1146 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.615240328253224e-05, |
| "loss": 1.1999, |
| "step": 1147 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.6148886283704574e-05, |
| "loss": 1.2429, |
| "step": 1148 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.6145369284876904e-05, |
| "loss": 1.2405, |
| "step": 1149 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 1.6141852286049238e-05, |
| "loss": 1.216, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.613833528722157e-05, |
| "loss": 1.1931, |
| "step": 1151 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.6134818288393905e-05, |
| "loss": 1.204, |
| "step": 1152 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.6131301289566238e-05, |
| "loss": 1.1752, |
| "step": 1153 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.612778429073857e-05, |
| "loss": 1.1927, |
| "step": 1154 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.6124267291910902e-05, |
| "loss": 1.1986, |
| "step": 1155 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.6120750293083235e-05, |
| "loss": 1.2415, |
| "step": 1156 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.611723329425557e-05, |
| "loss": 1.1907, |
| "step": 1157 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.6113716295427902e-05, |
| "loss": 1.203, |
| "step": 1158 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.6110199296600236e-05, |
| "loss": 1.2111, |
| "step": 1159 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.610668229777257e-05, |
| "loss": 1.2394, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.61031652989449e-05, |
| "loss": 1.2248, |
| "step": 1161 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.6099648300117233e-05, |
| "loss": 1.19, |
| "step": 1162 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.6096131301289567e-05, |
| "loss": 1.2122, |
| "step": 1163 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.60926143024619e-05, |
| "loss": 1.2849, |
| "step": 1164 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.6089097303634234e-05, |
| "loss": 1.2292, |
| "step": 1165 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.6085580304806567e-05, |
| "loss": 1.2411, |
| "step": 1166 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.60820633059789e-05, |
| "loss": 1.2494, |
| "step": 1167 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.607854630715123e-05, |
| "loss": 1.2275, |
| "step": 1168 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.6075029308323564e-05, |
| "loss": 1.2151, |
| "step": 1169 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.6071512309495898e-05, |
| "loss": 1.2393, |
| "step": 1170 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.606799531066823e-05, |
| "loss": 1.2184, |
| "step": 1171 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.6064478311840565e-05, |
| "loss": 1.2407, |
| "step": 1172 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.60609613130129e-05, |
| "loss": 1.2247, |
| "step": 1173 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.605744431418523e-05, |
| "loss": 1.1849, |
| "step": 1174 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.6053927315357562e-05, |
| "loss": 1.1702, |
| "step": 1175 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 1.6050410316529896e-05, |
| "loss": 1.217, |
| "step": 1176 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.6046893317702226e-05, |
| "loss": 1.2258, |
| "step": 1177 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.604337631887456e-05, |
| "loss": 1.2378, |
| "step": 1178 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.6039859320046893e-05, |
| "loss": 1.2007, |
| "step": 1179 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.6036342321219226e-05, |
| "loss": 1.2444, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.603282532239156e-05, |
| "loss": 1.1853, |
| "step": 1181 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.6029308323563893e-05, |
| "loss": 1.1744, |
| "step": 1182 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.6025791324736227e-05, |
| "loss": 1.2526, |
| "step": 1183 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.6022274325908557e-05, |
| "loss": 1.2007, |
| "step": 1184 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.601875732708089e-05, |
| "loss": 1.1935, |
| "step": 1185 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.6015240328253224e-05, |
| "loss": 1.2495, |
| "step": 1186 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.6011723329425558e-05, |
| "loss": 1.2665, |
| "step": 1187 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.600820633059789e-05, |
| "loss": 1.2183, |
| "step": 1188 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.6004689331770225e-05, |
| "loss": 1.2188, |
| "step": 1189 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.6001172332942555e-05, |
| "loss": 1.204, |
| "step": 1190 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.5997655334114888e-05, |
| "loss": 1.286, |
| "step": 1191 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.5994138335287222e-05, |
| "loss": 1.2708, |
| "step": 1192 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.5990621336459555e-05, |
| "loss": 1.2398, |
| "step": 1193 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.598710433763189e-05, |
| "loss": 1.1917, |
| "step": 1194 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.5983587338804222e-05, |
| "loss": 1.2226, |
| "step": 1195 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.5980070339976553e-05, |
| "loss": 1.1732, |
| "step": 1196 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.5976553341148886e-05, |
| "loss": 1.2343, |
| "step": 1197 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.597303634232122e-05, |
| "loss": 1.1882, |
| "step": 1198 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.5969519343493553e-05, |
| "loss": 1.2133, |
| "step": 1199 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.5966002344665887e-05, |
| "loss": 1.2564, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.596248534583822e-05, |
| "loss": 1.2838, |
| "step": 1201 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 1.5958968347010554e-05, |
| "loss": 1.1787, |
| "step": 1202 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.5955451348182884e-05, |
| "loss": 1.2315, |
| "step": 1203 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.5951934349355217e-05, |
| "loss": 1.2288, |
| "step": 1204 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.594841735052755e-05, |
| "loss": 1.22, |
| "step": 1205 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.5944900351699884e-05, |
| "loss": 1.2488, |
| "step": 1206 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.5941383352872218e-05, |
| "loss": 1.2061, |
| "step": 1207 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.593786635404455e-05, |
| "loss": 1.2062, |
| "step": 1208 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.593434935521688e-05, |
| "loss": 1.217, |
| "step": 1209 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.5930832356389215e-05, |
| "loss": 1.2538, |
| "step": 1210 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.592731535756155e-05, |
| "loss": 1.2482, |
| "step": 1211 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.592379835873388e-05, |
| "loss": 1.2026, |
| "step": 1212 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.5920281359906212e-05, |
| "loss": 1.221, |
| "step": 1213 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.5916764361078546e-05, |
| "loss": 1.2054, |
| "step": 1214 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.591324736225088e-05, |
| "loss": 1.2032, |
| "step": 1215 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.5909730363423213e-05, |
| "loss": 1.2165, |
| "step": 1216 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.5906213364595546e-05, |
| "loss": 1.2594, |
| "step": 1217 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.590269636576788e-05, |
| "loss": 1.2232, |
| "step": 1218 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.589917936694021e-05, |
| "loss": 1.2237, |
| "step": 1219 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.5895662368112544e-05, |
| "loss": 1.1981, |
| "step": 1220 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.5892145369284877e-05, |
| "loss": 1.1558, |
| "step": 1221 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.588862837045721e-05, |
| "loss": 1.2359, |
| "step": 1222 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.5885111371629544e-05, |
| "loss": 1.2455, |
| "step": 1223 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.5881594372801878e-05, |
| "loss": 1.2105, |
| "step": 1224 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.5878077373974208e-05, |
| "loss": 1.2657, |
| "step": 1225 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.587456037514654e-05, |
| "loss": 1.2194, |
| "step": 1226 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 1.5871043376318875e-05, |
| "loss": 1.2768, |
| "step": 1227 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.586752637749121e-05, |
| "loss": 1.2089, |
| "step": 1228 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.5864009378663542e-05, |
| "loss": 1.2833, |
| "step": 1229 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.5860492379835875e-05, |
| "loss": 1.2078, |
| "step": 1230 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.5856975381008206e-05, |
| "loss": 1.27, |
| "step": 1231 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.585345838218054e-05, |
| "loss": 1.222, |
| "step": 1232 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.5849941383352873e-05, |
| "loss": 1.2686, |
| "step": 1233 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.5846424384525206e-05, |
| "loss": 1.2563, |
| "step": 1234 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.584290738569754e-05, |
| "loss": 1.2188, |
| "step": 1235 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.5839390386869873e-05, |
| "loss": 1.2644, |
| "step": 1236 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.5835873388042207e-05, |
| "loss": 1.2422, |
| "step": 1237 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.5832356389214537e-05, |
| "loss": 1.2344, |
| "step": 1238 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.582883939038687e-05, |
| "loss": 1.1985, |
| "step": 1239 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.5825322391559204e-05, |
| "loss": 1.2275, |
| "step": 1240 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.5821805392731537e-05, |
| "loss": 1.2306, |
| "step": 1241 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.581828839390387e-05, |
| "loss": 1.1954, |
| "step": 1242 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.5814771395076204e-05, |
| "loss": 1.2313, |
| "step": 1243 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.5811254396248535e-05, |
| "loss": 1.255, |
| "step": 1244 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.5807737397420868e-05, |
| "loss": 1.1995, |
| "step": 1245 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.58042203985932e-05, |
| "loss": 1.2066, |
| "step": 1246 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.5800703399765532e-05, |
| "loss": 1.2759, |
| "step": 1247 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.5797186400937865e-05, |
| "loss": 1.2552, |
| "step": 1248 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.57936694021102e-05, |
| "loss": 1.2117, |
| "step": 1249 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.5790152403282532e-05, |
| "loss": 1.214, |
| "step": 1250 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.5786635404454866e-05, |
| "loss": 1.2498, |
| "step": 1251 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.57831184056272e-05, |
| "loss": 1.221, |
| "step": 1252 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 1.5779601406799533e-05, |
| "loss": 1.1825, |
| "step": 1253 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.5776084407971863e-05, |
| "loss": 1.203, |
| "step": 1254 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.5772567409144197e-05, |
| "loss": 1.2366, |
| "step": 1255 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.576905041031653e-05, |
| "loss": 1.1896, |
| "step": 1256 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.5765533411488864e-05, |
| "loss": 1.2454, |
| "step": 1257 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.5762016412661197e-05, |
| "loss": 1.2318, |
| "step": 1258 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.575849941383353e-05, |
| "loss": 1.2906, |
| "step": 1259 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.575498241500586e-05, |
| "loss": 1.2873, |
| "step": 1260 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.5751465416178194e-05, |
| "loss": 1.1845, |
| "step": 1261 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.5747948417350528e-05, |
| "loss": 1.2133, |
| "step": 1262 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.574443141852286e-05, |
| "loss": 1.2232, |
| "step": 1263 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.5740914419695195e-05, |
| "loss": 1.2203, |
| "step": 1264 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.573739742086753e-05, |
| "loss": 1.1917, |
| "step": 1265 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.573388042203986e-05, |
| "loss": 1.2329, |
| "step": 1266 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.5730363423212192e-05, |
| "loss": 1.1737, |
| "step": 1267 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.5726846424384526e-05, |
| "loss": 1.2294, |
| "step": 1268 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.572332942555686e-05, |
| "loss": 1.2758, |
| "step": 1269 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.5719812426729193e-05, |
| "loss": 1.2924, |
| "step": 1270 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.5716295427901526e-05, |
| "loss": 1.2148, |
| "step": 1271 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.571277842907386e-05, |
| "loss": 1.2524, |
| "step": 1272 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.570926143024619e-05, |
| "loss": 1.1779, |
| "step": 1273 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.5705744431418523e-05, |
| "loss": 1.1859, |
| "step": 1274 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.5702227432590857e-05, |
| "loss": 1.2316, |
| "step": 1275 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.569871043376319e-05, |
| "loss": 1.2256, |
| "step": 1276 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.5695193434935524e-05, |
| "loss": 1.1993, |
| "step": 1277 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.5691676436107857e-05, |
| "loss": 1.2029, |
| "step": 1278 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 1.5688159437280188e-05, |
| "loss": 1.2189, |
| "step": 1279 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.568464243845252e-05, |
| "loss": 1.3076, |
| "step": 1280 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.5681125439624855e-05, |
| "loss": 1.2262, |
| "step": 1281 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.5677608440797185e-05, |
| "loss": 1.2094, |
| "step": 1282 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.567409144196952e-05, |
| "loss": 1.2041, |
| "step": 1283 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.5670574443141852e-05, |
| "loss": 1.2598, |
| "step": 1284 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.5667057444314185e-05, |
| "loss": 1.2057, |
| "step": 1285 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.566354044548652e-05, |
| "loss": 1.2753, |
| "step": 1286 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.5660023446658852e-05, |
| "loss": 1.2381, |
| "step": 1287 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.5656506447831183e-05, |
| "loss": 1.197, |
| "step": 1288 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.5652989449003516e-05, |
| "loss": 1.2315, |
| "step": 1289 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.564947245017585e-05, |
| "loss": 1.2037, |
| "step": 1290 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.5645955451348183e-05, |
| "loss": 1.2277, |
| "step": 1291 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.5642438452520517e-05, |
| "loss": 1.2214, |
| "step": 1292 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.563892145369285e-05, |
| "loss": 1.2337, |
| "step": 1293 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.5635404454865184e-05, |
| "loss": 1.1863, |
| "step": 1294 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.5631887456037514e-05, |
| "loss": 1.2409, |
| "step": 1295 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.5628370457209847e-05, |
| "loss": 1.2133, |
| "step": 1296 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.562485345838218e-05, |
| "loss": 1.2646, |
| "step": 1297 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.5621336459554514e-05, |
| "loss": 1.2103, |
| "step": 1298 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.5617819460726848e-05, |
| "loss": 1.1925, |
| "step": 1299 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.561430246189918e-05, |
| "loss": 1.173, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.561078546307151e-05, |
| "loss": 1.209, |
| "step": 1301 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.5607268464243845e-05, |
| "loss": 1.2408, |
| "step": 1302 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.560375146541618e-05, |
| "loss": 1.2749, |
| "step": 1303 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.5600234466588512e-05, |
| "loss": 1.2443, |
| "step": 1304 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 1.5596717467760846e-05, |
| "loss": 1.2274, |
| "step": 1305 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.559320046893318e-05, |
| "loss": 1.2118, |
| "step": 1306 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.5589683470105513e-05, |
| "loss": 1.2105, |
| "step": 1307 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.5586166471277843e-05, |
| "loss": 1.2724, |
| "step": 1308 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.5582649472450176e-05, |
| "loss": 1.2398, |
| "step": 1309 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.557913247362251e-05, |
| "loss": 1.247, |
| "step": 1310 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.5575615474794843e-05, |
| "loss": 1.2091, |
| "step": 1311 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.5572098475967177e-05, |
| "loss": 1.1997, |
| "step": 1312 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.556858147713951e-05, |
| "loss": 1.2428, |
| "step": 1313 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.556506447831184e-05, |
| "loss": 1.1909, |
| "step": 1314 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.5561547479484174e-05, |
| "loss": 1.2309, |
| "step": 1315 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.5558030480656508e-05, |
| "loss": 1.2119, |
| "step": 1316 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.5554513481828838e-05, |
| "loss": 1.2483, |
| "step": 1317 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.555099648300117e-05, |
| "loss": 1.2069, |
| "step": 1318 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.5547479484173505e-05, |
| "loss": 1.1998, |
| "step": 1319 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.554396248534584e-05, |
| "loss": 1.2156, |
| "step": 1320 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.5540445486518172e-05, |
| "loss": 1.1994, |
| "step": 1321 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.5536928487690505e-05, |
| "loss": 1.2535, |
| "step": 1322 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.5533411488862836e-05, |
| "loss": 1.214, |
| "step": 1323 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.552989449003517e-05, |
| "loss": 1.2357, |
| "step": 1324 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.5526377491207503e-05, |
| "loss": 1.1742, |
| "step": 1325 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.5522860492379836e-05, |
| "loss": 1.2288, |
| "step": 1326 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.551934349355217e-05, |
| "loss": 1.2169, |
| "step": 1327 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.5515826494724503e-05, |
| "loss": 1.2325, |
| "step": 1328 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.5512309495896837e-05, |
| "loss": 1.239, |
| "step": 1329 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.5508792497069167e-05, |
| "loss": 1.2658, |
| "step": 1330 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 1.55052754982415e-05, |
| "loss": 1.2246, |
| "step": 1331 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.5501758499413834e-05, |
| "loss": 1.2576, |
| "step": 1332 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.5498241500586167e-05, |
| "loss": 1.2253, |
| "step": 1333 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.54947245017585e-05, |
| "loss": 1.2203, |
| "step": 1334 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.5491207502930834e-05, |
| "loss": 1.2381, |
| "step": 1335 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.5487690504103165e-05, |
| "loss": 1.2598, |
| "step": 1336 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.5484173505275498e-05, |
| "loss": 1.2289, |
| "step": 1337 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.548065650644783e-05, |
| "loss": 1.2194, |
| "step": 1338 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.5477139507620165e-05, |
| "loss": 1.2315, |
| "step": 1339 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.54736225087925e-05, |
| "loss": 1.2632, |
| "step": 1340 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.5470105509964832e-05, |
| "loss": 1.2073, |
| "step": 1341 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.5466588511137166e-05, |
| "loss": 1.2254, |
| "step": 1342 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.5463071512309496e-05, |
| "loss": 1.2053, |
| "step": 1343 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.545955451348183e-05, |
| "loss": 1.2409, |
| "step": 1344 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.5456037514654163e-05, |
| "loss": 1.2681, |
| "step": 1345 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.5452520515826496e-05, |
| "loss": 1.253, |
| "step": 1346 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.544900351699883e-05, |
| "loss": 1.271, |
| "step": 1347 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.5445486518171164e-05, |
| "loss": 1.2674, |
| "step": 1348 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.5441969519343494e-05, |
| "loss": 1.2031, |
| "step": 1349 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.5438452520515827e-05, |
| "loss": 1.2016, |
| "step": 1350 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.543493552168816e-05, |
| "loss": 1.2699, |
| "step": 1351 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.543141852286049e-05, |
| "loss": 1.2271, |
| "step": 1352 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.5427901524032824e-05, |
| "loss": 1.2153, |
| "step": 1353 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.5424384525205158e-05, |
| "loss": 1.1862, |
| "step": 1354 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.542086752637749e-05, |
| "loss": 1.3138, |
| "step": 1355 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.5417350527549825e-05, |
| "loss": 1.1836, |
| "step": 1356 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.541383352872216e-05, |
| "loss": 1.2527, |
| "step": 1357 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.541031652989449e-05, |
| "loss": 1.2443, |
| "step": 1358 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.5406799531066822e-05, |
| "loss": 1.2378, |
| "step": 1359 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.5403282532239156e-05, |
| "loss": 1.2366, |
| "step": 1360 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.539976553341149e-05, |
| "loss": 1.2085, |
| "step": 1361 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.5396248534583823e-05, |
| "loss": 1.1944, |
| "step": 1362 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.5392731535756156e-05, |
| "loss": 1.2371, |
| "step": 1363 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.538921453692849e-05, |
| "loss": 1.2625, |
| "step": 1364 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.538569753810082e-05, |
| "loss": 1.2392, |
| "step": 1365 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.5382180539273153e-05, |
| "loss": 1.2381, |
| "step": 1366 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.5378663540445487e-05, |
| "loss": 1.1968, |
| "step": 1367 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.537514654161782e-05, |
| "loss": 1.24, |
| "step": 1368 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.5371629542790154e-05, |
| "loss": 1.2073, |
| "step": 1369 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.5368112543962488e-05, |
| "loss": 1.2344, |
| "step": 1370 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.5364595545134818e-05, |
| "loss": 1.2355, |
| "step": 1371 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.536107854630715e-05, |
| "loss": 1.2587, |
| "step": 1372 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.5357561547479485e-05, |
| "loss": 1.2676, |
| "step": 1373 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.5354044548651818e-05, |
| "loss": 1.2697, |
| "step": 1374 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.5350527549824152e-05, |
| "loss": 1.2134, |
| "step": 1375 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.5347010550996485e-05, |
| "loss": 1.1815, |
| "step": 1376 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.534349355216882e-05, |
| "loss": 1.2664, |
| "step": 1377 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.533997655334115e-05, |
| "loss": 1.2275, |
| "step": 1378 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.5336459554513482e-05, |
| "loss": 1.2512, |
| "step": 1379 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.5332942555685816e-05, |
| "loss": 1.2261, |
| "step": 1380 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.532942555685815e-05, |
| "loss": 1.2325, |
| "step": 1381 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.5325908558030483e-05, |
| "loss": 1.2691, |
| "step": 1382 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.5322391559202817e-05, |
| "loss": 1.2446, |
| "step": 1383 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.5318874560375147e-05, |
| "loss": 1.2019, |
| "step": 1384 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.531535756154748e-05, |
| "loss": 1.2174, |
| "step": 1385 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.5311840562719814e-05, |
| "loss": 1.217, |
| "step": 1386 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.5308323563892144e-05, |
| "loss": 1.2328, |
| "step": 1387 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.5304806565064477e-05, |
| "loss": 1.2367, |
| "step": 1388 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.530128956623681e-05, |
| "loss": 1.2236, |
| "step": 1389 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.5297772567409144e-05, |
| "loss": 1.2103, |
| "step": 1390 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.5294255568581478e-05, |
| "loss": 1.2284, |
| "step": 1391 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.529073856975381e-05, |
| "loss": 1.191, |
| "step": 1392 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.528722157092614e-05, |
| "loss": 1.2285, |
| "step": 1393 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.5283704572098475e-05, |
| "loss": 1.24, |
| "step": 1394 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.528018757327081e-05, |
| "loss": 1.2403, |
| "step": 1395 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.5276670574443142e-05, |
| "loss": 1.1818, |
| "step": 1396 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.5273153575615476e-05, |
| "loss": 1.2022, |
| "step": 1397 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.526963657678781e-05, |
| "loss": 1.1895, |
| "step": 1398 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.5266119577960143e-05, |
| "loss": 1.2211, |
| "step": 1399 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.5262602579132473e-05, |
| "loss": 1.1824, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.5259085580304806e-05, |
| "loss": 1.2447, |
| "step": 1401 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.525556858147714e-05, |
| "loss": 1.2318, |
| "step": 1402 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.5252051582649472e-05, |
| "loss": 1.2505, |
| "step": 1403 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.5248534583821805e-05, |
| "loss": 1.2797, |
| "step": 1404 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.5245017584994139e-05, |
| "loss": 1.2491, |
| "step": 1405 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.524150058616647e-05, |
| "loss": 1.2297, |
| "step": 1406 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.5237983587338804e-05, |
| "loss": 1.2347, |
| "step": 1407 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.5234466588511138e-05, |
| "loss": 1.2245, |
| "step": 1408 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.523094958968347e-05, |
| "loss": 1.2277, |
| "step": 1409 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.5227432590855803e-05, |
| "loss": 1.1845, |
| "step": 1410 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.5223915592028137e-05, |
| "loss": 1.2624, |
| "step": 1411 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.5220398593200468e-05, |
| "loss": 1.1971, |
| "step": 1412 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.5216881594372802e-05, |
| "loss": 1.2711, |
| "step": 1413 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.5213364595545135e-05, |
| "loss": 1.2287, |
| "step": 1414 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.5209847596717469e-05, |
| "loss": 1.1874, |
| "step": 1415 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.52063305978898e-05, |
| "loss": 1.1961, |
| "step": 1416 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.5202813599062134e-05, |
| "loss": 1.2311, |
| "step": 1417 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.5199296600234468e-05, |
| "loss": 1.228, |
| "step": 1418 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.5195779601406798e-05, |
| "loss": 1.2908, |
| "step": 1419 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.5192262602579132e-05, |
| "loss": 1.2002, |
| "step": 1420 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.5188745603751467e-05, |
| "loss": 1.2147, |
| "step": 1421 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.5185228604923797e-05, |
| "loss": 1.2251, |
| "step": 1422 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.518171160609613e-05, |
| "loss": 1.2489, |
| "step": 1423 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.5178194607268464e-05, |
| "loss": 1.2737, |
| "step": 1424 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.5174677608440796e-05, |
| "loss": 1.2549, |
| "step": 1425 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.517116060961313e-05, |
| "loss": 1.2153, |
| "step": 1426 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.5167643610785463e-05, |
| "loss": 1.1863, |
| "step": 1427 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.5164126611957795e-05, |
| "loss": 1.2295, |
| "step": 1428 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.5160609613130128e-05, |
| "loss": 1.2035, |
| "step": 1429 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.5157092614302462e-05, |
| "loss": 1.2322, |
| "step": 1430 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.5153575615474795e-05, |
| "loss": 1.2438, |
| "step": 1431 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.5150058616647127e-05, |
| "loss": 1.2477, |
| "step": 1432 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.514654161781946e-05, |
| "loss": 1.2123, |
| "step": 1433 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.5143024618991794e-05, |
| "loss": 1.2148, |
| "step": 1434 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.5139507620164126e-05, |
| "loss": 1.2466, |
| "step": 1435 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.513599062133646e-05, |
| "loss": 1.2467, |
| "step": 1436 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.5132473622508793e-05, |
| "loss": 1.2274, |
| "step": 1437 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.5128956623681125e-05, |
| "loss": 1.1963, |
| "step": 1438 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.5125439624853458e-05, |
| "loss": 1.1986, |
| "step": 1439 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.5121922626025792e-05, |
| "loss": 1.2463, |
| "step": 1440 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.5118405627198124e-05, |
| "loss": 1.2602, |
| "step": 1441 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.5114888628370457e-05, |
| "loss": 1.1855, |
| "step": 1442 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.511137162954279e-05, |
| "loss": 1.2492, |
| "step": 1443 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.5107854630715123e-05, |
| "loss": 1.2376, |
| "step": 1444 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.5104337631887456e-05, |
| "loss": 1.1955, |
| "step": 1445 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.510082063305979e-05, |
| "loss": 1.2263, |
| "step": 1446 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.5097303634232121e-05, |
| "loss": 1.2599, |
| "step": 1447 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.5093786635404455e-05, |
| "loss": 1.2159, |
| "step": 1448 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.5090269636576789e-05, |
| "loss": 1.2194, |
| "step": 1449 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.5086752637749122e-05, |
| "loss": 1.2095, |
| "step": 1450 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.5083235638921454e-05, |
| "loss": 1.208, |
| "step": 1451 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.5079718640093787e-05, |
| "loss": 1.1836, |
| "step": 1452 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.5076201641266121e-05, |
| "loss": 1.2236, |
| "step": 1453 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.5072684642438451e-05, |
| "loss": 1.2317, |
| "step": 1454 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.5069167643610785e-05, |
| "loss": 1.2048, |
| "step": 1455 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.5065650644783118e-05, |
| "loss": 1.2183, |
| "step": 1456 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.506213364595545e-05, |
| "loss": 1.1751, |
| "step": 1457 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.5058616647127783e-05, |
| "loss": 1.181, |
| "step": 1458 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.5055099648300117e-05, |
| "loss": 1.2032, |
| "step": 1459 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.5051582649472449e-05, |
| "loss": 1.2494, |
| "step": 1460 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.5048065650644782e-05, |
| "loss": 1.2559, |
| "step": 1461 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.5044548651817116e-05, |
| "loss": 1.1735, |
| "step": 1462 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.5041031652989448e-05, |
| "loss": 1.1952, |
| "step": 1463 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.5037514654161781e-05, |
| "loss": 1.2134, |
| "step": 1464 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.5033997655334115e-05, |
| "loss": 1.2108, |
| "step": 1465 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.5030480656506448e-05, |
| "loss": 1.1964, |
| "step": 1466 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.502696365767878e-05, |
| "loss": 1.2221, |
| "step": 1467 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.5023446658851114e-05, |
| "loss": 1.2253, |
| "step": 1468 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.5019929660023447e-05, |
| "loss": 1.2106, |
| "step": 1469 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.5016412661195779e-05, |
| "loss": 1.2542, |
| "step": 1470 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.5012895662368112e-05, |
| "loss": 1.223, |
| "step": 1471 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.5009378663540446e-05, |
| "loss": 1.2443, |
| "step": 1472 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.5005861664712778e-05, |
| "loss": 1.2745, |
| "step": 1473 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.5002344665885111e-05, |
| "loss": 1.1359, |
| "step": 1474 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.4998827667057445e-05, |
| "loss": 1.1425, |
| "step": 1475 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.4995310668229777e-05, |
| "loss": 1.2442, |
| "step": 1476 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.499179366940211e-05, |
| "loss": 1.2423, |
| "step": 1477 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.4988276670574444e-05, |
| "loss": 1.2195, |
| "step": 1478 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.4984759671746776e-05, |
| "loss": 1.2609, |
| "step": 1479 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.4981242672919109e-05, |
| "loss": 1.2114, |
| "step": 1480 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.4977725674091443e-05, |
| "loss": 1.2642, |
| "step": 1481 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.4974208675263774e-05, |
| "loss": 1.2269, |
| "step": 1482 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.4970691676436108e-05, |
| "loss": 1.2788, |
| "step": 1483 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.4967174677608442e-05, |
| "loss": 1.2489, |
| "step": 1484 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.4963657678780775e-05, |
| "loss": 1.2209, |
| "step": 1485 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.4960140679953107e-05, |
| "loss": 1.2547, |
| "step": 1486 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.495662368112544e-05, |
| "loss": 1.2031, |
| "step": 1487 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.4953106682297774e-05, |
| "loss": 1.2622, |
| "step": 1488 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.4949589683470104e-05, |
| "loss": 1.1903, |
| "step": 1489 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.4946072684642438e-05, |
| "loss": 1.204, |
| "step": 1490 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.4942555685814771e-05, |
| "loss": 1.1957, |
| "step": 1491 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.4939038686987103e-05, |
| "loss": 1.2413, |
| "step": 1492 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.4935521688159436e-05, |
| "loss": 1.2303, |
| "step": 1493 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.493200468933177e-05, |
| "loss": 1.2271, |
| "step": 1494 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.4928487690504102e-05, |
| "loss": 1.2209, |
| "step": 1495 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.4924970691676435e-05, |
| "loss": 1.2308, |
| "step": 1496 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.4921453692848769e-05, |
| "loss": 1.2298, |
| "step": 1497 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.49179366940211e-05, |
| "loss": 1.2582, |
| "step": 1498 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.4914419695193434e-05, |
| "loss": 1.2682, |
| "step": 1499 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.4910902696365768e-05, |
| "loss": 1.2521, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.49073856975381e-05, |
| "loss": 1.2158, |
| "step": 1501 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.4903868698710433e-05, |
| "loss": 1.2603, |
| "step": 1502 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.4900351699882767e-05, |
| "loss": 1.2172, |
| "step": 1503 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.48968347010551e-05, |
| "loss": 1.2017, |
| "step": 1504 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.4893317702227432e-05, |
| "loss": 1.2523, |
| "step": 1505 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.4889800703399766e-05, |
| "loss": 1.2109, |
| "step": 1506 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.4886283704572099e-05, |
| "loss": 1.2383, |
| "step": 1507 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.4882766705744431e-05, |
| "loss": 1.2243, |
| "step": 1508 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.4879249706916764e-05, |
| "loss": 1.2396, |
| "step": 1509 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.4875732708089098e-05, |
| "loss": 1.196, |
| "step": 1510 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.487221570926143e-05, |
| "loss": 1.2174, |
| "step": 1511 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.4868698710433763e-05, |
| "loss": 1.2221, |
| "step": 1512 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4865181711606097e-05, |
| "loss": 1.2393, |
| "step": 1513 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4861664712778429e-05, |
| "loss": 1.2181, |
| "step": 1514 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4858147713950762e-05, |
| "loss": 1.2085, |
| "step": 1515 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4854630715123096e-05, |
| "loss": 1.2426, |
| "step": 1516 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4851113716295428e-05, |
| "loss": 1.2302, |
| "step": 1517 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4847596717467761e-05, |
| "loss": 1.2276, |
| "step": 1518 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4844079718640095e-05, |
| "loss": 1.2425, |
| "step": 1519 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4840562719812428e-05, |
| "loss": 1.196, |
| "step": 1520 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.483704572098476e-05, |
| "loss": 1.2464, |
| "step": 1521 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4833528722157093e-05, |
| "loss": 1.2466, |
| "step": 1522 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4830011723329427e-05, |
| "loss": 1.2306, |
| "step": 1523 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4826494724501757e-05, |
| "loss": 1.2393, |
| "step": 1524 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.482297772567409e-05, |
| "loss": 1.1882, |
| "step": 1525 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4819460726846424e-05, |
| "loss": 1.2348, |
| "step": 1526 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4815943728018756e-05, |
| "loss": 1.218, |
| "step": 1527 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.481242672919109e-05, |
| "loss": 1.1784, |
| "step": 1528 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4808909730363423e-05, |
| "loss": 1.2092, |
| "step": 1529 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4805392731535755e-05, |
| "loss": 1.1527, |
| "step": 1530 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4801875732708088e-05, |
| "loss": 1.2126, |
| "step": 1531 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4798358733880422e-05, |
| "loss": 1.1655, |
| "step": 1532 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4794841735052754e-05, |
| "loss": 1.287, |
| "step": 1533 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4791324736225087e-05, |
| "loss": 1.2112, |
| "step": 1534 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.478780773739742e-05, |
| "loss": 1.2487, |
| "step": 1535 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4784290738569753e-05, |
| "loss": 1.1741, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4780773739742086e-05, |
| "loss": 1.2354, |
| "step": 1537 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.477725674091442e-05, |
| "loss": 1.217, |
| "step": 1538 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4773739742086753e-05, |
| "loss": 1.2007, |
| "step": 1539 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4770222743259085e-05, |
| "loss": 1.2256, |
| "step": 1540 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4766705744431419e-05, |
| "loss": 1.2159, |
| "step": 1541 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4763188745603752e-05, |
| "loss": 1.2273, |
| "step": 1542 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4759671746776084e-05, |
| "loss": 1.2301, |
| "step": 1543 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4756154747948417e-05, |
| "loss": 1.2175, |
| "step": 1544 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4752637749120751e-05, |
| "loss": 1.1568, |
| "step": 1545 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4749120750293083e-05, |
| "loss": 1.1938, |
| "step": 1546 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4745603751465416e-05, |
| "loss": 1.2032, |
| "step": 1547 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.474208675263775e-05, |
| "loss": 1.2681, |
| "step": 1548 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4738569753810082e-05, |
| "loss": 1.2212, |
| "step": 1549 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4735052754982415e-05, |
| "loss": 1.2402, |
| "step": 1550 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4731535756154749e-05, |
| "loss": 1.2071, |
| "step": 1551 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.472801875732708e-05, |
| "loss": 1.2314, |
| "step": 1552 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4724501758499414e-05, |
| "loss": 1.205, |
| "step": 1553 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4720984759671748e-05, |
| "loss": 1.2514, |
| "step": 1554 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4717467760844081e-05, |
| "loss": 1.2572, |
| "step": 1555 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4713950762016413e-05, |
| "loss": 1.2557, |
| "step": 1556 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4710433763188746e-05, |
| "loss": 1.2648, |
| "step": 1557 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.470691676436108e-05, |
| "loss": 1.2312, |
| "step": 1558 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.470339976553341e-05, |
| "loss": 1.219, |
| "step": 1559 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4699882766705744e-05, |
| "loss": 1.2048, |
| "step": 1560 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4696365767878077e-05, |
| "loss": 1.1931, |
| "step": 1561 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4692848769050409e-05, |
| "loss": 1.162, |
| "step": 1562 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4689331770222743e-05, |
| "loss": 1.2345, |
| "step": 1563 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4685814771395076e-05, |
| "loss": 1.2298, |
| "step": 1564 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4682297772567408e-05, |
| "loss": 1.2038, |
| "step": 1565 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4678780773739741e-05, |
| "loss": 1.2182, |
| "step": 1566 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4675263774912075e-05, |
| "loss": 1.2359, |
| "step": 1567 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4671746776084407e-05, |
| "loss": 1.2097, |
| "step": 1568 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.466822977725674e-05, |
| "loss": 1.2182, |
| "step": 1569 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4664712778429074e-05, |
| "loss": 1.1856, |
| "step": 1570 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4661195779601406e-05, |
| "loss": 1.2012, |
| "step": 1571 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.465767878077374e-05, |
| "loss": 1.2458, |
| "step": 1572 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4654161781946073e-05, |
| "loss": 1.2226, |
| "step": 1573 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4650644783118406e-05, |
| "loss": 1.2772, |
| "step": 1574 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4647127784290738e-05, |
| "loss": 1.2493, |
| "step": 1575 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4643610785463072e-05, |
| "loss": 1.2456, |
| "step": 1576 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4640093786635405e-05, |
| "loss": 1.2404, |
| "step": 1577 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4636576787807737e-05, |
| "loss": 1.2283, |
| "step": 1578 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.463305978898007e-05, |
| "loss": 1.1942, |
| "step": 1579 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4629542790152404e-05, |
| "loss": 1.2799, |
| "step": 1580 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4626025791324736e-05, |
| "loss": 1.2666, |
| "step": 1581 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.462250879249707e-05, |
| "loss": 1.1934, |
| "step": 1582 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4618991793669403e-05, |
| "loss": 1.2007, |
| "step": 1583 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4615474794841735e-05, |
| "loss": 1.2099, |
| "step": 1584 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4611957796014068e-05, |
| "loss": 1.2401, |
| "step": 1585 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4608440797186402e-05, |
| "loss": 1.2031, |
| "step": 1586 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4604923798358734e-05, |
| "loss": 1.2036, |
| "step": 1587 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4601406799531067e-05, |
| "loss": 1.2584, |
| "step": 1588 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.45978898007034e-05, |
| "loss": 1.2336, |
| "step": 1589 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.459437280187573e-05, |
| "loss": 1.2249, |
| "step": 1590 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.4590855803048064e-05, |
| "loss": 1.2335, |
| "step": 1591 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.45873388042204e-05, |
| "loss": 1.2163, |
| "step": 1592 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.4583821805392733e-05, |
| "loss": 1.248, |
| "step": 1593 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.4580304806565063e-05, |
| "loss": 1.2276, |
| "step": 1594 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.4576787807737397e-05, |
| "loss": 1.2554, |
| "step": 1595 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.457327080890973e-05, |
| "loss": 1.1845, |
| "step": 1596 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.4569753810082062e-05, |
| "loss": 1.1922, |
| "step": 1597 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.4566236811254396e-05, |
| "loss": 1.2067, |
| "step": 1598 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.4562719812426729e-05, |
| "loss": 1.2052, |
| "step": 1599 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.4559202813599061e-05, |
| "loss": 1.2, |
| "step": 1600 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.4555685814771394e-05, |
| "loss": 1.2136, |
| "step": 1601 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.4552168815943728e-05, |
| "loss": 1.2218, |
| "step": 1602 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.454865181711606e-05, |
| "loss": 1.215, |
| "step": 1603 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.4545134818288393e-05, |
| "loss": 1.2456, |
| "step": 1604 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.4541617819460727e-05, |
| "loss": 1.2409, |
| "step": 1605 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.4538100820633059e-05, |
| "loss": 1.1815, |
| "step": 1606 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.4534583821805392e-05, |
| "loss": 1.2119, |
| "step": 1607 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.4531066822977726e-05, |
| "loss": 1.1752, |
| "step": 1608 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.452754982415006e-05, |
| "loss": 1.2051, |
| "step": 1609 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.4524032825322391e-05, |
| "loss": 1.2217, |
| "step": 1610 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.4520515826494725e-05, |
| "loss": 1.1874, |
| "step": 1611 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.4516998827667058e-05, |
| "loss": 1.2521, |
| "step": 1612 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.451348182883939e-05, |
| "loss": 1.1921, |
| "step": 1613 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.4509964830011723e-05, |
| "loss": 1.2577, |
| "step": 1614 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.4506447831184057e-05, |
| "loss": 1.2428, |
| "step": 1615 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4502930832356389e-05, |
| "loss": 1.2073, |
| "step": 1616 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4499413833528722e-05, |
| "loss": 1.2197, |
| "step": 1617 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4495896834701056e-05, |
| "loss": 1.1998, |
| "step": 1618 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4492379835873388e-05, |
| "loss": 1.2459, |
| "step": 1619 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4488862837045721e-05, |
| "loss": 1.2402, |
| "step": 1620 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4485345838218055e-05, |
| "loss": 1.288, |
| "step": 1621 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4481828839390387e-05, |
| "loss": 1.2008, |
| "step": 1622 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.447831184056272e-05, |
| "loss": 1.2618, |
| "step": 1623 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4474794841735054e-05, |
| "loss": 1.2085, |
| "step": 1624 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4471277842907384e-05, |
| "loss": 1.2337, |
| "step": 1625 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4467760844079717e-05, |
| "loss": 1.2472, |
| "step": 1626 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4464243845252053e-05, |
| "loss": 1.2596, |
| "step": 1627 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4460726846424386e-05, |
| "loss": 1.2621, |
| "step": 1628 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4457209847596716e-05, |
| "loss": 1.261, |
| "step": 1629 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.445369284876905e-05, |
| "loss": 1.2138, |
| "step": 1630 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4450175849941383e-05, |
| "loss": 1.1868, |
| "step": 1631 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4446658851113715e-05, |
| "loss": 1.2306, |
| "step": 1632 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4443141852286049e-05, |
| "loss": 1.2151, |
| "step": 1633 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4439624853458382e-05, |
| "loss": 1.2464, |
| "step": 1634 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4436107854630714e-05, |
| "loss": 1.2345, |
| "step": 1635 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4432590855803047e-05, |
| "loss": 1.2387, |
| "step": 1636 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4429073856975381e-05, |
| "loss": 1.237, |
| "step": 1637 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4425556858147713e-05, |
| "loss": 1.206, |
| "step": 1638 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4422039859320046e-05, |
| "loss": 1.2088, |
| "step": 1639 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.441852286049238e-05, |
| "loss": 1.1782, |
| "step": 1640 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4415005861664712e-05, |
| "loss": 1.1844, |
| "step": 1641 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4411488862837045e-05, |
| "loss": 1.1998, |
| "step": 1642 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4407971864009379e-05, |
| "loss": 1.2079, |
| "step": 1643 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4404454865181712e-05, |
| "loss": 1.2166, |
| "step": 1644 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4400937866354044e-05, |
| "loss": 1.2129, |
| "step": 1645 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4397420867526378e-05, |
| "loss": 1.2511, |
| "step": 1646 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4393903868698711e-05, |
| "loss": 1.1897, |
| "step": 1647 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4390386869871043e-05, |
| "loss": 1.2149, |
| "step": 1648 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4386869871043376e-05, |
| "loss": 1.2512, |
| "step": 1649 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.438335287221571e-05, |
| "loss": 1.186, |
| "step": 1650 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4379835873388042e-05, |
| "loss": 1.2112, |
| "step": 1651 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4376318874560375e-05, |
| "loss": 1.2464, |
| "step": 1652 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4372801875732709e-05, |
| "loss": 1.2232, |
| "step": 1653 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.436928487690504e-05, |
| "loss": 1.1947, |
| "step": 1654 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4365767878077374e-05, |
| "loss": 1.2503, |
| "step": 1655 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4362250879249708e-05, |
| "loss": 1.207, |
| "step": 1656 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.435873388042204e-05, |
| "loss": 1.2326, |
| "step": 1657 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4355216881594373e-05, |
| "loss": 1.192, |
| "step": 1658 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4351699882766707e-05, |
| "loss": 1.2446, |
| "step": 1659 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4348182883939037e-05, |
| "loss": 1.2152, |
| "step": 1660 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.434466588511137e-05, |
| "loss": 1.2232, |
| "step": 1661 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4341148886283704e-05, |
| "loss": 1.2329, |
| "step": 1662 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4337631887456039e-05, |
| "loss": 1.2114, |
| "step": 1663 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.433411488862837e-05, |
| "loss": 1.227, |
| "step": 1664 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4330597889800703e-05, |
| "loss": 1.1745, |
| "step": 1665 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4327080890973036e-05, |
| "loss": 1.2053, |
| "step": 1666 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4323563892145368e-05, |
| "loss": 1.1685, |
| "step": 1667 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4320046893317702e-05, |
| "loss": 1.1735, |
| "step": 1668 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4316529894490035e-05, |
| "loss": 1.2156, |
| "step": 1669 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4313012895662367e-05, |
| "loss": 1.2673, |
| "step": 1670 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.43094958968347e-05, |
| "loss": 1.2102, |
| "step": 1671 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4305978898007034e-05, |
| "loss": 1.2208, |
| "step": 1672 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4302461899179366e-05, |
| "loss": 1.1901, |
| "step": 1673 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.42989449003517e-05, |
| "loss": 1.2153, |
| "step": 1674 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4295427901524033e-05, |
| "loss": 1.1849, |
| "step": 1675 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4291910902696365e-05, |
| "loss": 1.2409, |
| "step": 1676 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4288393903868698e-05, |
| "loss": 1.2249, |
| "step": 1677 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4284876905041032e-05, |
| "loss": 1.183, |
| "step": 1678 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4281359906213364e-05, |
| "loss": 1.2748, |
| "step": 1679 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4277842907385697e-05, |
| "loss": 1.2006, |
| "step": 1680 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.427432590855803e-05, |
| "loss": 1.2231, |
| "step": 1681 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4270808909730364e-05, |
| "loss": 1.1905, |
| "step": 1682 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4267291910902696e-05, |
| "loss": 1.1927, |
| "step": 1683 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.426377491207503e-05, |
| "loss": 1.1946, |
| "step": 1684 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4260257913247363e-05, |
| "loss": 1.1925, |
| "step": 1685 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4256740914419695e-05, |
| "loss": 1.2325, |
| "step": 1686 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4253223915592028e-05, |
| "loss": 1.2257, |
| "step": 1687 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4249706916764362e-05, |
| "loss": 1.2307, |
| "step": 1688 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4246189917936694e-05, |
| "loss": 1.1727, |
| "step": 1689 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4242672919109027e-05, |
| "loss": 1.2376, |
| "step": 1690 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.423915592028136e-05, |
| "loss": 1.2065, |
| "step": 1691 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4235638921453693e-05, |
| "loss": 1.2148, |
| "step": 1692 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.4232121922626026e-05, |
| "loss": 1.2278, |
| "step": 1693 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.422860492379836e-05, |
| "loss": 1.1572, |
| "step": 1694 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.422508792497069e-05, |
| "loss": 1.2114, |
| "step": 1695 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4221570926143023e-05, |
| "loss": 1.2622, |
| "step": 1696 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4218053927315357e-05, |
| "loss": 1.1979, |
| "step": 1697 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4214536928487692e-05, |
| "loss": 1.1278, |
| "step": 1698 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4211019929660022e-05, |
| "loss": 1.2357, |
| "step": 1699 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4207502930832356e-05, |
| "loss": 1.223, |
| "step": 1700 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.420398593200469e-05, |
| "loss": 1.2319, |
| "step": 1701 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4200468933177021e-05, |
| "loss": 1.2102, |
| "step": 1702 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4196951934349355e-05, |
| "loss": 1.2009, |
| "step": 1703 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4193434935521688e-05, |
| "loss": 1.2568, |
| "step": 1704 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.418991793669402e-05, |
| "loss": 1.1936, |
| "step": 1705 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4186400937866353e-05, |
| "loss": 1.2279, |
| "step": 1706 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4182883939038687e-05, |
| "loss": 1.1693, |
| "step": 1707 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4179366940211019e-05, |
| "loss": 1.2268, |
| "step": 1708 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4175849941383352e-05, |
| "loss": 1.1787, |
| "step": 1709 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4172332942555686e-05, |
| "loss": 1.2104, |
| "step": 1710 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4168815943728018e-05, |
| "loss": 1.2374, |
| "step": 1711 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4165298944900351e-05, |
| "loss": 1.215, |
| "step": 1712 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4161781946072685e-05, |
| "loss": 1.2079, |
| "step": 1713 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4158264947245017e-05, |
| "loss": 1.1771, |
| "step": 1714 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.415474794841735e-05, |
| "loss": 1.1782, |
| "step": 1715 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4151230949589684e-05, |
| "loss": 1.2516, |
| "step": 1716 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4147713950762017e-05, |
| "loss": 1.196, |
| "step": 1717 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4144196951934349e-05, |
| "loss": 1.2479, |
| "step": 1718 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.4140679953106683e-05, |
| "loss": 1.2179, |
| "step": 1719 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.4137162954279016e-05, |
| "loss": 1.1923, |
| "step": 1720 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.4133645955451348e-05, |
| "loss": 1.2614, |
| "step": 1721 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.4130128956623681e-05, |
| "loss": 1.2342, |
| "step": 1722 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.4126611957796015e-05, |
| "loss": 1.2796, |
| "step": 1723 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.4123094958968347e-05, |
| "loss": 1.1987, |
| "step": 1724 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.411957796014068e-05, |
| "loss": 1.2612, |
| "step": 1725 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.4116060961313014e-05, |
| "loss": 1.1774, |
| "step": 1726 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.4112543962485346e-05, |
| "loss": 1.2257, |
| "step": 1727 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.410902696365768e-05, |
| "loss": 1.2239, |
| "step": 1728 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.4105509964830013e-05, |
| "loss": 1.1513, |
| "step": 1729 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.4101992966002343e-05, |
| "loss": 1.1968, |
| "step": 1730 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.4098475967174676e-05, |
| "loss": 1.2387, |
| "step": 1731 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.409495896834701e-05, |
| "loss": 1.2198, |
| "step": 1732 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.4091441969519345e-05, |
| "loss": 1.2015, |
| "step": 1733 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.4087924970691675e-05, |
| "loss": 1.2393, |
| "step": 1734 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.4084407971864009e-05, |
| "loss": 1.2193, |
| "step": 1735 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.4080890973036342e-05, |
| "loss": 1.1713, |
| "step": 1736 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.4077373974208674e-05, |
| "loss": 1.1787, |
| "step": 1737 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.4073856975381008e-05, |
| "loss": 1.2528, |
| "step": 1738 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.4070339976553341e-05, |
| "loss": 1.2745, |
| "step": 1739 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.4066822977725673e-05, |
| "loss": 1.251, |
| "step": 1740 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.4063305978898007e-05, |
| "loss": 1.2044, |
| "step": 1741 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.405978898007034e-05, |
| "loss": 1.2197, |
| "step": 1742 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.4056271981242672e-05, |
| "loss": 1.2156, |
| "step": 1743 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.4052754982415005e-05, |
| "loss": 1.2523, |
| "step": 1744 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.4049237983587339e-05, |
| "loss": 1.1816, |
| "step": 1745 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.404572098475967e-05, |
| "loss": 1.2435, |
| "step": 1746 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.4042203985932004e-05, |
| "loss": 1.1767, |
| "step": 1747 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.4038686987104338e-05, |
| "loss": 1.2407, |
| "step": 1748 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.403516998827667e-05, |
| "loss": 1.1905, |
| "step": 1749 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.4031652989449003e-05, |
| "loss": 1.2543, |
| "step": 1750 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.4028135990621337e-05, |
| "loss": 1.2262, |
| "step": 1751 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.402461899179367e-05, |
| "loss": 1.2329, |
| "step": 1752 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.4021101992966002e-05, |
| "loss": 1.2319, |
| "step": 1753 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.4017584994138336e-05, |
| "loss": 1.2059, |
| "step": 1754 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.4014067995310669e-05, |
| "loss": 1.2335, |
| "step": 1755 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.4010550996483001e-05, |
| "loss": 1.2007, |
| "step": 1756 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.4007033997655334e-05, |
| "loss": 1.2309, |
| "step": 1757 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.4003516998827668e-05, |
| "loss": 1.1705, |
| "step": 1758 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.4e-05, |
| "loss": 1.2111, |
| "step": 1759 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.3996483001172333e-05, |
| "loss": 1.2106, |
| "step": 1760 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.3992966002344667e-05, |
| "loss": 1.1813, |
| "step": 1761 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.3989449003516999e-05, |
| "loss": 1.2188, |
| "step": 1762 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.3985932004689332e-05, |
| "loss": 1.2214, |
| "step": 1763 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.3982415005861666e-05, |
| "loss": 1.2332, |
| "step": 1764 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.3978898007033996e-05, |
| "loss": 1.2036, |
| "step": 1765 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.397538100820633e-05, |
| "loss": 1.1711, |
| "step": 1766 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.3971864009378663e-05, |
| "loss": 1.24, |
| "step": 1767 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.3968347010550996e-05, |
| "loss": 1.2587, |
| "step": 1768 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.3964830011723328e-05, |
| "loss": 1.2186, |
| "step": 1769 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.3961313012895662e-05, |
| "loss": 1.2233, |
| "step": 1770 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.3957796014067995e-05, |
| "loss": 1.2248, |
| "step": 1771 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.3954279015240327e-05, |
| "loss": 1.2431, |
| "step": 1772 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.395076201641266e-05, |
| "loss": 1.2578, |
| "step": 1773 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.3947245017584994e-05, |
| "loss": 1.2221, |
| "step": 1774 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.3943728018757326e-05, |
| "loss": 1.2141, |
| "step": 1775 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.394021101992966e-05, |
| "loss": 1.2598, |
| "step": 1776 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.3936694021101993e-05, |
| "loss": 1.2312, |
| "step": 1777 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.3933177022274325e-05, |
| "loss": 1.2151, |
| "step": 1778 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.3929660023446658e-05, |
| "loss": 1.1931, |
| "step": 1779 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.3926143024618992e-05, |
| "loss": 1.2581, |
| "step": 1780 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.3922626025791324e-05, |
| "loss": 1.276, |
| "step": 1781 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.3919109026963657e-05, |
| "loss": 1.2633, |
| "step": 1782 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.391559202813599e-05, |
| "loss": 1.1935, |
| "step": 1783 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.3912075029308323e-05, |
| "loss": 1.2106, |
| "step": 1784 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.3908558030480656e-05, |
| "loss": 1.1757, |
| "step": 1785 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.390504103165299e-05, |
| "loss": 1.1917, |
| "step": 1786 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.3901524032825323e-05, |
| "loss": 1.2271, |
| "step": 1787 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.3898007033997655e-05, |
| "loss": 1.233, |
| "step": 1788 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.3894490035169989e-05, |
| "loss": 1.2188, |
| "step": 1789 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.3890973036342322e-05, |
| "loss": 1.2508, |
| "step": 1790 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.3887456037514654e-05, |
| "loss": 1.2102, |
| "step": 1791 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.3883939038686987e-05, |
| "loss": 1.2317, |
| "step": 1792 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.3880422039859321e-05, |
| "loss": 1.2171, |
| "step": 1793 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.3876905041031653e-05, |
| "loss": 1.1819, |
| "step": 1794 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.3873388042203986e-05, |
| "loss": 1.2283, |
| "step": 1795 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.386987104337632e-05, |
| "loss": 1.2479, |
| "step": 1796 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.386635404454865e-05, |
| "loss": 1.2424, |
| "step": 1797 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.3862837045720985e-05, |
| "loss": 1.2071, |
| "step": 1798 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.3859320046893319e-05, |
| "loss": 1.214, |
| "step": 1799 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.3855803048065649e-05, |
| "loss": 1.1818, |
| "step": 1800 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.3852286049237982e-05, |
| "loss": 1.2428, |
| "step": 1801 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.3848769050410316e-05, |
| "loss": 1.2008, |
| "step": 1802 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.3845252051582648e-05, |
| "loss": 1.2311, |
| "step": 1803 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.3841735052754981e-05, |
| "loss": 1.2253, |
| "step": 1804 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.3838218053927315e-05, |
| "loss": 1.2285, |
| "step": 1805 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.3834701055099648e-05, |
| "loss": 1.2213, |
| "step": 1806 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.383118405627198e-05, |
| "loss": 1.1902, |
| "step": 1807 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.3827667057444314e-05, |
| "loss": 1.2142, |
| "step": 1808 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.3824150058616647e-05, |
| "loss": 1.2618, |
| "step": 1809 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.3820633059788979e-05, |
| "loss": 1.2074, |
| "step": 1810 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.3817116060961313e-05, |
| "loss": 1.2397, |
| "step": 1811 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.3813599062133646e-05, |
| "loss": 1.1755, |
| "step": 1812 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.3810082063305978e-05, |
| "loss": 1.2054, |
| "step": 1813 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.3806565064478311e-05, |
| "loss": 1.2412, |
| "step": 1814 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.3803048065650645e-05, |
| "loss": 1.2179, |
| "step": 1815 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.3799531066822977e-05, |
| "loss": 1.2361, |
| "step": 1816 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.379601406799531e-05, |
| "loss": 1.2581, |
| "step": 1817 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.3792497069167644e-05, |
| "loss": 1.2315, |
| "step": 1818 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.3788980070339976e-05, |
| "loss": 1.2574, |
| "step": 1819 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.378546307151231e-05, |
| "loss": 1.1946, |
| "step": 1820 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.3781946072684643e-05, |
| "loss": 1.2552, |
| "step": 1821 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.3778429073856976e-05, |
| "loss": 1.191, |
| "step": 1822 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.3774912075029308e-05, |
| "loss": 1.1998, |
| "step": 1823 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.3771395076201642e-05, |
| "loss": 1.2514, |
| "step": 1824 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.3767878077373975e-05, |
| "loss": 1.2301, |
| "step": 1825 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.3764361078546307e-05, |
| "loss": 1.2621, |
| "step": 1826 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.376084407971864e-05, |
| "loss": 1.1842, |
| "step": 1827 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.3757327080890974e-05, |
| "loss": 1.2081, |
| "step": 1828 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.3753810082063306e-05, |
| "loss": 1.2372, |
| "step": 1829 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.375029308323564e-05, |
| "loss": 1.2102, |
| "step": 1830 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.3746776084407973e-05, |
| "loss": 1.169, |
| "step": 1831 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.3743259085580303e-05, |
| "loss": 1.2192, |
| "step": 1832 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.3739742086752638e-05, |
| "loss": 1.1861, |
| "step": 1833 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.3736225087924972e-05, |
| "loss": 1.1673, |
| "step": 1834 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.3732708089097302e-05, |
| "loss": 1.2335, |
| "step": 1835 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.3729191090269635e-05, |
| "loss": 1.2061, |
| "step": 1836 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.3725674091441969e-05, |
| "loss": 1.23, |
| "step": 1837 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.37221570926143e-05, |
| "loss": 1.1918, |
| "step": 1838 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.3718640093786634e-05, |
| "loss": 1.1594, |
| "step": 1839 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.3715123094958968e-05, |
| "loss": 1.1697, |
| "step": 1840 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.3711606096131301e-05, |
| "loss": 1.2151, |
| "step": 1841 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.3708089097303633e-05, |
| "loss": 1.2151, |
| "step": 1842 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.3704572098475967e-05, |
| "loss": 1.2176, |
| "step": 1843 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.37010550996483e-05, |
| "loss": 1.224, |
| "step": 1844 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.3697538100820632e-05, |
| "loss": 1.25, |
| "step": 1845 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.3694021101992966e-05, |
| "loss": 1.2788, |
| "step": 1846 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.3690504103165299e-05, |
| "loss": 1.2098, |
| "step": 1847 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 1.3686987104337631e-05, |
| "loss": 1.2382, |
| "step": 1848 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3683470105509964e-05, |
| "loss": 1.2265, |
| "step": 1849 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3679953106682298e-05, |
| "loss": 1.1955, |
| "step": 1850 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.367643610785463e-05, |
| "loss": 1.2722, |
| "step": 1851 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3672919109026963e-05, |
| "loss": 1.215, |
| "step": 1852 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3669402110199297e-05, |
| "loss": 1.1967, |
| "step": 1853 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3665885111371629e-05, |
| "loss": 1.2093, |
| "step": 1854 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3662368112543962e-05, |
| "loss": 1.2117, |
| "step": 1855 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3658851113716296e-05, |
| "loss": 1.1771, |
| "step": 1856 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.365533411488863e-05, |
| "loss": 1.2698, |
| "step": 1857 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3651817116060961e-05, |
| "loss": 1.2357, |
| "step": 1858 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3648300117233295e-05, |
| "loss": 1.216, |
| "step": 1859 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3644783118405628e-05, |
| "loss": 1.2684, |
| "step": 1860 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.364126611957796e-05, |
| "loss": 1.2508, |
| "step": 1861 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3637749120750294e-05, |
| "loss": 1.305, |
| "step": 1862 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3634232121922627e-05, |
| "loss": 1.209, |
| "step": 1863 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3630715123094959e-05, |
| "loss": 1.2133, |
| "step": 1864 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3627198124267292e-05, |
| "loss": 1.2818, |
| "step": 1865 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3623681125439626e-05, |
| "loss": 1.1934, |
| "step": 1866 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3620164126611956e-05, |
| "loss": 1.2161, |
| "step": 1867 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3616647127784291e-05, |
| "loss": 1.167, |
| "step": 1868 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3613130128956625e-05, |
| "loss": 1.1878, |
| "step": 1869 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3609613130128955e-05, |
| "loss": 1.1702, |
| "step": 1870 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3606096131301288e-05, |
| "loss": 1.2245, |
| "step": 1871 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3602579132473622e-05, |
| "loss": 1.2401, |
| "step": 1872 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3599062133645954e-05, |
| "loss": 1.2622, |
| "step": 1873 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 1.3595545134818287e-05, |
| "loss": 1.1907, |
| "step": 1874 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.3592028135990621e-05, |
| "loss": 1.2414, |
| "step": 1875 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.3588511137162954e-05, |
| "loss": 1.2365, |
| "step": 1876 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.3584994138335286e-05, |
| "loss": 1.2104, |
| "step": 1877 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.358147713950762e-05, |
| "loss": 1.2096, |
| "step": 1878 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.3577960140679953e-05, |
| "loss": 1.2114, |
| "step": 1879 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.3574443141852285e-05, |
| "loss": 1.1521, |
| "step": 1880 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.3570926143024619e-05, |
| "loss": 1.2185, |
| "step": 1881 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.3567409144196952e-05, |
| "loss": 1.2249, |
| "step": 1882 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.3563892145369284e-05, |
| "loss": 1.2331, |
| "step": 1883 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.3560375146541617e-05, |
| "loss": 1.216, |
| "step": 1884 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.3556858147713951e-05, |
| "loss": 1.2455, |
| "step": 1885 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.3553341148886283e-05, |
| "loss": 1.2237, |
| "step": 1886 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.3549824150058616e-05, |
| "loss": 1.2425, |
| "step": 1887 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.354630715123095e-05, |
| "loss": 1.1681, |
| "step": 1888 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.3542790152403282e-05, |
| "loss": 1.1844, |
| "step": 1889 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.3539273153575615e-05, |
| "loss": 1.2427, |
| "step": 1890 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.3535756154747949e-05, |
| "loss": 1.2855, |
| "step": 1891 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.353223915592028e-05, |
| "loss": 1.2269, |
| "step": 1892 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.3528722157092614e-05, |
| "loss": 1.2174, |
| "step": 1893 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.3525205158264948e-05, |
| "loss": 1.1992, |
| "step": 1894 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.3521688159437281e-05, |
| "loss": 1.1968, |
| "step": 1895 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.3518171160609613e-05, |
| "loss": 1.2337, |
| "step": 1896 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.3514654161781947e-05, |
| "loss": 1.2451, |
| "step": 1897 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.351113716295428e-05, |
| "loss": 1.2417, |
| "step": 1898 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 1.3507620164126612e-05, |
| "loss": 1.2036, |
| "step": 1899 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.3504103165298945e-05, |
| "loss": 1.2011, |
| "step": 1900 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.3500586166471279e-05, |
| "loss": 1.1984, |
| "step": 1901 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.3497069167643609e-05, |
| "loss": 1.2061, |
| "step": 1902 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.3493552168815943e-05, |
| "loss": 1.2303, |
| "step": 1903 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.3490035169988278e-05, |
| "loss": 1.179, |
| "step": 1904 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.3486518171160608e-05, |
| "loss": 1.2063, |
| "step": 1905 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.3483001172332941e-05, |
| "loss": 1.2309, |
| "step": 1906 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.3479484173505275e-05, |
| "loss": 1.2293, |
| "step": 1907 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.3475967174677607e-05, |
| "loss": 1.2014, |
| "step": 1908 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.347245017584994e-05, |
| "loss": 1.2527, |
| "step": 1909 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.3468933177022274e-05, |
| "loss": 1.2166, |
| "step": 1910 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.3465416178194607e-05, |
| "loss": 1.1899, |
| "step": 1911 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.346189917936694e-05, |
| "loss": 1.203, |
| "step": 1912 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.3458382180539273e-05, |
| "loss": 1.1705, |
| "step": 1913 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.3454865181711606e-05, |
| "loss": 1.2173, |
| "step": 1914 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.3451348182883938e-05, |
| "loss": 1.1736, |
| "step": 1915 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.3447831184056272e-05, |
| "loss": 1.1637, |
| "step": 1916 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.3444314185228605e-05, |
| "loss": 1.2593, |
| "step": 1917 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.3440797186400937e-05, |
| "loss": 1.2267, |
| "step": 1918 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.343728018757327e-05, |
| "loss": 1.1689, |
| "step": 1919 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.3433763188745604e-05, |
| "loss": 1.204, |
| "step": 1920 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.3430246189917936e-05, |
| "loss": 1.187, |
| "step": 1921 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.342672919109027e-05, |
| "loss": 1.2174, |
| "step": 1922 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.3423212192262603e-05, |
| "loss": 1.2043, |
| "step": 1923 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.3419695193434935e-05, |
| "loss": 1.2476, |
| "step": 1924 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 1.3416178194607268e-05, |
| "loss": 1.2235, |
| "step": 1925 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.3412661195779602e-05, |
| "loss": 1.2443, |
| "step": 1926 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.3409144196951934e-05, |
| "loss": 1.1968, |
| "step": 1927 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.3405627198124267e-05, |
| "loss": 1.2082, |
| "step": 1928 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.34021101992966e-05, |
| "loss": 1.2211, |
| "step": 1929 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.3398593200468934e-05, |
| "loss": 1.2031, |
| "step": 1930 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.3395076201641266e-05, |
| "loss": 1.2112, |
| "step": 1931 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.33915592028136e-05, |
| "loss": 1.1744, |
| "step": 1932 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.3388042203985933e-05, |
| "loss": 1.1835, |
| "step": 1933 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.3384525205158265e-05, |
| "loss": 1.2161, |
| "step": 1934 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.3381008206330598e-05, |
| "loss": 1.2502, |
| "step": 1935 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.3377491207502932e-05, |
| "loss": 1.2322, |
| "step": 1936 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.3373974208675262e-05, |
| "loss": 1.265, |
| "step": 1937 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.3370457209847596e-05, |
| "loss": 1.1892, |
| "step": 1938 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.336694021101993e-05, |
| "loss": 1.2489, |
| "step": 1939 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.3363423212192261e-05, |
| "loss": 1.2299, |
| "step": 1940 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.3359906213364594e-05, |
| "loss": 1.1895, |
| "step": 1941 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.3356389214536928e-05, |
| "loss": 1.1915, |
| "step": 1942 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.335287221570926e-05, |
| "loss": 1.2282, |
| "step": 1943 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.3349355216881593e-05, |
| "loss": 1.214, |
| "step": 1944 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.3345838218053927e-05, |
| "loss": 1.1991, |
| "step": 1945 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.334232121922626e-05, |
| "loss": 1.2218, |
| "step": 1946 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.3338804220398592e-05, |
| "loss": 1.2125, |
| "step": 1947 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.3335287221570926e-05, |
| "loss": 1.1889, |
| "step": 1948 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.333177022274326e-05, |
| "loss": 1.2195, |
| "step": 1949 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.3328253223915591e-05, |
| "loss": 1.1985, |
| "step": 1950 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 1.3324736225087925e-05, |
| "loss": 1.1861, |
| "step": 1951 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3321219226260258e-05, |
| "loss": 1.2135, |
| "step": 1952 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.331770222743259e-05, |
| "loss": 1.2549, |
| "step": 1953 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3314185228604924e-05, |
| "loss": 1.1822, |
| "step": 1954 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3310668229777257e-05, |
| "loss": 1.2019, |
| "step": 1955 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3307151230949589e-05, |
| "loss": 1.202, |
| "step": 1956 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3303634232121922e-05, |
| "loss": 1.224, |
| "step": 1957 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3300117233294256e-05, |
| "loss": 1.1709, |
| "step": 1958 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3296600234466588e-05, |
| "loss": 1.1869, |
| "step": 1959 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3293083235638921e-05, |
| "loss": 1.2224, |
| "step": 1960 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3289566236811255e-05, |
| "loss": 1.2009, |
| "step": 1961 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3286049237983587e-05, |
| "loss": 1.1752, |
| "step": 1962 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.328253223915592e-05, |
| "loss": 1.1565, |
| "step": 1963 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3279015240328254e-05, |
| "loss": 1.2126, |
| "step": 1964 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3275498241500587e-05, |
| "loss": 1.2623, |
| "step": 1965 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3271981242672919e-05, |
| "loss": 1.2661, |
| "step": 1966 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3268464243845253e-05, |
| "loss": 1.1885, |
| "step": 1967 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3264947245017586e-05, |
| "loss": 1.2019, |
| "step": 1968 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3261430246189918e-05, |
| "loss": 1.1774, |
| "step": 1969 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3257913247362251e-05, |
| "loss": 1.2006, |
| "step": 1970 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3254396248534585e-05, |
| "loss": 1.1864, |
| "step": 1971 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3250879249706915e-05, |
| "loss": 1.1649, |
| "step": 1972 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3247362250879249e-05, |
| "loss": 1.1896, |
| "step": 1973 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3243845252051584e-05, |
| "loss": 1.2718, |
| "step": 1974 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3240328253223914e-05, |
| "loss": 1.2216, |
| "step": 1975 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3236811254396248e-05, |
| "loss": 1.2651, |
| "step": 1976 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 1.3233294255568581e-05, |
| "loss": 1.231, |
| "step": 1977 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.3229777256740913e-05, |
| "loss": 1.236, |
| "step": 1978 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.3226260257913246e-05, |
| "loss": 1.2362, |
| "step": 1979 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.322274325908558e-05, |
| "loss": 1.2319, |
| "step": 1980 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.3219226260257912e-05, |
| "loss": 1.2182, |
| "step": 1981 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.3215709261430245e-05, |
| "loss": 1.1932, |
| "step": 1982 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.3212192262602579e-05, |
| "loss": 1.1597, |
| "step": 1983 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.3208675263774912e-05, |
| "loss": 1.2216, |
| "step": 1984 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.3205158264947244e-05, |
| "loss": 1.2597, |
| "step": 1985 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.3201641266119578e-05, |
| "loss": 1.2491, |
| "step": 1986 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.3198124267291911e-05, |
| "loss": 1.177, |
| "step": 1987 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.3194607268464243e-05, |
| "loss": 1.1986, |
| "step": 1988 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.3191090269636577e-05, |
| "loss": 1.2567, |
| "step": 1989 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.318757327080891e-05, |
| "loss": 1.2589, |
| "step": 1990 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.3184056271981242e-05, |
| "loss": 1.1817, |
| "step": 1991 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.3180539273153575e-05, |
| "loss": 1.2126, |
| "step": 1992 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.3177022274325909e-05, |
| "loss": 1.2656, |
| "step": 1993 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.317350527549824e-05, |
| "loss": 1.1892, |
| "step": 1994 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.3169988276670574e-05, |
| "loss": 1.2405, |
| "step": 1995 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.3166471277842908e-05, |
| "loss": 1.2038, |
| "step": 1996 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.316295427901524e-05, |
| "loss": 1.2192, |
| "step": 1997 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.3159437280187573e-05, |
| "loss": 1.2051, |
| "step": 1998 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.3155920281359907e-05, |
| "loss": 1.255, |
| "step": 1999 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.315240328253224e-05, |
| "loss": 1.2248, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.3148886283704572e-05, |
| "loss": 1.209, |
| "step": 2001 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.3145369284876906e-05, |
| "loss": 1.2337, |
| "step": 2002 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 1.3141852286049239e-05, |
| "loss": 1.1905, |
| "step": 2003 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.3138335287221571e-05, |
| "loss": 1.1893, |
| "step": 2004 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.3134818288393904e-05, |
| "loss": 1.2038, |
| "step": 2005 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.3131301289566238e-05, |
| "loss": 1.2288, |
| "step": 2006 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.3127784290738568e-05, |
| "loss": 1.2317, |
| "step": 2007 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.3124267291910902e-05, |
| "loss": 1.2391, |
| "step": 2008 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.3120750293083235e-05, |
| "loss": 1.1933, |
| "step": 2009 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.3117233294255567e-05, |
| "loss": 1.1992, |
| "step": 2010 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.31137162954279e-05, |
| "loss": 1.2594, |
| "step": 2011 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.3110199296600234e-05, |
| "loss": 1.2282, |
| "step": 2012 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.3106682297772566e-05, |
| "loss": 1.1743, |
| "step": 2013 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.31031652989449e-05, |
| "loss": 1.1926, |
| "step": 2014 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.3099648300117233e-05, |
| "loss": 1.2164, |
| "step": 2015 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.3096131301289565e-05, |
| "loss": 1.253, |
| "step": 2016 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.3092614302461898e-05, |
| "loss": 1.2754, |
| "step": 2017 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.3089097303634232e-05, |
| "loss": 1.2148, |
| "step": 2018 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.3085580304806565e-05, |
| "loss": 1.1891, |
| "step": 2019 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.3082063305978897e-05, |
| "loss": 1.2122, |
| "step": 2020 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.307854630715123e-05, |
| "loss": 1.2298, |
| "step": 2021 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.3075029308323564e-05, |
| "loss": 1.2055, |
| "step": 2022 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.3071512309495896e-05, |
| "loss": 1.2023, |
| "step": 2023 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.306799531066823e-05, |
| "loss": 1.185, |
| "step": 2024 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.3064478311840563e-05, |
| "loss": 1.2418, |
| "step": 2025 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.3060961313012895e-05, |
| "loss": 1.2047, |
| "step": 2026 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.3057444314185228e-05, |
| "loss": 1.2001, |
| "step": 2027 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.3053927315357562e-05, |
| "loss": 1.2232, |
| "step": 2028 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 1.3050410316529894e-05, |
| "loss": 1.2191, |
| "step": 2029 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.3046893317702227e-05, |
| "loss": 1.2237, |
| "step": 2030 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.3043376318874561e-05, |
| "loss": 1.212, |
| "step": 2031 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.3039859320046893e-05, |
| "loss": 1.2472, |
| "step": 2032 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.3036342321219226e-05, |
| "loss": 1.2226, |
| "step": 2033 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.303282532239156e-05, |
| "loss": 1.252, |
| "step": 2034 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.3029308323563893e-05, |
| "loss": 1.2119, |
| "step": 2035 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.3025791324736225e-05, |
| "loss": 1.2242, |
| "step": 2036 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.3022274325908559e-05, |
| "loss": 1.1766, |
| "step": 2037 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.3018757327080892e-05, |
| "loss": 1.1575, |
| "step": 2038 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.3015240328253224e-05, |
| "loss": 1.2064, |
| "step": 2039 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.3011723329425558e-05, |
| "loss": 1.1466, |
| "step": 2040 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.3008206330597891e-05, |
| "loss": 1.2172, |
| "step": 2041 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.3004689331770221e-05, |
| "loss": 1.1389, |
| "step": 2042 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.3001172332942555e-05, |
| "loss": 1.2635, |
| "step": 2043 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.2997655334114888e-05, |
| "loss": 1.2286, |
| "step": 2044 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.299413833528722e-05, |
| "loss": 1.2112, |
| "step": 2045 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.2990621336459554e-05, |
| "loss": 1.1804, |
| "step": 2046 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.2987104337631887e-05, |
| "loss": 1.2334, |
| "step": 2047 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.2983587338804219e-05, |
| "loss": 1.2006, |
| "step": 2048 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.2980070339976552e-05, |
| "loss": 1.1265, |
| "step": 2049 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.2976553341148886e-05, |
| "loss": 1.2109, |
| "step": 2050 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.2973036342321218e-05, |
| "loss": 1.1794, |
| "step": 2051 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.2969519343493551e-05, |
| "loss": 1.2103, |
| "step": 2052 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.2966002344665885e-05, |
| "loss": 1.243, |
| "step": 2053 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.2962485345838218e-05, |
| "loss": 1.2368, |
| "step": 2054 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 1.295896834701055e-05, |
| "loss": 1.24, |
| "step": 2055 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.2955451348182884e-05, |
| "loss": 1.2259, |
| "step": 2056 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.2951934349355217e-05, |
| "loss": 1.2465, |
| "step": 2057 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.2948417350527549e-05, |
| "loss": 1.2475, |
| "step": 2058 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.2944900351699883e-05, |
| "loss": 1.1879, |
| "step": 2059 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.2941383352872216e-05, |
| "loss": 1.2556, |
| "step": 2060 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.2937866354044548e-05, |
| "loss": 1.2405, |
| "step": 2061 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.2934349355216881e-05, |
| "loss": 1.1979, |
| "step": 2062 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.2930832356389215e-05, |
| "loss": 1.2097, |
| "step": 2063 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.2927315357561547e-05, |
| "loss": 1.2662, |
| "step": 2064 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.292379835873388e-05, |
| "loss": 1.1876, |
| "step": 2065 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.2920281359906214e-05, |
| "loss": 1.2376, |
| "step": 2066 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.2916764361078546e-05, |
| "loss": 1.1897, |
| "step": 2067 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.291324736225088e-05, |
| "loss": 1.1989, |
| "step": 2068 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.2909730363423213e-05, |
| "loss": 1.1837, |
| "step": 2069 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.2906213364595546e-05, |
| "loss": 1.2327, |
| "step": 2070 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.2902696365767878e-05, |
| "loss": 1.1924, |
| "step": 2071 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.2899179366940212e-05, |
| "loss": 1.239, |
| "step": 2072 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.2895662368112545e-05, |
| "loss": 1.187, |
| "step": 2073 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.2892145369284877e-05, |
| "loss": 1.2766, |
| "step": 2074 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.288862837045721e-05, |
| "loss": 1.1857, |
| "step": 2075 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.2885111371629544e-05, |
| "loss": 1.2284, |
| "step": 2076 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.2881594372801874e-05, |
| "loss": 1.2357, |
| "step": 2077 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.2878077373974208e-05, |
| "loss": 1.2294, |
| "step": 2078 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.2874560375146541e-05, |
| "loss": 1.2291, |
| "step": 2079 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 1.2871043376318873e-05, |
| "loss": 1.2088, |
| "step": 2080 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.2867526377491207e-05, |
| "loss": 1.1814, |
| "step": 2081 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.286400937866354e-05, |
| "loss": 1.193, |
| "step": 2082 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.2860492379835872e-05, |
| "loss": 1.2052, |
| "step": 2083 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.2856975381008205e-05, |
| "loss": 1.234, |
| "step": 2084 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.2853458382180539e-05, |
| "loss": 1.2265, |
| "step": 2085 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.284994138335287e-05, |
| "loss": 1.2027, |
| "step": 2086 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.2846424384525204e-05, |
| "loss": 1.2111, |
| "step": 2087 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.2842907385697538e-05, |
| "loss": 1.2068, |
| "step": 2088 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.2839390386869871e-05, |
| "loss": 1.2512, |
| "step": 2089 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.2835873388042203e-05, |
| "loss": 1.2176, |
| "step": 2090 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.2832356389214537e-05, |
| "loss": 1.1487, |
| "step": 2091 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.282883939038687e-05, |
| "loss": 1.2284, |
| "step": 2092 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.2825322391559202e-05, |
| "loss": 1.259, |
| "step": 2093 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.2821805392731536e-05, |
| "loss": 1.1901, |
| "step": 2094 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.281828839390387e-05, |
| "loss": 1.1748, |
| "step": 2095 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.2814771395076201e-05, |
| "loss": 1.1885, |
| "step": 2096 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.2811254396248535e-05, |
| "loss": 1.22, |
| "step": 2097 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.2807737397420868e-05, |
| "loss": 1.1821, |
| "step": 2098 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.28042203985932e-05, |
| "loss": 1.2135, |
| "step": 2099 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.2800703399765533e-05, |
| "loss": 1.1703, |
| "step": 2100 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.2797186400937867e-05, |
| "loss": 1.1776, |
| "step": 2101 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.2793669402110199e-05, |
| "loss": 1.2032, |
| "step": 2102 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.2790152403282532e-05, |
| "loss": 1.2185, |
| "step": 2103 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.2786635404454866e-05, |
| "loss": 1.2152, |
| "step": 2104 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.2783118405627198e-05, |
| "loss": 1.2413, |
| "step": 2105 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 1.2779601406799531e-05, |
| "loss": 1.2181, |
| "step": 2106 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.2776084407971865e-05, |
| "loss": 1.1901, |
| "step": 2107 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.2772567409144198e-05, |
| "loss": 1.2358, |
| "step": 2108 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.276905041031653e-05, |
| "loss": 1.224, |
| "step": 2109 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.2765533411488864e-05, |
| "loss": 1.2041, |
| "step": 2110 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.2762016412661197e-05, |
| "loss": 1.1962, |
| "step": 2111 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.2758499413833527e-05, |
| "loss": 1.2136, |
| "step": 2112 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.275498241500586e-05, |
| "loss": 1.235, |
| "step": 2113 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.2751465416178194e-05, |
| "loss": 1.2344, |
| "step": 2114 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.2747948417350526e-05, |
| "loss": 1.2255, |
| "step": 2115 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.274443141852286e-05, |
| "loss": 1.2763, |
| "step": 2116 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.2740914419695193e-05, |
| "loss": 1.1784, |
| "step": 2117 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.2737397420867525e-05, |
| "loss": 1.2433, |
| "step": 2118 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.2733880422039858e-05, |
| "loss": 1.2533, |
| "step": 2119 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.2730363423212192e-05, |
| "loss": 1.2452, |
| "step": 2120 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.2726846424384524e-05, |
| "loss": 1.1999, |
| "step": 2121 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.2723329425556857e-05, |
| "loss": 1.2263, |
| "step": 2122 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.2719812426729191e-05, |
| "loss": 1.1704, |
| "step": 2123 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.2716295427901524e-05, |
| "loss": 1.1729, |
| "step": 2124 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.2712778429073856e-05, |
| "loss": 1.2284, |
| "step": 2125 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.270926143024619e-05, |
| "loss": 1.2183, |
| "step": 2126 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.2705744431418523e-05, |
| "loss": 1.2646, |
| "step": 2127 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.2702227432590855e-05, |
| "loss": 1.2217, |
| "step": 2128 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.2698710433763189e-05, |
| "loss": 1.2428, |
| "step": 2129 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.2695193434935522e-05, |
| "loss": 1.1794, |
| "step": 2130 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.2691676436107854e-05, |
| "loss": 1.118, |
| "step": 2131 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 1.2688159437280188e-05, |
| "loss": 1.2201, |
| "step": 2132 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.2684642438452521e-05, |
| "loss": 1.2217, |
| "step": 2133 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.2681125439624853e-05, |
| "loss": 1.2575, |
| "step": 2134 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.2677608440797186e-05, |
| "loss": 1.2429, |
| "step": 2135 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.267409144196952e-05, |
| "loss": 1.16, |
| "step": 2136 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.2670574443141852e-05, |
| "loss": 1.2594, |
| "step": 2137 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.2667057444314185e-05, |
| "loss": 1.215, |
| "step": 2138 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.2663540445486519e-05, |
| "loss": 1.1889, |
| "step": 2139 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.266002344665885e-05, |
| "loss": 1.1797, |
| "step": 2140 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.2656506447831184e-05, |
| "loss": 1.246, |
| "step": 2141 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.2652989449003518e-05, |
| "loss": 1.2205, |
| "step": 2142 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.2649472450175851e-05, |
| "loss": 1.2247, |
| "step": 2143 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.2645955451348181e-05, |
| "loss": 1.2386, |
| "step": 2144 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.2642438452520517e-05, |
| "loss": 1.1771, |
| "step": 2145 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.263892145369285e-05, |
| "loss": 1.2271, |
| "step": 2146 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.263540445486518e-05, |
| "loss": 1.2118, |
| "step": 2147 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.2631887456037514e-05, |
| "loss": 1.2132, |
| "step": 2148 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.2628370457209847e-05, |
| "loss": 1.1952, |
| "step": 2149 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.2624853458382179e-05, |
| "loss": 1.2036, |
| "step": 2150 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.2621336459554513e-05, |
| "loss": 1.1786, |
| "step": 2151 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.2617819460726846e-05, |
| "loss": 1.2196, |
| "step": 2152 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.2614302461899178e-05, |
| "loss": 1.2213, |
| "step": 2153 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.2610785463071512e-05, |
| "loss": 1.1862, |
| "step": 2154 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.2607268464243845e-05, |
| "loss": 1.2372, |
| "step": 2155 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.2603751465416177e-05, |
| "loss": 1.1867, |
| "step": 2156 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.260023446658851e-05, |
| "loss": 1.2192, |
| "step": 2157 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 1.2596717467760844e-05, |
| "loss": 1.2797, |
| "step": 2158 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.2593200468933177e-05, |
| "loss": 1.1766, |
| "step": 2159 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.258968347010551e-05, |
| "loss": 1.1666, |
| "step": 2160 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.2586166471277843e-05, |
| "loss": 1.2031, |
| "step": 2161 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.2582649472450176e-05, |
| "loss": 1.1801, |
| "step": 2162 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.2579132473622508e-05, |
| "loss": 1.2002, |
| "step": 2163 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.2575615474794842e-05, |
| "loss": 1.2804, |
| "step": 2164 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.2572098475967175e-05, |
| "loss": 1.1876, |
| "step": 2165 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.2568581477139507e-05, |
| "loss": 1.1699, |
| "step": 2166 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.256506447831184e-05, |
| "loss": 1.2579, |
| "step": 2167 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.2561547479484174e-05, |
| "loss": 1.1625, |
| "step": 2168 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.2558030480656506e-05, |
| "loss": 1.2263, |
| "step": 2169 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.255451348182884e-05, |
| "loss": 1.2462, |
| "step": 2170 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.2550996483001173e-05, |
| "loss": 1.2065, |
| "step": 2171 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.2547479484173505e-05, |
| "loss": 1.2393, |
| "step": 2172 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.2543962485345838e-05, |
| "loss": 1.2457, |
| "step": 2173 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.2540445486518172e-05, |
| "loss": 1.2074, |
| "step": 2174 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.2536928487690504e-05, |
| "loss": 1.1832, |
| "step": 2175 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.2533411488862837e-05, |
| "loss": 1.219, |
| "step": 2176 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.252989449003517e-05, |
| "loss": 1.2635, |
| "step": 2177 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.2526377491207504e-05, |
| "loss": 1.2363, |
| "step": 2178 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.2522860492379834e-05, |
| "loss": 1.2441, |
| "step": 2179 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.251934349355217e-05, |
| "loss": 1.1859, |
| "step": 2180 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.2515826494724503e-05, |
| "loss": 1.1949, |
| "step": 2181 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.2512309495896833e-05, |
| "loss": 1.2125, |
| "step": 2182 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.2508792497069167e-05, |
| "loss": 1.1783, |
| "step": 2183 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 1.25052754982415e-05, |
| "loss": 1.2147, |
| "step": 2184 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2501758499413832e-05, |
| "loss": 1.2228, |
| "step": 2185 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2498241500586166e-05, |
| "loss": 1.2071, |
| "step": 2186 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.24947245017585e-05, |
| "loss": 1.2096, |
| "step": 2187 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2491207502930831e-05, |
| "loss": 1.1859, |
| "step": 2188 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2487690504103165e-05, |
| "loss": 1.2074, |
| "step": 2189 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2484173505275498e-05, |
| "loss": 1.1805, |
| "step": 2190 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.248065650644783e-05, |
| "loss": 1.2424, |
| "step": 2191 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2477139507620163e-05, |
| "loss": 1.2268, |
| "step": 2192 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2473622508792497e-05, |
| "loss": 1.2341, |
| "step": 2193 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2470105509964829e-05, |
| "loss": 1.2137, |
| "step": 2194 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2466588511137162e-05, |
| "loss": 1.1779, |
| "step": 2195 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2463071512309496e-05, |
| "loss": 1.1971, |
| "step": 2196 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.245955451348183e-05, |
| "loss": 1.1975, |
| "step": 2197 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2456037514654161e-05, |
| "loss": 1.244, |
| "step": 2198 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2452520515826495e-05, |
| "loss": 1.2217, |
| "step": 2199 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2449003516998828e-05, |
| "loss": 1.1651, |
| "step": 2200 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.244548651817116e-05, |
| "loss": 1.2197, |
| "step": 2201 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2441969519343494e-05, |
| "loss": 1.2671, |
| "step": 2202 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2438452520515827e-05, |
| "loss": 1.2739, |
| "step": 2203 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2434935521688159e-05, |
| "loss": 1.2585, |
| "step": 2204 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2431418522860492e-05, |
| "loss": 1.2657, |
| "step": 2205 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2427901524032826e-05, |
| "loss": 1.2268, |
| "step": 2206 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2424384525205158e-05, |
| "loss": 1.2002, |
| "step": 2207 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2420867526377491e-05, |
| "loss": 1.2038, |
| "step": 2208 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2417350527549825e-05, |
| "loss": 1.2291, |
| "step": 2209 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 1.2413833528722157e-05, |
| "loss": 1.2397, |
| "step": 2210 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.241031652989449e-05, |
| "loss": 1.2357, |
| "step": 2211 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2406799531066824e-05, |
| "loss": 1.215, |
| "step": 2212 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2403282532239157e-05, |
| "loss": 1.2083, |
| "step": 2213 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2399765533411487e-05, |
| "loss": 1.2343, |
| "step": 2214 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2396248534583823e-05, |
| "loss": 1.2235, |
| "step": 2215 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2392731535756156e-05, |
| "loss": 1.215, |
| "step": 2216 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2389214536928486e-05, |
| "loss": 1.1713, |
| "step": 2217 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.238569753810082e-05, |
| "loss": 1.21, |
| "step": 2218 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2382180539273153e-05, |
| "loss": 1.2131, |
| "step": 2219 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2378663540445485e-05, |
| "loss": 1.2508, |
| "step": 2220 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2375146541617819e-05, |
| "loss": 1.2008, |
| "step": 2221 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2371629542790152e-05, |
| "loss": 1.2274, |
| "step": 2222 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2368112543962484e-05, |
| "loss": 1.2536, |
| "step": 2223 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2364595545134818e-05, |
| "loss": 1.2324, |
| "step": 2224 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2361078546307151e-05, |
| "loss": 1.2509, |
| "step": 2225 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2357561547479483e-05, |
| "loss": 1.2459, |
| "step": 2226 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2354044548651816e-05, |
| "loss": 1.179, |
| "step": 2227 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.235052754982415e-05, |
| "loss": 1.2232, |
| "step": 2228 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2347010550996482e-05, |
| "loss": 1.1759, |
| "step": 2229 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2343493552168815e-05, |
| "loss": 1.1988, |
| "step": 2230 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2339976553341149e-05, |
| "loss": 1.2548, |
| "step": 2231 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2336459554513482e-05, |
| "loss": 1.2339, |
| "step": 2232 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2332942555685814e-05, |
| "loss": 1.2594, |
| "step": 2233 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2329425556858148e-05, |
| "loss": 1.2062, |
| "step": 2234 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2325908558030481e-05, |
| "loss": 1.2161, |
| "step": 2235 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.2322391559202813e-05, |
| "loss": 1.1799, |
| "step": 2236 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.2318874560375147e-05, |
| "loss": 1.2545, |
| "step": 2237 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.231535756154748e-05, |
| "loss": 1.1718, |
| "step": 2238 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.2311840562719812e-05, |
| "loss": 1.2226, |
| "step": 2239 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.2308323563892145e-05, |
| "loss": 1.1798, |
| "step": 2240 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.2304806565064479e-05, |
| "loss": 1.2325, |
| "step": 2241 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.230128956623681e-05, |
| "loss": 1.2065, |
| "step": 2242 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.2297772567409144e-05, |
| "loss": 1.2253, |
| "step": 2243 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.2294255568581478e-05, |
| "loss": 1.2016, |
| "step": 2244 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.229073856975381e-05, |
| "loss": 1.1764, |
| "step": 2245 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.2287221570926143e-05, |
| "loss": 1.1903, |
| "step": 2246 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.2283704572098477e-05, |
| "loss": 1.2284, |
| "step": 2247 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.228018757327081e-05, |
| "loss": 1.2186, |
| "step": 2248 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.227667057444314e-05, |
| "loss": 1.2147, |
| "step": 2249 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.2273153575615474e-05, |
| "loss": 1.2278, |
| "step": 2250 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.226963657678781e-05, |
| "loss": 1.2218, |
| "step": 2251 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.226611957796014e-05, |
| "loss": 1.2258, |
| "step": 2252 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.2262602579132473e-05, |
| "loss": 1.2139, |
| "step": 2253 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.2259085580304806e-05, |
| "loss": 1.2149, |
| "step": 2254 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.2255568581477138e-05, |
| "loss": 1.2178, |
| "step": 2255 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.2252051582649472e-05, |
| "loss": 1.1479, |
| "step": 2256 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.2248534583821805e-05, |
| "loss": 1.2278, |
| "step": 2257 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.2245017584994137e-05, |
| "loss": 1.2184, |
| "step": 2258 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.224150058616647e-05, |
| "loss": 1.1996, |
| "step": 2259 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.2237983587338804e-05, |
| "loss": 1.1327, |
| "step": 2260 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.2234466588511136e-05, |
| "loss": 1.1867, |
| "step": 2261 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.223094958968347e-05, |
| "loss": 1.2265, |
| "step": 2262 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.2227432590855803e-05, |
| "loss": 1.229, |
| "step": 2263 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.2223915592028135e-05, |
| "loss": 1.188, |
| "step": 2264 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.2220398593200468e-05, |
| "loss": 1.1784, |
| "step": 2265 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.2216881594372802e-05, |
| "loss": 1.2367, |
| "step": 2266 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.2213364595545135e-05, |
| "loss": 1.2636, |
| "step": 2267 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.2209847596717467e-05, |
| "loss": 1.1718, |
| "step": 2268 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.22063305978898e-05, |
| "loss": 1.253, |
| "step": 2269 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.2202813599062134e-05, |
| "loss": 1.229, |
| "step": 2270 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.2199296600234466e-05, |
| "loss": 1.2032, |
| "step": 2271 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.21957796014068e-05, |
| "loss": 1.225, |
| "step": 2272 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.2192262602579133e-05, |
| "loss": 1.2215, |
| "step": 2273 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.2188745603751465e-05, |
| "loss": 1.2465, |
| "step": 2274 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.2185228604923799e-05, |
| "loss": 1.2451, |
| "step": 2275 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.2181711606096132e-05, |
| "loss": 1.251, |
| "step": 2276 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.2178194607268464e-05, |
| "loss": 1.165, |
| "step": 2277 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.2174677608440797e-05, |
| "loss": 1.1654, |
| "step": 2278 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.2171160609613131e-05, |
| "loss": 1.1686, |
| "step": 2279 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.2167643610785463e-05, |
| "loss": 1.1878, |
| "step": 2280 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.2164126611957796e-05, |
| "loss": 1.2237, |
| "step": 2281 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.216060961313013e-05, |
| "loss": 1.2133, |
| "step": 2282 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.215709261430246e-05, |
| "loss": 1.2236, |
| "step": 2283 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.2153575615474793e-05, |
| "loss": 1.2471, |
| "step": 2284 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.2150058616647127e-05, |
| "loss": 1.2281, |
| "step": 2285 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.2146541617819462e-05, |
| "loss": 1.1952, |
| "step": 2286 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.2143024618991792e-05, |
| "loss": 1.2214, |
| "step": 2287 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2139507620164126e-05, |
| "loss": 1.209, |
| "step": 2288 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.213599062133646e-05, |
| "loss": 1.2385, |
| "step": 2289 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2132473622508791e-05, |
| "loss": 1.2154, |
| "step": 2290 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2128956623681125e-05, |
| "loss": 1.2335, |
| "step": 2291 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2125439624853458e-05, |
| "loss": 1.2318, |
| "step": 2292 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.212192262602579e-05, |
| "loss": 1.2062, |
| "step": 2293 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2118405627198124e-05, |
| "loss": 1.2292, |
| "step": 2294 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2114888628370457e-05, |
| "loss": 1.2348, |
| "step": 2295 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2111371629542789e-05, |
| "loss": 1.1781, |
| "step": 2296 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2107854630715122e-05, |
| "loss": 1.2116, |
| "step": 2297 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2104337631887456e-05, |
| "loss": 1.2787, |
| "step": 2298 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2100820633059788e-05, |
| "loss": 1.201, |
| "step": 2299 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2097303634232121e-05, |
| "loss": 1.2171, |
| "step": 2300 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2093786635404455e-05, |
| "loss": 1.2303, |
| "step": 2301 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2090269636576788e-05, |
| "loss": 1.2155, |
| "step": 2302 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.208675263774912e-05, |
| "loss": 1.2049, |
| "step": 2303 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2083235638921454e-05, |
| "loss": 1.206, |
| "step": 2304 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2079718640093787e-05, |
| "loss": 1.2143, |
| "step": 2305 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2076201641266119e-05, |
| "loss": 1.2249, |
| "step": 2306 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2072684642438453e-05, |
| "loss": 1.2115, |
| "step": 2307 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2069167643610786e-05, |
| "loss": 1.1846, |
| "step": 2308 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2065650644783118e-05, |
| "loss": 1.1764, |
| "step": 2309 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2062133645955452e-05, |
| "loss": 1.2162, |
| "step": 2310 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2058616647127785e-05, |
| "loss": 1.226, |
| "step": 2311 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2055099648300117e-05, |
| "loss": 1.2046, |
| "step": 2312 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.205158264947245e-05, |
| "loss": 1.1668, |
| "step": 2313 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.2048065650644784e-05, |
| "loss": 1.2119, |
| "step": 2314 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.2044548651817116e-05, |
| "loss": 1.2261, |
| "step": 2315 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.204103165298945e-05, |
| "loss": 1.1855, |
| "step": 2316 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.2037514654161783e-05, |
| "loss": 1.2112, |
| "step": 2317 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.2033997655334113e-05, |
| "loss": 1.1966, |
| "step": 2318 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.2030480656506446e-05, |
| "loss": 1.2119, |
| "step": 2319 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.202696365767878e-05, |
| "loss": 1.2038, |
| "step": 2320 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.2023446658851114e-05, |
| "loss": 1.2889, |
| "step": 2321 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.2019929660023445e-05, |
| "loss": 1.2334, |
| "step": 2322 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.2016412661195779e-05, |
| "loss": 1.2213, |
| "step": 2323 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.2012895662368112e-05, |
| "loss": 1.222, |
| "step": 2324 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.2009378663540444e-05, |
| "loss": 1.2185, |
| "step": 2325 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.2005861664712778e-05, |
| "loss": 1.2319, |
| "step": 2326 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.2002344665885111e-05, |
| "loss": 1.2376, |
| "step": 2327 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.1998827667057443e-05, |
| "loss": 1.2381, |
| "step": 2328 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.1995310668229777e-05, |
| "loss": 1.2177, |
| "step": 2329 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.199179366940211e-05, |
| "loss": 1.2132, |
| "step": 2330 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.1988276670574442e-05, |
| "loss": 1.1789, |
| "step": 2331 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.1984759671746776e-05, |
| "loss": 1.1751, |
| "step": 2332 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.1981242672919109e-05, |
| "loss": 1.2336, |
| "step": 2333 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.1977725674091441e-05, |
| "loss": 1.2323, |
| "step": 2334 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.1974208675263774e-05, |
| "loss": 1.2538, |
| "step": 2335 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.1970691676436108e-05, |
| "loss": 1.2132, |
| "step": 2336 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.1967174677608441e-05, |
| "loss": 1.1817, |
| "step": 2337 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.1963657678780773e-05, |
| "loss": 1.183, |
| "step": 2338 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.1960140679953107e-05, |
| "loss": 1.2202, |
| "step": 2339 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.195662368112544e-05, |
| "loss": 1.1611, |
| "step": 2340 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1953106682297772e-05, |
| "loss": 1.2355, |
| "step": 2341 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1949589683470106e-05, |
| "loss": 1.2083, |
| "step": 2342 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.194607268464244e-05, |
| "loss": 1.245, |
| "step": 2343 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1942555685814771e-05, |
| "loss": 1.2361, |
| "step": 2344 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1939038686987105e-05, |
| "loss": 1.253, |
| "step": 2345 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1935521688159438e-05, |
| "loss": 1.2206, |
| "step": 2346 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.193200468933177e-05, |
| "loss": 1.1951, |
| "step": 2347 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1928487690504103e-05, |
| "loss": 1.2637, |
| "step": 2348 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1924970691676437e-05, |
| "loss": 1.1656, |
| "step": 2349 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1921453692848769e-05, |
| "loss": 1.2135, |
| "step": 2350 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1917936694021102e-05, |
| "loss": 1.2743, |
| "step": 2351 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1914419695193436e-05, |
| "loss": 1.2146, |
| "step": 2352 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1910902696365766e-05, |
| "loss": 1.2232, |
| "step": 2353 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.19073856975381e-05, |
| "loss": 1.1663, |
| "step": 2354 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1903868698710433e-05, |
| "loss": 1.1835, |
| "step": 2355 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1900351699882767e-05, |
| "loss": 1.2389, |
| "step": 2356 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1896834701055098e-05, |
| "loss": 1.2216, |
| "step": 2357 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1893317702227432e-05, |
| "loss": 1.2396, |
| "step": 2358 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1889800703399765e-05, |
| "loss": 1.2112, |
| "step": 2359 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1886283704572097e-05, |
| "loss": 1.1971, |
| "step": 2360 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.188276670574443e-05, |
| "loss": 1.2206, |
| "step": 2361 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1879249706916764e-05, |
| "loss": 1.2287, |
| "step": 2362 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1875732708089096e-05, |
| "loss": 1.1893, |
| "step": 2363 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.187221570926143e-05, |
| "loss": 1.2103, |
| "step": 2364 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.1868698710433763e-05, |
| "loss": 1.18, |
| "step": 2365 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.1865181711606095e-05, |
| "loss": 1.2202, |
| "step": 2366 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.1861664712778429e-05, |
| "loss": 1.1973, |
| "step": 2367 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.1858147713950762e-05, |
| "loss": 1.1813, |
| "step": 2368 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.1854630715123094e-05, |
| "loss": 1.1732, |
| "step": 2369 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.1851113716295427e-05, |
| "loss": 1.237, |
| "step": 2370 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.1847596717467761e-05, |
| "loss": 1.1929, |
| "step": 2371 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.1844079718640093e-05, |
| "loss": 1.1697, |
| "step": 2372 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.1840562719812426e-05, |
| "loss": 1.1895, |
| "step": 2373 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.183704572098476e-05, |
| "loss": 1.2302, |
| "step": 2374 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.1833528722157093e-05, |
| "loss": 1.2571, |
| "step": 2375 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.1830011723329425e-05, |
| "loss": 1.2288, |
| "step": 2376 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.1826494724501759e-05, |
| "loss": 1.2421, |
| "step": 2377 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.1822977725674092e-05, |
| "loss": 1.2098, |
| "step": 2378 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.1819460726846424e-05, |
| "loss": 1.2195, |
| "step": 2379 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.1815943728018758e-05, |
| "loss": 1.2213, |
| "step": 2380 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.1812426729191091e-05, |
| "loss": 1.2416, |
| "step": 2381 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.1808909730363423e-05, |
| "loss": 1.2447, |
| "step": 2382 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.1805392731535756e-05, |
| "loss": 1.2336, |
| "step": 2383 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.180187573270809e-05, |
| "loss": 1.2705, |
| "step": 2384 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.179835873388042e-05, |
| "loss": 1.1578, |
| "step": 2385 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.1794841735052755e-05, |
| "loss": 1.1486, |
| "step": 2386 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.1791324736225089e-05, |
| "loss": 1.2257, |
| "step": 2387 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.1787807737397419e-05, |
| "loss": 1.2334, |
| "step": 2388 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.1784290738569753e-05, |
| "loss": 1.1879, |
| "step": 2389 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.1780773739742086e-05, |
| "loss": 1.1954, |
| "step": 2390 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.177725674091442e-05, |
| "loss": 1.2174, |
| "step": 2391 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.1773739742086751e-05, |
| "loss": 1.2, |
| "step": 2392 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.1770222743259085e-05, |
| "loss": 1.2226, |
| "step": 2393 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.1766705744431418e-05, |
| "loss": 1.2159, |
| "step": 2394 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.176318874560375e-05, |
| "loss": 1.2164, |
| "step": 2395 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.1759671746776084e-05, |
| "loss": 1.2146, |
| "step": 2396 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.1756154747948417e-05, |
| "loss": 1.1985, |
| "step": 2397 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.175263774912075e-05, |
| "loss": 1.2689, |
| "step": 2398 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.1749120750293083e-05, |
| "loss": 1.204, |
| "step": 2399 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.1745603751465416e-05, |
| "loss": 1.2374, |
| "step": 2400 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.1742086752637748e-05, |
| "loss": 1.2317, |
| "step": 2401 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.1738569753810082e-05, |
| "loss": 1.1554, |
| "step": 2402 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.1735052754982415e-05, |
| "loss": 1.1644, |
| "step": 2403 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.1731535756154747e-05, |
| "loss": 1.2056, |
| "step": 2404 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.172801875732708e-05, |
| "loss": 1.2016, |
| "step": 2405 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.1724501758499414e-05, |
| "loss": 1.1875, |
| "step": 2406 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.1720984759671746e-05, |
| "loss": 1.1956, |
| "step": 2407 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.171746776084408e-05, |
| "loss": 1.1988, |
| "step": 2408 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.1713950762016413e-05, |
| "loss": 1.2198, |
| "step": 2409 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.1710433763188746e-05, |
| "loss": 1.2398, |
| "step": 2410 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.1706916764361078e-05, |
| "loss": 1.2321, |
| "step": 2411 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.1703399765533412e-05, |
| "loss": 1.1807, |
| "step": 2412 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.1699882766705745e-05, |
| "loss": 1.2446, |
| "step": 2413 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.1696365767878077e-05, |
| "loss": 1.2367, |
| "step": 2414 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.169284876905041e-05, |
| "loss": 1.2371, |
| "step": 2415 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.1689331770222744e-05, |
| "loss": 1.2177, |
| "step": 2416 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.1685814771395076e-05, |
| "loss": 1.1892, |
| "step": 2417 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.168229777256741e-05, |
| "loss": 1.218, |
| "step": 2418 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.1678780773739743e-05, |
| "loss": 1.2213, |
| "step": 2419 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.1675263774912073e-05, |
| "loss": 1.2009, |
| "step": 2420 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.1671746776084408e-05, |
| "loss": 1.1939, |
| "step": 2421 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.1668229777256742e-05, |
| "loss": 1.1366, |
| "step": 2422 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.1664712778429072e-05, |
| "loss": 1.2337, |
| "step": 2423 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.1661195779601406e-05, |
| "loss": 1.2291, |
| "step": 2424 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.1657678780773739e-05, |
| "loss": 1.2281, |
| "step": 2425 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.1654161781946073e-05, |
| "loss": 1.175, |
| "step": 2426 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.1650644783118404e-05, |
| "loss": 1.231, |
| "step": 2427 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.1647127784290738e-05, |
| "loss": 1.2057, |
| "step": 2428 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.1643610785463071e-05, |
| "loss": 1.1817, |
| "step": 2429 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.1640093786635403e-05, |
| "loss": 1.2649, |
| "step": 2430 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.1636576787807737e-05, |
| "loss": 1.1873, |
| "step": 2431 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.163305978898007e-05, |
| "loss": 1.2201, |
| "step": 2432 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.1629542790152402e-05, |
| "loss": 1.2656, |
| "step": 2433 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.1626025791324736e-05, |
| "loss": 1.2158, |
| "step": 2434 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.162250879249707e-05, |
| "loss": 1.2224, |
| "step": 2435 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.1618991793669401e-05, |
| "loss": 1.1933, |
| "step": 2436 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.1615474794841735e-05, |
| "loss": 1.2202, |
| "step": 2437 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.1611957796014068e-05, |
| "loss": 1.2007, |
| "step": 2438 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.16084407971864e-05, |
| "loss": 1.234, |
| "step": 2439 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.1604923798358733e-05, |
| "loss": 1.1707, |
| "step": 2440 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.1601406799531067e-05, |
| "loss": 1.2242, |
| "step": 2441 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.1597889800703399e-05, |
| "loss": 1.2063, |
| "step": 2442 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1594372801875732e-05, |
| "loss": 1.2377, |
| "step": 2443 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1590855803048066e-05, |
| "loss": 1.2301, |
| "step": 2444 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.15873388042204e-05, |
| "loss": 1.2121, |
| "step": 2445 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1583821805392731e-05, |
| "loss": 1.1945, |
| "step": 2446 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1580304806565065e-05, |
| "loss": 1.2263, |
| "step": 2447 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1576787807737398e-05, |
| "loss": 1.1555, |
| "step": 2448 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.157327080890973e-05, |
| "loss": 1.1979, |
| "step": 2449 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1569753810082064e-05, |
| "loss": 1.1763, |
| "step": 2450 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1566236811254397e-05, |
| "loss": 1.2306, |
| "step": 2451 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1562719812426729e-05, |
| "loss": 1.2388, |
| "step": 2452 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1559202813599063e-05, |
| "loss": 1.2459, |
| "step": 2453 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1555685814771396e-05, |
| "loss": 1.2262, |
| "step": 2454 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1552168815943726e-05, |
| "loss": 1.1875, |
| "step": 2455 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.154865181711606e-05, |
| "loss": 1.1834, |
| "step": 2456 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1545134818288395e-05, |
| "loss": 1.2284, |
| "step": 2457 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1541617819460725e-05, |
| "loss": 1.2447, |
| "step": 2458 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1538100820633059e-05, |
| "loss": 1.1622, |
| "step": 2459 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1534583821805392e-05, |
| "loss": 1.2004, |
| "step": 2460 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1531066822977726e-05, |
| "loss": 1.1897, |
| "step": 2461 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1527549824150057e-05, |
| "loss": 1.2733, |
| "step": 2462 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1524032825322391e-05, |
| "loss": 1.244, |
| "step": 2463 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1520515826494725e-05, |
| "loss": 1.23, |
| "step": 2464 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1516998827667056e-05, |
| "loss": 1.1839, |
| "step": 2465 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.151348182883939e-05, |
| "loss": 1.2113, |
| "step": 2466 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1509964830011723e-05, |
| "loss": 1.1638, |
| "step": 2467 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.1506447831184055e-05, |
| "loss": 1.2389, |
| "step": 2468 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1502930832356389e-05, |
| "loss": 1.2189, |
| "step": 2469 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1499413833528722e-05, |
| "loss": 1.1632, |
| "step": 2470 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1495896834701054e-05, |
| "loss": 1.1867, |
| "step": 2471 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1492379835873388e-05, |
| "loss": 1.25, |
| "step": 2472 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1488862837045721e-05, |
| "loss": 1.2261, |
| "step": 2473 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1485345838218053e-05, |
| "loss": 1.2352, |
| "step": 2474 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1481828839390386e-05, |
| "loss": 1.1823, |
| "step": 2475 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.147831184056272e-05, |
| "loss": 1.177, |
| "step": 2476 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1474794841735052e-05, |
| "loss": 1.1609, |
| "step": 2477 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1471277842907385e-05, |
| "loss": 1.2315, |
| "step": 2478 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1467760844079719e-05, |
| "loss": 1.2338, |
| "step": 2479 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1464243845252052e-05, |
| "loss": 1.2288, |
| "step": 2480 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1460726846424384e-05, |
| "loss": 1.22, |
| "step": 2481 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1457209847596718e-05, |
| "loss": 1.1932, |
| "step": 2482 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1453692848769051e-05, |
| "loss": 1.2269, |
| "step": 2483 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1450175849941383e-05, |
| "loss": 1.1928, |
| "step": 2484 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1446658851113717e-05, |
| "loss": 1.2156, |
| "step": 2485 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.144314185228605e-05, |
| "loss": 1.1691, |
| "step": 2486 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1439624853458382e-05, |
| "loss": 1.2152, |
| "step": 2487 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1436107854630716e-05, |
| "loss": 1.196, |
| "step": 2488 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1432590855803049e-05, |
| "loss": 1.2412, |
| "step": 2489 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.142907385697538e-05, |
| "loss": 1.1891, |
| "step": 2490 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1425556858147713e-05, |
| "loss": 1.2634, |
| "step": 2491 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1422039859320048e-05, |
| "loss": 1.2263, |
| "step": 2492 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1418522860492378e-05, |
| "loss": 1.2178, |
| "step": 2493 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.1415005861664712e-05, |
| "loss": 1.1609, |
| "step": 2494 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1411488862837045e-05, |
| "loss": 1.1864, |
| "step": 2495 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1407971864009377e-05, |
| "loss": 1.1816, |
| "step": 2496 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.140445486518171e-05, |
| "loss": 1.2254, |
| "step": 2497 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1400937866354044e-05, |
| "loss": 1.1802, |
| "step": 2498 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1397420867526378e-05, |
| "loss": 1.2292, |
| "step": 2499 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.139390386869871e-05, |
| "loss": 1.1785, |
| "step": 2500 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1390386869871043e-05, |
| "loss": 1.2133, |
| "step": 2501 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1386869871043376e-05, |
| "loss": 1.2143, |
| "step": 2502 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1383352872215708e-05, |
| "loss": 1.241, |
| "step": 2503 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1379835873388042e-05, |
| "loss": 1.2147, |
| "step": 2504 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1376318874560375e-05, |
| "loss": 1.2117, |
| "step": 2505 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1372801875732707e-05, |
| "loss": 1.2866, |
| "step": 2506 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.136928487690504e-05, |
| "loss": 1.2379, |
| "step": 2507 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1365767878077374e-05, |
| "loss": 1.2435, |
| "step": 2508 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1362250879249706e-05, |
| "loss": 1.2326, |
| "step": 2509 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.135873388042204e-05, |
| "loss": 1.2019, |
| "step": 2510 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1355216881594373e-05, |
| "loss": 1.2559, |
| "step": 2511 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1351699882766705e-05, |
| "loss": 1.1764, |
| "step": 2512 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1348182883939038e-05, |
| "loss": 1.204, |
| "step": 2513 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1344665885111372e-05, |
| "loss": 1.2181, |
| "step": 2514 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1341148886283705e-05, |
| "loss": 1.2826, |
| "step": 2515 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1337631887456037e-05, |
| "loss": 1.18, |
| "step": 2516 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.133411488862837e-05, |
| "loss": 1.2416, |
| "step": 2517 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1330597889800704e-05, |
| "loss": 1.2203, |
| "step": 2518 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1327080890973036e-05, |
| "loss": 1.2528, |
| "step": 2519 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.132356389214537e-05, |
| "loss": 1.2147, |
| "step": 2520 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.1320046893317703e-05, |
| "loss": 1.2163, |
| "step": 2521 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.1316529894490035e-05, |
| "loss": 1.1619, |
| "step": 2522 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.1313012895662369e-05, |
| "loss": 1.1982, |
| "step": 2523 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.1309495896834702e-05, |
| "loss": 1.2751, |
| "step": 2524 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.1305978898007032e-05, |
| "loss": 1.2597, |
| "step": 2525 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.1302461899179366e-05, |
| "loss": 1.1602, |
| "step": 2526 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.1298944900351701e-05, |
| "loss": 1.2073, |
| "step": 2527 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.1295427901524031e-05, |
| "loss": 1.2016, |
| "step": 2528 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.1291910902696365e-05, |
| "loss": 1.2001, |
| "step": 2529 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.1288393903868698e-05, |
| "loss": 1.1938, |
| "step": 2530 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.128487690504103e-05, |
| "loss": 1.2346, |
| "step": 2531 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.1281359906213364e-05, |
| "loss": 1.233, |
| "step": 2532 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.1277842907385697e-05, |
| "loss": 1.2495, |
| "step": 2533 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.127432590855803e-05, |
| "loss": 1.2089, |
| "step": 2534 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.1270808909730362e-05, |
| "loss": 1.2458, |
| "step": 2535 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.1267291910902696e-05, |
| "loss": 1.2252, |
| "step": 2536 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.126377491207503e-05, |
| "loss": 1.2176, |
| "step": 2537 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.1260257913247361e-05, |
| "loss": 1.2082, |
| "step": 2538 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.1256740914419695e-05, |
| "loss": 1.2149, |
| "step": 2539 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.1253223915592028e-05, |
| "loss": 1.1978, |
| "step": 2540 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.124970691676436e-05, |
| "loss": 1.2004, |
| "step": 2541 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.1246189917936694e-05, |
| "loss": 1.216, |
| "step": 2542 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.1242672919109027e-05, |
| "loss": 1.193, |
| "step": 2543 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.1239155920281359e-05, |
| "loss": 1.2279, |
| "step": 2544 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.1235638921453693e-05, |
| "loss": 1.1971, |
| "step": 2545 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.1232121922626026e-05, |
| "loss": 1.2801, |
| "step": 2546 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1228604923798358e-05, |
| "loss": 1.1912, |
| "step": 2547 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1225087924970691e-05, |
| "loss": 1.1834, |
| "step": 2548 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1221570926143025e-05, |
| "loss": 1.2314, |
| "step": 2549 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1218053927315358e-05, |
| "loss": 1.2622, |
| "step": 2550 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.121453692848769e-05, |
| "loss": 1.2143, |
| "step": 2551 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1211019929660024e-05, |
| "loss": 1.2633, |
| "step": 2552 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1207502930832357e-05, |
| "loss": 1.2206, |
| "step": 2553 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.120398593200469e-05, |
| "loss": 1.1982, |
| "step": 2554 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1200468933177023e-05, |
| "loss": 1.2187, |
| "step": 2555 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1196951934349356e-05, |
| "loss": 1.2168, |
| "step": 2556 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1193434935521688e-05, |
| "loss": 1.1873, |
| "step": 2557 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1189917936694022e-05, |
| "loss": 1.2355, |
| "step": 2558 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1186400937866355e-05, |
| "loss": 1.1707, |
| "step": 2559 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1182883939038685e-05, |
| "loss": 1.2183, |
| "step": 2560 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1179366940211019e-05, |
| "loss": 1.2107, |
| "step": 2561 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1175849941383352e-05, |
| "loss": 1.1446, |
| "step": 2562 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1172332942555684e-05, |
| "loss": 1.2136, |
| "step": 2563 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1168815943728018e-05, |
| "loss": 1.1893, |
| "step": 2564 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1165298944900351e-05, |
| "loss": 1.2049, |
| "step": 2565 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1161781946072683e-05, |
| "loss": 1.2056, |
| "step": 2566 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1158264947245017e-05, |
| "loss": 1.2315, |
| "step": 2567 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.115474794841735e-05, |
| "loss": 1.221, |
| "step": 2568 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1151230949589684e-05, |
| "loss": 1.2204, |
| "step": 2569 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1147713950762015e-05, |
| "loss": 1.2185, |
| "step": 2570 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1144196951934349e-05, |
| "loss": 1.2024, |
| "step": 2571 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1140679953106682e-05, |
| "loss": 1.177, |
| "step": 2572 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.1137162954279014e-05, |
| "loss": 1.2126, |
| "step": 2573 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.1133645955451348e-05, |
| "loss": 1.1607, |
| "step": 2574 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.1130128956623681e-05, |
| "loss": 1.2469, |
| "step": 2575 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.1126611957796013e-05, |
| "loss": 1.2385, |
| "step": 2576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.1123094958968347e-05, |
| "loss": 1.2067, |
| "step": 2577 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.111957796014068e-05, |
| "loss": 1.1888, |
| "step": 2578 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.1116060961313012e-05, |
| "loss": 1.2049, |
| "step": 2579 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.1112543962485346e-05, |
| "loss": 1.2158, |
| "step": 2580 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.1109026963657679e-05, |
| "loss": 1.2389, |
| "step": 2581 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.1105509964830011e-05, |
| "loss": 1.217, |
| "step": 2582 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.1101992966002344e-05, |
| "loss": 1.1763, |
| "step": 2583 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.1098475967174678e-05, |
| "loss": 1.2375, |
| "step": 2584 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.109495896834701e-05, |
| "loss": 1.1008, |
| "step": 2585 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.1091441969519343e-05, |
| "loss": 1.0658, |
| "step": 2586 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.1087924970691677e-05, |
| "loss": 1.1057, |
| "step": 2587 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.108440797186401e-05, |
| "loss": 1.0239, |
| "step": 2588 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.1080890973036342e-05, |
| "loss": 1.043, |
| "step": 2589 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.1077373974208676e-05, |
| "loss": 1.01, |
| "step": 2590 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.107385697538101e-05, |
| "loss": 1.0039, |
| "step": 2591 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.1070339976553341e-05, |
| "loss": 1.0205, |
| "step": 2592 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.1066822977725675e-05, |
| "loss": 1.0073, |
| "step": 2593 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.1063305978898008e-05, |
| "loss": 1.0189, |
| "step": 2594 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.1059788980070338e-05, |
| "loss": 1.0417, |
| "step": 2595 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.1056271981242672e-05, |
| "loss": 1.0064, |
| "step": 2596 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.1052754982415005e-05, |
| "loss": 0.9953, |
| "step": 2597 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.1049237983587337e-05, |
| "loss": 0.9374, |
| "step": 2598 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.104572098475967e-05, |
| "loss": 0.9611, |
| "step": 2599 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.1042203985932004e-05, |
| "loss": 0.9612, |
| "step": 2600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.1038686987104336e-05, |
| "loss": 0.9956, |
| "step": 2601 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.103516998827667e-05, |
| "loss": 1.0075, |
| "step": 2602 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.1031652989449003e-05, |
| "loss": 1.0056, |
| "step": 2603 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.1028135990621337e-05, |
| "loss": 0.9592, |
| "step": 2604 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.1024618991793668e-05, |
| "loss": 0.969, |
| "step": 2605 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.1021101992966002e-05, |
| "loss": 1.0084, |
| "step": 2606 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.1017584994138335e-05, |
| "loss": 0.991, |
| "step": 2607 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.1014067995310667e-05, |
| "loss": 0.9796, |
| "step": 2608 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.1010550996483e-05, |
| "loss": 0.9483, |
| "step": 2609 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.1007033997655334e-05, |
| "loss": 0.9856, |
| "step": 2610 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.1003516998827666e-05, |
| "loss": 0.9763, |
| "step": 2611 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.1e-05, |
| "loss": 0.9934, |
| "step": 2612 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.0996483001172332e-05, |
| "loss": 1.0343, |
| "step": 2613 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.0992966002344665e-05, |
| "loss": 0.9622, |
| "step": 2614 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.0989449003516999e-05, |
| "loss": 0.9642, |
| "step": 2615 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.098593200468933e-05, |
| "loss": 0.9729, |
| "step": 2616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.0982415005861664e-05, |
| "loss": 1.0196, |
| "step": 2617 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.0978898007033997e-05, |
| "loss": 0.9748, |
| "step": 2618 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.0975381008206331e-05, |
| "loss": 1.0003, |
| "step": 2619 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.0971864009378663e-05, |
| "loss": 1.0, |
| "step": 2620 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.0968347010550996e-05, |
| "loss": 0.9305, |
| "step": 2621 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.096483001172333e-05, |
| "loss": 0.9695, |
| "step": 2622 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.0961313012895662e-05, |
| "loss": 0.9446, |
| "step": 2623 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.0957796014067995e-05, |
| "loss": 1.0304, |
| "step": 2624 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.0954279015240329e-05, |
| "loss": 0.9348, |
| "step": 2625 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.095076201641266e-05, |
| "loss": 0.9917, |
| "step": 2626 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.0947245017584994e-05, |
| "loss": 1.0248, |
| "step": 2627 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.0943728018757328e-05, |
| "loss": 0.9462, |
| "step": 2628 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.094021101992966e-05, |
| "loss": 0.9502, |
| "step": 2629 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.0936694021101993e-05, |
| "loss": 1.014, |
| "step": 2630 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.0933177022274325e-05, |
| "loss": 1.0043, |
| "step": 2631 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.0929660023446658e-05, |
| "loss": 0.9949, |
| "step": 2632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.0926143024618992e-05, |
| "loss": 1.016, |
| "step": 2633 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.0922626025791324e-05, |
| "loss": 0.9835, |
| "step": 2634 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.0919109026963657e-05, |
| "loss": 0.9612, |
| "step": 2635 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.091559202813599e-05, |
| "loss": 0.9849, |
| "step": 2636 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.0912075029308323e-05, |
| "loss": 0.9915, |
| "step": 2637 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.0908558030480656e-05, |
| "loss": 0.9885, |
| "step": 2638 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.090504103165299e-05, |
| "loss": 0.9732, |
| "step": 2639 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.0901524032825321e-05, |
| "loss": 0.9504, |
| "step": 2640 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.0898007033997655e-05, |
| "loss": 0.9725, |
| "step": 2641 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.0894490035169989e-05, |
| "loss": 0.9509, |
| "step": 2642 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.089097303634232e-05, |
| "loss": 1.0249, |
| "step": 2643 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.0887456037514654e-05, |
| "loss": 0.9932, |
| "step": 2644 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.0883939038686987e-05, |
| "loss": 0.9735, |
| "step": 2645 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.0880422039859321e-05, |
| "loss": 0.9752, |
| "step": 2646 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.0876905041031653e-05, |
| "loss": 0.9611, |
| "step": 2647 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.0873388042203985e-05, |
| "loss": 1.009, |
| "step": 2648 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.0869871043376318e-05, |
| "loss": 0.9673, |
| "step": 2649 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.0866354044548652e-05, |
| "loss": 0.9432, |
| "step": 2650 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.0862837045720983e-05, |
| "loss": 1.0099, |
| "step": 2651 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.0859320046893317e-05, |
| "loss": 0.9531, |
| "step": 2652 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.085580304806565e-05, |
| "loss": 0.9761, |
| "step": 2653 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.0852286049237984e-05, |
| "loss": 1.0012, |
| "step": 2654 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.0848769050410316e-05, |
| "loss": 0.9744, |
| "step": 2655 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.084525205158265e-05, |
| "loss": 0.9927, |
| "step": 2656 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.0841735052754983e-05, |
| "loss": 0.9832, |
| "step": 2657 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.0838218053927315e-05, |
| "loss": 0.9647, |
| "step": 2658 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.0834701055099648e-05, |
| "loss": 0.9228, |
| "step": 2659 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.0831184056271982e-05, |
| "loss": 0.9567, |
| "step": 2660 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.0827667057444314e-05, |
| "loss": 1.026, |
| "step": 2661 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.0824150058616647e-05, |
| "loss": 1.0056, |
| "step": 2662 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.082063305978898e-05, |
| "loss": 0.97, |
| "step": 2663 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.0817116060961312e-05, |
| "loss": 1.0034, |
| "step": 2664 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.0813599062133646e-05, |
| "loss": 0.9379, |
| "step": 2665 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.0810082063305978e-05, |
| "loss": 0.9642, |
| "step": 2666 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.0806565064478311e-05, |
| "loss": 0.9655, |
| "step": 2667 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.0803048065650645e-05, |
| "loss": 0.9795, |
| "step": 2668 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.0799531066822977e-05, |
| "loss": 0.9775, |
| "step": 2669 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.079601406799531e-05, |
| "loss": 0.9443, |
| "step": 2670 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.0792497069167644e-05, |
| "loss": 0.999, |
| "step": 2671 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.0788980070339976e-05, |
| "loss": 0.9725, |
| "step": 2672 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.0785463071512309e-05, |
| "loss": 0.9328, |
| "step": 2673 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.0781946072684643e-05, |
| "loss": 0.9996, |
| "step": 2674 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.0778429073856974e-05, |
| "loss": 1.0129, |
| "step": 2675 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0774912075029308e-05, |
| "loss": 0.9464, |
| "step": 2676 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0771395076201642e-05, |
| "loss": 0.9785, |
| "step": 2677 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0767878077373973e-05, |
| "loss": 0.9884, |
| "step": 2678 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0764361078546307e-05, |
| "loss": 0.9824, |
| "step": 2679 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.076084407971864e-05, |
| "loss": 0.9471, |
| "step": 2680 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0757327080890974e-05, |
| "loss": 0.9216, |
| "step": 2681 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0753810082063306e-05, |
| "loss": 0.9545, |
| "step": 2682 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0750293083235638e-05, |
| "loss": 0.9863, |
| "step": 2683 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0746776084407971e-05, |
| "loss": 1.0105, |
| "step": 2684 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0743259085580305e-05, |
| "loss": 1.0172, |
| "step": 2685 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0739742086752636e-05, |
| "loss": 1.0148, |
| "step": 2686 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.073622508792497e-05, |
| "loss": 0.9947, |
| "step": 2687 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0732708089097304e-05, |
| "loss": 0.9046, |
| "step": 2688 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0729191090269637e-05, |
| "loss": 0.9671, |
| "step": 2689 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0725674091441969e-05, |
| "loss": 1.0172, |
| "step": 2690 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0722157092614302e-05, |
| "loss": 0.9888, |
| "step": 2691 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0718640093786636e-05, |
| "loss": 1.0366, |
| "step": 2692 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0715123094958968e-05, |
| "loss": 0.9786, |
| "step": 2693 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0711606096131301e-05, |
| "loss": 0.9773, |
| "step": 2694 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0708089097303635e-05, |
| "loss": 0.9543, |
| "step": 2695 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0704572098475967e-05, |
| "loss": 1.0028, |
| "step": 2696 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.07010550996483e-05, |
| "loss": 0.9788, |
| "step": 2697 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0697538100820634e-05, |
| "loss": 0.9626, |
| "step": 2698 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0694021101992966e-05, |
| "loss": 0.9816, |
| "step": 2699 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0690504103165299e-05, |
| "loss": 0.9684, |
| "step": 2700 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.0686987104337631e-05, |
| "loss": 0.9914, |
| "step": 2701 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.0683470105509964e-05, |
| "loss": 0.9644, |
| "step": 2702 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.0679953106682298e-05, |
| "loss": 0.9728, |
| "step": 2703 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.067643610785463e-05, |
| "loss": 1.0286, |
| "step": 2704 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.0672919109026963e-05, |
| "loss": 0.9925, |
| "step": 2705 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.0669402110199297e-05, |
| "loss": 0.9782, |
| "step": 2706 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.0665885111371629e-05, |
| "loss": 0.9667, |
| "step": 2707 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.0662368112543962e-05, |
| "loss": 1.0066, |
| "step": 2708 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.0658851113716296e-05, |
| "loss": 0.956, |
| "step": 2709 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.0655334114888628e-05, |
| "loss": 1.0144, |
| "step": 2710 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.0651817116060961e-05, |
| "loss": 1.009, |
| "step": 2711 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.0648300117233295e-05, |
| "loss": 1.0414, |
| "step": 2712 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.0644783118405626e-05, |
| "loss": 0.9384, |
| "step": 2713 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.064126611957796e-05, |
| "loss": 0.9946, |
| "step": 2714 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.0637749120750293e-05, |
| "loss": 0.9501, |
| "step": 2715 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.0634232121922627e-05, |
| "loss": 1.0172, |
| "step": 2716 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.0630715123094959e-05, |
| "loss": 0.9728, |
| "step": 2717 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.062719812426729e-05, |
| "loss": 0.9528, |
| "step": 2718 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.0623681125439624e-05, |
| "loss": 0.9536, |
| "step": 2719 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.0620164126611958e-05, |
| "loss": 0.9823, |
| "step": 2720 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.061664712778429e-05, |
| "loss": 0.9729, |
| "step": 2721 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.0613130128956623e-05, |
| "loss": 0.9927, |
| "step": 2722 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.0609613130128957e-05, |
| "loss": 1.0211, |
| "step": 2723 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.060609613130129e-05, |
| "loss": 0.9031, |
| "step": 2724 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.0602579132473622e-05, |
| "loss": 0.9468, |
| "step": 2725 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.0599062133645955e-05, |
| "loss": 0.9775, |
| "step": 2726 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.0595545134818289e-05, |
| "loss": 1.0116, |
| "step": 2727 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.059202813599062e-05, |
| "loss": 0.9645, |
| "step": 2728 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.0588511137162954e-05, |
| "loss": 0.9229, |
| "step": 2729 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.0584994138335288e-05, |
| "loss": 0.9581, |
| "step": 2730 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.058147713950762e-05, |
| "loss": 1.0119, |
| "step": 2731 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.0577960140679953e-05, |
| "loss": 0.9934, |
| "step": 2732 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.0574443141852287e-05, |
| "loss": 1.0044, |
| "step": 2733 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.0570926143024619e-05, |
| "loss": 0.9599, |
| "step": 2734 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.0567409144196952e-05, |
| "loss": 1.0056, |
| "step": 2735 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.0563892145369284e-05, |
| "loss": 0.9503, |
| "step": 2736 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.0560375146541617e-05, |
| "loss": 0.9811, |
| "step": 2737 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.0556858147713951e-05, |
| "loss": 0.9738, |
| "step": 2738 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.0553341148886283e-05, |
| "loss": 0.9597, |
| "step": 2739 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.0549824150058616e-05, |
| "loss": 1.0102, |
| "step": 2740 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.054630715123095e-05, |
| "loss": 1.0382, |
| "step": 2741 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.0542790152403282e-05, |
| "loss": 0.9523, |
| "step": 2742 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.0539273153575615e-05, |
| "loss": 1.0016, |
| "step": 2743 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.0535756154747949e-05, |
| "loss": 0.9702, |
| "step": 2744 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.053223915592028e-05, |
| "loss": 1.0049, |
| "step": 2745 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.0528722157092614e-05, |
| "loss": 0.9896, |
| "step": 2746 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.0525205158264948e-05, |
| "loss": 0.9897, |
| "step": 2747 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.052168815943728e-05, |
| "loss": 0.9405, |
| "step": 2748 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.0518171160609613e-05, |
| "loss": 0.9994, |
| "step": 2749 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.0514654161781945e-05, |
| "loss": 1.0152, |
| "step": 2750 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.051113716295428e-05, |
| "loss": 0.9657, |
| "step": 2751 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.0507620164126612e-05, |
| "loss": 0.9441, |
| "step": 2752 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.0504103165298944e-05, |
| "loss": 0.9795, |
| "step": 2753 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.0500586166471277e-05, |
| "loss": 1.0491, |
| "step": 2754 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.049706916764361e-05, |
| "loss": 0.9885, |
| "step": 2755 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.0493552168815943e-05, |
| "loss": 0.9424, |
| "step": 2756 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.0490035169988276e-05, |
| "loss": 0.9653, |
| "step": 2757 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.048651817116061e-05, |
| "loss": 0.978, |
| "step": 2758 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.0483001172332943e-05, |
| "loss": 0.9566, |
| "step": 2759 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.0479484173505275e-05, |
| "loss": 1.0095, |
| "step": 2760 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.0475967174677608e-05, |
| "loss": 0.9895, |
| "step": 2761 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.0472450175849942e-05, |
| "loss": 0.9884, |
| "step": 2762 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.0468933177022274e-05, |
| "loss": 0.988, |
| "step": 2763 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.0465416178194607e-05, |
| "loss": 1.0161, |
| "step": 2764 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.0461899179366941e-05, |
| "loss": 0.9795, |
| "step": 2765 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.0458382180539273e-05, |
| "loss": 0.9762, |
| "step": 2766 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.0454865181711605e-05, |
| "loss": 0.9975, |
| "step": 2767 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.045134818288394e-05, |
| "loss": 0.973, |
| "step": 2768 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.0447831184056272e-05, |
| "loss": 0.9753, |
| "step": 2769 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.0444314185228605e-05, |
| "loss": 0.987, |
| "step": 2770 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.0440797186400937e-05, |
| "loss": 0.9424, |
| "step": 2771 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.043728018757327e-05, |
| "loss": 0.927, |
| "step": 2772 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.0433763188745604e-05, |
| "loss": 0.9778, |
| "step": 2773 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.0430246189917936e-05, |
| "loss": 1.0043, |
| "step": 2774 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.042672919109027e-05, |
| "loss": 0.9694, |
| "step": 2775 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.0423212192262603e-05, |
| "loss": 0.9623, |
| "step": 2776 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.0419695193434935e-05, |
| "loss": 0.9754, |
| "step": 2777 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.0416178194607268e-05, |
| "loss": 0.9546, |
| "step": 2778 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0412661195779602e-05, |
| "loss": 0.9874, |
| "step": 2779 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0409144196951934e-05, |
| "loss": 0.995, |
| "step": 2780 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0405627198124267e-05, |
| "loss": 0.9741, |
| "step": 2781 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.04021101992966e-05, |
| "loss": 1.0033, |
| "step": 2782 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0398593200468932e-05, |
| "loss": 1.0075, |
| "step": 2783 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0395076201641266e-05, |
| "loss": 0.9436, |
| "step": 2784 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0391559202813598e-05, |
| "loss": 0.9847, |
| "step": 2785 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0388042203985933e-05, |
| "loss": 0.9496, |
| "step": 2786 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0384525205158265e-05, |
| "loss": 0.971, |
| "step": 2787 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0381008206330597e-05, |
| "loss": 0.9847, |
| "step": 2788 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.037749120750293e-05, |
| "loss": 0.9863, |
| "step": 2789 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0373974208675264e-05, |
| "loss": 0.9546, |
| "step": 2790 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0370457209847596e-05, |
| "loss": 0.9916, |
| "step": 2791 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0366940211019929e-05, |
| "loss": 1.005, |
| "step": 2792 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0363423212192263e-05, |
| "loss": 0.9676, |
| "step": 2793 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0359906213364594e-05, |
| "loss": 0.972, |
| "step": 2794 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0356389214536928e-05, |
| "loss": 1.0111, |
| "step": 2795 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0352872215709261e-05, |
| "loss": 0.9737, |
| "step": 2796 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0349355216881595e-05, |
| "loss": 0.9545, |
| "step": 2797 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0345838218053927e-05, |
| "loss": 1.001, |
| "step": 2798 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.034232121922626e-05, |
| "loss": 0.9498, |
| "step": 2799 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0338804220398594e-05, |
| "loss": 0.9773, |
| "step": 2800 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0335287221570926e-05, |
| "loss": 0.9709, |
| "step": 2801 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0331770222743258e-05, |
| "loss": 0.9645, |
| "step": 2802 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0328253223915591e-05, |
| "loss": 0.9543, |
| "step": 2803 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.0324736225087925e-05, |
| "loss": 0.9462, |
| "step": 2804 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.0321219226260258e-05, |
| "loss": 0.9703, |
| "step": 2805 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.031770222743259e-05, |
| "loss": 0.9769, |
| "step": 2806 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.0314185228604923e-05, |
| "loss": 0.9712, |
| "step": 2807 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.0310668229777257e-05, |
| "loss": 0.9648, |
| "step": 2808 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.0307151230949589e-05, |
| "loss": 0.956, |
| "step": 2809 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.0303634232121922e-05, |
| "loss": 1.0066, |
| "step": 2810 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.0300117233294256e-05, |
| "loss": 0.9917, |
| "step": 2811 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.0296600234466588e-05, |
| "loss": 1.0082, |
| "step": 2812 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.0293083235638921e-05, |
| "loss": 0.9828, |
| "step": 2813 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.0289566236811255e-05, |
| "loss": 0.96, |
| "step": 2814 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.0286049237983587e-05, |
| "loss": 0.9628, |
| "step": 2815 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.028253223915592e-05, |
| "loss": 0.9996, |
| "step": 2816 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.0279015240328254e-05, |
| "loss": 1.0007, |
| "step": 2817 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.0275498241500585e-05, |
| "loss": 1.026, |
| "step": 2818 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.0271981242672919e-05, |
| "loss": 0.9552, |
| "step": 2819 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.026846424384525e-05, |
| "loss": 0.9995, |
| "step": 2820 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.0264947245017586e-05, |
| "loss": 0.9967, |
| "step": 2821 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.0261430246189918e-05, |
| "loss": 0.9906, |
| "step": 2822 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.025791324736225e-05, |
| "loss": 0.9769, |
| "step": 2823 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.0254396248534583e-05, |
| "loss": 0.9599, |
| "step": 2824 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.0250879249706917e-05, |
| "loss": 1.0029, |
| "step": 2825 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.0247362250879249e-05, |
| "loss": 0.9749, |
| "step": 2826 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.0243845252051582e-05, |
| "loss": 0.9636, |
| "step": 2827 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.0240328253223916e-05, |
| "loss": 1.0026, |
| "step": 2828 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.0236811254396247e-05, |
| "loss": 0.9848, |
| "step": 2829 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.0233294255568581e-05, |
| "loss": 0.9231, |
| "step": 2830 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.0229777256740914e-05, |
| "loss": 1.0032, |
| "step": 2831 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.0226260257913248e-05, |
| "loss": 0.9866, |
| "step": 2832 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.022274325908558e-05, |
| "loss": 1.0122, |
| "step": 2833 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.0219226260257913e-05, |
| "loss": 0.985, |
| "step": 2834 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.0215709261430247e-05, |
| "loss": 0.9974, |
| "step": 2835 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.0212192262602579e-05, |
| "loss": 0.9903, |
| "step": 2836 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.020867526377491e-05, |
| "loss": 0.9842, |
| "step": 2837 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.0205158264947244e-05, |
| "loss": 1.0201, |
| "step": 2838 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.0201641266119578e-05, |
| "loss": 0.9908, |
| "step": 2839 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.0198124267291911e-05, |
| "loss": 0.9658, |
| "step": 2840 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.0194607268464243e-05, |
| "loss": 1.0511, |
| "step": 2841 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.0191090269636576e-05, |
| "loss": 1.0527, |
| "step": 2842 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.018757327080891e-05, |
| "loss": 0.9814, |
| "step": 2843 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.0184056271981242e-05, |
| "loss": 1.0355, |
| "step": 2844 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.0180539273153575e-05, |
| "loss": 0.9584, |
| "step": 2845 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.0177022274325909e-05, |
| "loss": 0.9572, |
| "step": 2846 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.017350527549824e-05, |
| "loss": 0.9822, |
| "step": 2847 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.0169988276670574e-05, |
| "loss": 0.9916, |
| "step": 2848 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.0166471277842908e-05, |
| "loss": 1.0131, |
| "step": 2849 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.016295427901524e-05, |
| "loss": 1.0267, |
| "step": 2850 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.0159437280187573e-05, |
| "loss": 0.9272, |
| "step": 2851 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.0155920281359907e-05, |
| "loss": 0.936, |
| "step": 2852 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.0152403282532238e-05, |
| "loss": 0.9492, |
| "step": 2853 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.0148886283704572e-05, |
| "loss": 0.9661, |
| "step": 2854 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.0145369284876904e-05, |
| "loss": 0.95, |
| "step": 2855 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.0141852286049237e-05, |
| "loss": 0.9672, |
| "step": 2856 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.0138335287221571e-05, |
| "loss": 0.9731, |
| "step": 2857 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.0134818288393903e-05, |
| "loss": 0.9883, |
| "step": 2858 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.0131301289566236e-05, |
| "loss": 0.9774, |
| "step": 2859 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.012778429073857e-05, |
| "loss": 1.0117, |
| "step": 2860 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.0124267291910902e-05, |
| "loss": 0.9932, |
| "step": 2861 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.0120750293083235e-05, |
| "loss": 0.9617, |
| "step": 2862 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.0117233294255569e-05, |
| "loss": 0.9554, |
| "step": 2863 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.01137162954279e-05, |
| "loss": 1.0022, |
| "step": 2864 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.0110199296600234e-05, |
| "loss": 1.0005, |
| "step": 2865 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.0106682297772568e-05, |
| "loss": 0.9803, |
| "step": 2866 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.0103165298944901e-05, |
| "loss": 1.0063, |
| "step": 2867 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.0099648300117233e-05, |
| "loss": 0.9437, |
| "step": 2868 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.0096131301289566e-05, |
| "loss": 0.9871, |
| "step": 2869 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.00926143024619e-05, |
| "loss": 0.9332, |
| "step": 2870 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.0089097303634232e-05, |
| "loss": 0.9324, |
| "step": 2871 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.0085580304806564e-05, |
| "loss": 1.014, |
| "step": 2872 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.0082063305978897e-05, |
| "loss": 0.9849, |
| "step": 2873 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.007854630715123e-05, |
| "loss": 1.0176, |
| "step": 2874 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.0075029308323564e-05, |
| "loss": 0.9788, |
| "step": 2875 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.0071512309495896e-05, |
| "loss": 0.9922, |
| "step": 2876 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.006799531066823e-05, |
| "loss": 0.9989, |
| "step": 2877 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.0064478311840563e-05, |
| "loss": 1.026, |
| "step": 2878 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.0060961313012895e-05, |
| "loss": 0.9989, |
| "step": 2879 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.0057444314185228e-05, |
| "loss": 0.9582, |
| "step": 2880 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.0053927315357562e-05, |
| "loss": 0.9978, |
| "step": 2881 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.0050410316529894e-05, |
| "loss": 0.9406, |
| "step": 2882 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.0046893317702227e-05, |
| "loss": 0.9332, |
| "step": 2883 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.004337631887456e-05, |
| "loss": 0.9964, |
| "step": 2884 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.0039859320046893e-05, |
| "loss": 1.0012, |
| "step": 2885 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.0036342321219226e-05, |
| "loss": 0.9856, |
| "step": 2886 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.003282532239156e-05, |
| "loss": 1.0232, |
| "step": 2887 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.0029308323563892e-05, |
| "loss": 0.9856, |
| "step": 2888 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.0025791324736225e-05, |
| "loss": 1.0042, |
| "step": 2889 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.0022274325908557e-05, |
| "loss": 1.0126, |
| "step": 2890 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.001875732708089e-05, |
| "loss": 0.9478, |
| "step": 2891 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.0015240328253224e-05, |
| "loss": 1.0136, |
| "step": 2892 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.0011723329425556e-05, |
| "loss": 0.9785, |
| "step": 2893 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.000820633059789e-05, |
| "loss": 1.0008, |
| "step": 2894 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.0004689331770223e-05, |
| "loss": 0.9961, |
| "step": 2895 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.0001172332942555e-05, |
| "loss": 0.9402, |
| "step": 2896 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 9.997655334114888e-06, |
| "loss": 1.0223, |
| "step": 2897 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 9.994138335287222e-06, |
| "loss": 0.972, |
| "step": 2898 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 9.990621336459553e-06, |
| "loss": 0.989, |
| "step": 2899 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 9.987104337631887e-06, |
| "loss": 0.9938, |
| "step": 2900 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 9.98358733880422e-06, |
| "loss": 0.951, |
| "step": 2901 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 9.980070339976554e-06, |
| "loss": 0.988, |
| "step": 2902 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 9.976553341148886e-06, |
| "loss": 0.9966, |
| "step": 2903 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 9.97303634232122e-06, |
| "loss": 0.9405, |
| "step": 2904 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 9.969519343493553e-06, |
| "loss": 0.9997, |
| "step": 2905 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 9.966002344665885e-06, |
| "loss": 0.9722, |
| "step": 2906 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 9.962485345838217e-06, |
| "loss": 0.9571, |
| "step": 2907 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 9.95896834701055e-06, |
| "loss": 0.9386, |
| "step": 2908 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.955451348182884e-06, |
| "loss": 0.9988, |
| "step": 2909 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.951934349355217e-06, |
| "loss": 1.0306, |
| "step": 2910 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.948417350527549e-06, |
| "loss": 0.95, |
| "step": 2911 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.944900351699883e-06, |
| "loss": 0.9855, |
| "step": 2912 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.941383352872216e-06, |
| "loss": 1.0035, |
| "step": 2913 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.937866354044548e-06, |
| "loss": 0.9658, |
| "step": 2914 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.934349355216881e-06, |
| "loss": 0.9435, |
| "step": 2915 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.930832356389215e-06, |
| "loss": 0.9852, |
| "step": 2916 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.927315357561547e-06, |
| "loss": 0.9812, |
| "step": 2917 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.92379835873388e-06, |
| "loss": 0.9874, |
| "step": 2918 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.920281359906214e-06, |
| "loss": 0.9551, |
| "step": 2919 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.916764361078546e-06, |
| "loss": 0.9819, |
| "step": 2920 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.91324736225088e-06, |
| "loss": 0.9934, |
| "step": 2921 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.909730363423213e-06, |
| "loss": 0.964, |
| "step": 2922 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.906213364595545e-06, |
| "loss": 0.994, |
| "step": 2923 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.902696365767878e-06, |
| "loss": 0.9892, |
| "step": 2924 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.89917936694021e-06, |
| "loss": 1.0321, |
| "step": 2925 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.895662368112543e-06, |
| "loss": 1.0245, |
| "step": 2926 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.892145369284877e-06, |
| "loss": 0.9815, |
| "step": 2927 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.888628370457209e-06, |
| "loss": 0.979, |
| "step": 2928 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.885111371629542e-06, |
| "loss": 0.9818, |
| "step": 2929 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.881594372801876e-06, |
| "loss": 0.9965, |
| "step": 2930 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.878077373974208e-06, |
| "loss": 0.9946, |
| "step": 2931 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.874560375146541e-06, |
| "loss": 0.9727, |
| "step": 2932 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.871043376318875e-06, |
| "loss": 0.9614, |
| "step": 2933 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.867526377491207e-06, |
| "loss": 0.932, |
| "step": 2934 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.86400937866354e-06, |
| "loss": 1.0313, |
| "step": 2935 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.860492379835874e-06, |
| "loss": 0.9756, |
| "step": 2936 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.856975381008207e-06, |
| "loss": 0.9894, |
| "step": 2937 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.853458382180539e-06, |
| "loss": 1.0288, |
| "step": 2938 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.849941383352872e-06, |
| "loss": 0.9686, |
| "step": 2939 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.846424384525206e-06, |
| "loss": 0.976, |
| "step": 2940 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.842907385697538e-06, |
| "loss": 1.0273, |
| "step": 2941 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.83939038686987e-06, |
| "loss": 0.9945, |
| "step": 2942 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.835873388042203e-06, |
| "loss": 0.9847, |
| "step": 2943 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.832356389214537e-06, |
| "loss": 1.0101, |
| "step": 2944 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.828839390386869e-06, |
| "loss": 1.0149, |
| "step": 2945 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.825322391559202e-06, |
| "loss": 1.0154, |
| "step": 2946 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.821805392731536e-06, |
| "loss": 1.0133, |
| "step": 2947 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.818288393903869e-06, |
| "loss": 0.9949, |
| "step": 2948 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.814771395076201e-06, |
| "loss": 0.9947, |
| "step": 2949 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.811254396248534e-06, |
| "loss": 0.9886, |
| "step": 2950 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.807737397420868e-06, |
| "loss": 0.959, |
| "step": 2951 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.8042203985932e-06, |
| "loss": 0.954, |
| "step": 2952 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.800703399765533e-06, |
| "loss": 1.0077, |
| "step": 2953 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.797186400937867e-06, |
| "loss": 0.9666, |
| "step": 2954 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.793669402110199e-06, |
| "loss": 0.9581, |
| "step": 2955 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.790152403282532e-06, |
| "loss": 0.9468, |
| "step": 2956 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.786635404454866e-06, |
| "loss": 0.9715, |
| "step": 2957 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.783118405627198e-06, |
| "loss": 0.989, |
| "step": 2958 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.779601406799531e-06, |
| "loss": 0.9698, |
| "step": 2959 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.776084407971863e-06, |
| "loss": 0.9815, |
| "step": 2960 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.772567409144196e-06, |
| "loss": 0.9725, |
| "step": 2961 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.76905041031653e-06, |
| "loss": 0.9882, |
| "step": 2962 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.765533411488862e-06, |
| "loss": 0.9638, |
| "step": 2963 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.762016412661195e-06, |
| "loss": 0.9888, |
| "step": 2964 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.758499413833529e-06, |
| "loss": 0.9755, |
| "step": 2965 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.75498241500586e-06, |
| "loss": 0.9584, |
| "step": 2966 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.751465416178194e-06, |
| "loss": 0.9713, |
| "step": 2967 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.747948417350528e-06, |
| "loss": 0.99, |
| "step": 2968 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.74443141852286e-06, |
| "loss": 1.0085, |
| "step": 2969 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.740914419695193e-06, |
| "loss": 0.9348, |
| "step": 2970 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.737397420867527e-06, |
| "loss": 0.9851, |
| "step": 2971 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.73388042203986e-06, |
| "loss": 0.9645, |
| "step": 2972 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.730363423212192e-06, |
| "loss": 0.9369, |
| "step": 2973 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.726846424384525e-06, |
| "loss": 0.9598, |
| "step": 2974 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.723329425556859e-06, |
| "loss": 1.018, |
| "step": 2975 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.71981242672919e-06, |
| "loss": 0.9854, |
| "step": 2976 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.716295427901523e-06, |
| "loss": 0.9792, |
| "step": 2977 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.712778429073856e-06, |
| "loss": 0.9843, |
| "step": 2978 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.70926143024619e-06, |
| "loss": 1.0126, |
| "step": 2979 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.705744431418522e-06, |
| "loss": 0.9504, |
| "step": 2980 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.702227432590855e-06, |
| "loss": 1.008, |
| "step": 2981 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.698710433763189e-06, |
| "loss": 0.9246, |
| "step": 2982 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.695193434935522e-06, |
| "loss": 0.9977, |
| "step": 2983 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.691676436107854e-06, |
| "loss": 1.0106, |
| "step": 2984 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.688159437280187e-06, |
| "loss": 0.9824, |
| "step": 2985 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 9.684642438452521e-06, |
| "loss": 0.9731, |
| "step": 2986 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 9.681125439624853e-06, |
| "loss": 1.014, |
| "step": 2987 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 9.677608440797186e-06, |
| "loss": 1.0155, |
| "step": 2988 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 9.67409144196952e-06, |
| "loss": 0.9769, |
| "step": 2989 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 9.670574443141852e-06, |
| "loss": 0.9636, |
| "step": 2990 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 9.667057444314185e-06, |
| "loss": 0.978, |
| "step": 2991 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 9.663540445486519e-06, |
| "loss": 0.9454, |
| "step": 2992 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 9.66002344665885e-06, |
| "loss": 0.9529, |
| "step": 2993 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 9.656506447831184e-06, |
| "loss": 0.9633, |
| "step": 2994 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 9.652989449003516e-06, |
| "loss": 0.962, |
| "step": 2995 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 9.64947245017585e-06, |
| "loss": 0.9148, |
| "step": 2996 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 9.645955451348183e-06, |
| "loss": 1.0287, |
| "step": 2997 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 9.642438452520515e-06, |
| "loss": 0.9577, |
| "step": 2998 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 9.638921453692848e-06, |
| "loss": 0.9472, |
| "step": 2999 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 9.635404454865182e-06, |
| "loss": 0.9962, |
| "step": 3000 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 5170, |
| "num_train_epochs": 2, |
| "save_steps": 100, |
| "total_flos": 9335111417856000.0, |
| "trial_name": null, |
| "trial_params": null |
| } |