{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9936102236421727,
  "eval_steps": 500,
  "global_step": 156,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.000000000000001e-06,
      "loss": 4.0414,
      "step": 1
    },
    {
      "epoch": 0.03,
      "learning_rate": 8.000000000000001e-06,
      "loss": 4.0692,
      "step": 2
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.2e-05,
      "loss": 3.9926,
      "step": 3
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 3.8362,
      "step": 4
    },
    {
      "epoch": 0.06,
      "learning_rate": 2e-05,
      "loss": 3.6034,
      "step": 5
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.999783578606323e-05,
      "loss": 3.3434,
      "step": 6
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9991344081017312e-05,
      "loss": 3.3582,
      "step": 7
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9980527694749952e-05,
      "loss": 3.3779,
      "step": 8
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.996539130905593e-05,
      "loss": 3.1884,
      "step": 9
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9945941475610623e-05,
      "loss": 3.0909,
      "step": 10
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9922186613134152e-05,
      "loss": 2.9483,
      "step": 11
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9894137003747404e-05,
      "loss": 2.9103,
      "step": 12
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.986180478852149e-05,
      "loss": 3.073,
      "step": 13
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9825203962222573e-05,
      "loss": 2.8503,
      "step": 14
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9784350367254322e-05,
      "loss": 2.6766,
      "step": 15
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9739261686800662e-05,
      "loss": 2.5458,
      "step": 16
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.968995743717171e-05,
      "loss": 2.7816,
      "step": 17
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.963645895935632e-05,
      "loss": 2.6675,
      "step": 18
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9578789409784727e-05,
      "loss": 2.6929,
      "step": 19
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.951697375030553e-05,
      "loss": 2.5884,
      "step": 20
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9451038737381078e-05,
      "loss": 2.5874,
      "step": 21
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9381012910506146e-05,
      "loss": 2.3095,
      "step": 22
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.930692657985482e-05,
      "loss": 2.4514,
      "step": 23
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9228811813160972e-05,
      "loss": 2.3887,
      "step": 24
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9146702421837952e-05,
      "loss": 2.2314,
      "step": 25
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.906063394634356e-05,
      "loss": 2.4158,
      "step": 26
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.8970643640796642e-05,
      "loss": 2.4192,
      "step": 27
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.887677045685188e-05,
      "loss": 2.3371,
      "step": 28
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.877905502683987e-05,
      "loss": 2.2763,
      "step": 29
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.8677539646179706e-05,
      "loss": 2.2117,
      "step": 30
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.8572268255071718e-05,
      "loss": 2.2059,
      "step": 31
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.8463286419478256e-05,
      "loss": 2.0932,
      "step": 32
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.8350641311400813e-05,
      "loss": 2.2732,
      "step": 33
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.8234381688461943e-05,
      "loss": 2.2529,
      "step": 34
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.8114557872800906e-05,
      "loss": 2.107,
      "step": 35
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.799122172929206e-05,
      "loss": 2.2048,
      "step": 36
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.7864426643095537e-05,
      "loss": 2.0147,
      "step": 37
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.773422749654988e-05,
      "loss": 1.9337,
      "step": 38
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.7600680645416583e-05,
      "loss": 2.0663,
      "step": 39
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.746384389448694e-05,
      "loss": 1.9993,
      "step": 40
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.7323776472561625e-05,
      "loss": 2.139,
      "step": 41
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.7180539006813973e-05,
      "loss": 2.0415,
      "step": 42
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.7034193496547903e-05,
      "loss": 2.076,
      "step": 43
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.6884803286362e-05,
      "loss": 1.9077,
      "step": 44
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.6732433038731245e-05,
      "loss": 1.9536,
      "step": 45
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.657714870601833e-05,
      "loss": 2.0756,
      "step": 46
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.641901750192666e-05,
      "loss": 1.8334,
      "step": 47
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.6258107872407376e-05,
      "loss": 1.9679,
      "step": 48
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.609448946603304e-05,
      "loss": 2.1307,
      "step": 49
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.592823310385073e-05,
      "loss": 2.0794,
      "step": 50
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.5759410748727663e-05,
      "loss": 1.8599,
      "step": 51
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.5588095474202597e-05,
      "loss": 1.9418,
      "step": 52
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.5414361432856475e-05,
      "loss": 1.9263,
      "step": 53
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.5238283824216015e-05,
      "loss": 1.8794,
      "step": 54
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.5059938862204126e-05,
      "loss": 1.9508,
      "step": 55
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.4879403742151283e-05,
      "loss": 2.0843,
      "step": 56
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.469675660738206e-05,
      "loss": 2.1207,
      "step": 57
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.4512076515391375e-05,
      "loss": 2.0181,
      "step": 58
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.4325443403625012e-05,
      "loss": 1.9578,
      "step": 59
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.4136938054879284e-05,
      "loss": 1.9789,
      "step": 60
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.3946642062334765e-05,
      "loss": 1.9475,
      "step": 61
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.3754637794239303e-05,
      "loss": 1.9798,
      "step": 62
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.356100835825547e-05,
      "loss": 2.0374,
      "step": 63
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.3365837565488065e-05,
      "loss": 1.8257,
      "step": 64
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.316920989420703e-05,
      "loss": 1.7065,
      "step": 65
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.2971210453281675e-05,
      "loss": 2.0069,
      "step": 66
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.2771924945341906e-05,
      "loss": 1.8164,
      "step": 67
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.257143962968246e-05,
      "loss": 1.8483,
      "step": 68
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.236984128492619e-05,
      "loss": 1.9107,
      "step": 69
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.2167217171462566e-05,
      "loss": 1.8914,
      "step": 70
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.1963654993677645e-05,
      "loss": 1.7124,
      "step": 71
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.1759242861991855e-05,
      "loss": 1.8255,
      "step": 72
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.155406925472205e-05,
      "loss": 1.7687,
      "step": 73
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.1348222979784289e-05,
      "loss": 1.6878,
      "step": 74
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.1141793136253987e-05,
      "loss": 1.6335,
      "step": 75
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.09348690758e-05,
      "loss": 1.8839,
      "step": 76
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.072754036400944e-05,
      "loss": 1.7922,
      "step": 77
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.0519896741619803e-05,
      "loss": 1.8622,
      "step": 78
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.0312028085675393e-05,
      "loss": 1.9027,
      "step": 79
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.0104024370624644e-05,
      "loss": 1.8657,
      "step": 80
    },
    {
      "epoch": 1.04,
      "learning_rate": 9.89597562937536e-06,
      "loss": 1.8238,
      "step": 81
    },
    {
      "epoch": 1.05,
      "learning_rate": 9.687971914324607e-06,
      "loss": 1.9649,
      "step": 82
    },
    {
      "epoch": 1.06,
      "learning_rate": 9.480103258380198e-06,
      "loss": 1.8795,
      "step": 83
    },
    {
      "epoch": 1.07,
      "learning_rate": 9.272459635990563e-06,
      "loss": 1.8047,
      "step": 84
    },
    {
      "epoch": 1.09,
      "learning_rate": 9.065130924199998e-06,
      "loss": 1.8717,
      "step": 85
    },
    {
      "epoch": 1.1,
      "learning_rate": 8.858206863746018e-06,
      "loss": 1.9954,
      "step": 86
    },
    {
      "epoch": 1.11,
      "learning_rate": 8.651777020215713e-06,
      "loss": 1.7742,
      "step": 87
    },
    {
      "epoch": 1.12,
      "learning_rate": 8.445930745277953e-06,
      "loss": 1.747,
      "step": 88
    },
    {
      "epoch": 1.14,
      "learning_rate": 8.240757138008149e-06,
      "loss": 1.7671,
      "step": 89
    },
    {
      "epoch": 1.15,
      "learning_rate": 8.036345006322358e-06,
      "loss": 1.7731,
      "step": 90
    },
    {
      "epoch": 1.16,
      "learning_rate": 7.832782828537437e-06,
      "loss": 1.9667,
      "step": 91
    },
    {
      "epoch": 1.18,
      "learning_rate": 7.630158715073813e-06,
      "loss": 1.8442,
      "step": 92
    },
    {
      "epoch": 1.19,
      "learning_rate": 7.428560370317542e-06,
      "loss": 1.7114,
      "step": 93
    },
    {
      "epoch": 1.2,
      "learning_rate": 7.228075054658096e-06,
      "loss": 1.7463,
      "step": 94
    },
    {
      "epoch": 1.21,
      "learning_rate": 7.028789546718327e-06,
      "loss": 1.7918,
      "step": 95
    },
    {
      "epoch": 1.23,
      "learning_rate": 6.8307901057929735e-06,
      "loss": 1.7758,
      "step": 96
    },
    {
      "epoch": 1.24,
      "learning_rate": 6.634162434511939e-06,
      "loss": 1.9351,
      "step": 97
    },
    {
      "epoch": 1.25,
      "learning_rate": 6.438991641744531e-06,
      "loss": 1.9534,
      "step": 98
    },
    {
      "epoch": 1.27,
      "learning_rate": 6.245362205760703e-06,
      "loss": 1.7821,
      "step": 99
    },
    {
      "epoch": 1.28,
      "learning_rate": 6.053357937665237e-06,
      "loss": 1.6681,
      "step": 100
    },
    {
      "epoch": 1.29,
      "learning_rate": 5.863061945120719e-06,
      "loss": 1.6398,
      "step": 101
    },
    {
      "epoch": 1.3,
      "learning_rate": 5.674556596374993e-06,
      "loss": 1.8436,
      "step": 102
    },
    {
      "epoch": 1.32,
      "learning_rate": 5.487923484608629e-06,
      "loss": 1.648,
      "step": 103
    },
    {
      "epoch": 1.33,
      "learning_rate": 5.3032433926179395e-06,
      "loss": 1.734,
      "step": 104
    },
    {
      "epoch": 1.34,
      "learning_rate": 5.120596257848716e-06,
      "loss": 1.8794,
      "step": 105
    },
    {
      "epoch": 1.35,
      "learning_rate": 4.940061137795876e-06,
      "loss": 1.7761,
      "step": 106
    },
    {
      "epoch": 1.37,
      "learning_rate": 4.7617161757839895e-06,
      "loss": 1.716,
      "step": 107
    },
    {
      "epoch": 1.38,
      "learning_rate": 4.5856385671435285e-06,
      "loss": 1.7799,
      "step": 108
    },
    {
      "epoch": 1.39,
      "learning_rate": 4.411904525797408e-06,
      "loss": 1.6644,
      "step": 109
    },
    {
      "epoch": 1.41,
      "learning_rate": 4.240589251272342e-06,
      "loss": 1.6787,
      "step": 110
    },
    {
      "epoch": 1.42,
      "learning_rate": 4.0717668961492725e-06,
      "loss": 1.7178,
      "step": 111
    },
    {
      "epoch": 1.43,
      "learning_rate": 3.905510533966959e-06,
      "loss": 1.6878,
      "step": 112
    },
    {
      "epoch": 1.44,
      "learning_rate": 3.7418921275926245e-06,
      "loss": 1.5657,
      "step": 113
    },
    {
      "epoch": 1.46,
      "learning_rate": 3.5809824980733445e-06,
      "loss": 1.7181,
      "step": 114
    },
    {
      "epoch": 1.47,
      "learning_rate": 3.422851293981676e-06,
      "loss": 1.6137,
      "step": 115
    },
    {
      "epoch": 1.48,
      "learning_rate": 3.2675669612687565e-06,
      "loss": 1.5988,
      "step": 116
    },
    {
      "epoch": 1.5,
      "learning_rate": 3.115196713638e-06,
      "loss": 1.6402,
      "step": 117
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.965806503452098e-06,
      "loss": 1.5697,
      "step": 118
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.819460993186032e-06,
      "loss": 1.7401,
      "step": 119
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.6762235274383775e-06,
      "loss": 1.6331,
      "step": 120
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.5361561055130625e-06,
      "loss": 1.6747,
      "step": 121
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.3993193545834182e-06,
      "loss": 1.6595,
      "step": 122
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.265772503450122e-06,
      "loss": 1.5915,
      "step": 123
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.1355733569044633e-06,
      "loss": 1.7466,
      "step": 124
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.008778270707944e-06,
      "loss": 1.5603,
      "step": 125
    },
    {
      "epoch": 1.61,
      "learning_rate": 1.8854421271990964e-06,
      "loss": 1.6223,
      "step": 126
    },
    {
      "epoch": 1.62,
      "learning_rate": 1.7656183115380577e-06,
      "loss": 1.7658,
      "step": 127
    },
    {
      "epoch": 1.64,
      "learning_rate": 1.6493586885991908e-06,
      "loss": 1.7695,
      "step": 128
    },
    {
      "epoch": 1.65,
      "learning_rate": 1.536713580521746e-06,
      "loss": 1.5982,
      "step": 129
    },
    {
      "epoch": 1.66,
      "learning_rate": 1.4277317449282834e-06,
      "loss": 1.6277,
      "step": 130
    },
    {
      "epoch": 1.67,
      "learning_rate": 1.3224603538202929e-06,
      "loss": 1.741,
      "step": 131
    },
    {
      "epoch": 1.69,
      "learning_rate": 1.220944973160133e-06,
      "loss": 1.5835,
      "step": 132
    },
    {
      "epoch": 1.7,
      "learning_rate": 1.1232295431481222e-06,
      "loss": 1.6105,
      "step": 133
    },
    {
      "epoch": 1.71,
      "learning_rate": 1.0293563592033595e-06,
      "loss": 1.769,
      "step": 134
    },
    {
      "epoch": 1.73,
      "learning_rate": 9.393660536564408e-07,
      "loss": 1.9176,
      "step": 135
    },
    {
      "epoch": 1.74,
      "learning_rate": 8.532975781620511e-07,
      "loss": 1.7732,
      "step": 136
    },
    {
      "epoch": 1.75,
      "learning_rate": 7.711881868390292e-07,
      "loss": 1.7261,
      "step": 137
    },
    {
      "epoch": 1.76,
      "learning_rate": 6.930734201451817e-07,
      "loss": 1.6634,
      "step": 138
    },
    {
      "epoch": 1.78,
      "learning_rate": 6.189870894938587e-07,
      "loss": 1.8281,
      "step": 139
    },
    {
      "epoch": 1.79,
      "learning_rate": 5.489612626189245e-07,
      "loss": 1.6842,
      "step": 140
    },
    {
      "epoch": 1.8,
      "learning_rate": 4.830262496944693e-07,
      "loss": 1.8311,
      "step": 141
    },
    {
      "epoch": 1.81,
      "learning_rate": 4.21210590215273e-07,
      "loss": 1.7023,
      "step": 142
    },
    {
      "epoch": 1.83,
      "learning_rate": 3.635410406436857e-07,
      "loss": 1.5255,
      "step": 143
    },
    {
      "epoch": 1.84,
      "learning_rate": 3.100425628282899e-07,
      "loss": 1.7267,
      "step": 144
    },
    {
      "epoch": 1.85,
      "learning_rate": 2.607383131993424e-07,
      "loss": 1.6569,
      "step": 145
    },
    {
      "epoch": 1.87,
      "learning_rate": 2.1564963274568028e-07,
      "loss": 1.6669,
      "step": 146
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.7479603777742937e-07,
      "loss": 1.6977,
      "step": 147
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.3819521147851122e-07,
      "loss": 1.8195,
      "step": 148
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.0586299625259699e-07,
      "loss": 1.5223,
      "step": 149
    },
    {
      "epoch": 1.92,
      "learning_rate": 7.781338686584928e-08,
      "loss": 1.6346,
      "step": 150
    },
    {
      "epoch": 1.93,
      "learning_rate": 5.405852438937764e-08,
      "loss": 1.6406,
      "step": 151
    },
    {
      "epoch": 1.94,
      "learning_rate": 3.460869094407127e-08,
      "loss": 1.5628,
      "step": 152
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.947230525005006e-08,
      "loss": 1.441,
      "step": 153
    },
    {
      "epoch": 1.97,
      "learning_rate": 8.655918982689582e-09,
      "loss": 1.6986,
      "step": 154
    },
    {
      "epoch": 1.98,
      "learning_rate": 2.164213936770576e-09,
      "loss": 1.7365,
      "step": 155
    },
    {
      "epoch": 1.99,
      "learning_rate": 0.0,
      "loss": 1.6223,
      "step": 156
    },
    {
      "epoch": 1.99,
      "step": 156,
      "total_flos": 8448650564861952.0,
      "train_loss": 1.2015542892309337,
      "train_runtime": 1225.7143,
      "train_samples_per_second": 16.317,
      "train_steps_per_second": 0.127
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 156,
  "num_train_epochs": 2,
  "save_steps": 50,
  "total_flos": 8448650564861952.0,
  "trial_name": null,
  "trial_params": null
}