| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 3.0, |
| "eval_steps": 500, |
| "global_step": 564, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.005319148936170213, |
| "grad_norm": 0.009561154911335044, |
| "learning_rate": 1.4035087719298246e-06, |
| "loss": 0.1755, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.010638297872340425, |
| "grad_norm": 0.009417024000546658, |
| "learning_rate": 2.8070175438596493e-06, |
| "loss": 0.1802, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.015957446808510637, |
| "grad_norm": 0.010116583556988192, |
| "learning_rate": 4.210526315789474e-06, |
| "loss": 0.1746, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.02127659574468085, |
| "grad_norm": 0.009334399611426118, |
| "learning_rate": 5.6140350877192985e-06, |
| "loss": 0.1913, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.026595744680851064, |
| "grad_norm": 0.010636734605182496, |
| "learning_rate": 7.017543859649123e-06, |
| "loss": 0.1773, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.031914893617021274, |
| "grad_norm": 0.010122285207874202, |
| "learning_rate": 8.421052631578948e-06, |
| "loss": 0.1866, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.03723404255319149, |
| "grad_norm": 0.00937113637210872, |
| "learning_rate": 9.824561403508772e-06, |
| "loss": 0.1779, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.0425531914893617, |
| "grad_norm": 0.00969444789354534, |
| "learning_rate": 1.1228070175438597e-05, |
| "loss": 0.1745, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.047872340425531915, |
| "grad_norm": 0.009832210038464333, |
| "learning_rate": 1.263157894736842e-05, |
| "loss": 0.1801, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.05319148936170213, |
| "grad_norm": 0.009980332112239266, |
| "learning_rate": 1.4035087719298246e-05, |
| "loss": 0.1874, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.05851063829787234, |
| "grad_norm": 0.01063535083092729, |
| "learning_rate": 1.543859649122807e-05, |
| "loss": 0.1754, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.06382978723404255, |
| "grad_norm": 0.01136221960257153, |
| "learning_rate": 1.6842105263157896e-05, |
| "loss": 0.1769, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.06914893617021277, |
| "grad_norm": 0.010796659895276959, |
| "learning_rate": 1.824561403508772e-05, |
| "loss": 0.1811, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.07446808510638298, |
| "grad_norm": 0.010828246350806878, |
| "learning_rate": 1.9649122807017544e-05, |
| "loss": 0.1811, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.0797872340425532, |
| "grad_norm": 0.012967063647540094, |
| "learning_rate": 2.105263157894737e-05, |
| "loss": 0.1711, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.0851063829787234, |
| "grad_norm": 0.014401121661797859, |
| "learning_rate": 2.2456140350877194e-05, |
| "loss": 0.1778, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.09042553191489362, |
| "grad_norm": 0.012707389517038273, |
| "learning_rate": 2.385964912280702e-05, |
| "loss": 0.1833, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.09574468085106383, |
| "grad_norm": 0.014684451595816035, |
| "learning_rate": 2.526315789473684e-05, |
| "loss": 0.1665, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.10106382978723404, |
| "grad_norm": 0.014208539873317407, |
| "learning_rate": 2.6666666666666667e-05, |
| "loss": 0.1716, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.10638297872340426, |
| "grad_norm": 0.012655789234389916, |
| "learning_rate": 2.8070175438596492e-05, |
| "loss": 0.1845, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.11170212765957446, |
| "grad_norm": 0.013478779449117998, |
| "learning_rate": 2.9473684210526317e-05, |
| "loss": 0.1717, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.11702127659574468, |
| "grad_norm": 0.01453653728901511, |
| "learning_rate": 3.087719298245614e-05, |
| "loss": 0.1636, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.12234042553191489, |
| "grad_norm": 0.013308065595260715, |
| "learning_rate": 3.228070175438597e-05, |
| "loss": 0.1663, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.1276595744680851, |
| "grad_norm": 0.01166891361475697, |
| "learning_rate": 3.368421052631579e-05, |
| "loss": 0.1535, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.13297872340425532, |
| "grad_norm": 0.011275841571644641, |
| "learning_rate": 3.508771929824562e-05, |
| "loss": 0.1555, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.13829787234042554, |
| "grad_norm": 0.010675783250049909, |
| "learning_rate": 3.649122807017544e-05, |
| "loss": 0.1576, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.14361702127659576, |
| "grad_norm": 0.010645443601299744, |
| "learning_rate": 3.789473684210526e-05, |
| "loss": 0.1499, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.14893617021276595, |
| "grad_norm": 0.011375250838379794, |
| "learning_rate": 3.929824561403509e-05, |
| "loss": 0.1508, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.15425531914893617, |
| "grad_norm": 0.01174614273803293, |
| "learning_rate": 4.070175438596492e-05, |
| "loss": 0.1507, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.1595744680851064, |
| "grad_norm": 0.013268526158970286, |
| "learning_rate": 4.210526315789474e-05, |
| "loss": 0.1511, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.16489361702127658, |
| "grad_norm": 0.016094704780159706, |
| "learning_rate": 4.350877192982457e-05, |
| "loss": 0.138, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.1702127659574468, |
| "grad_norm": 0.014231850913349995, |
| "learning_rate": 4.491228070175439e-05, |
| "loss": 0.142, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.17553191489361702, |
| "grad_norm": 0.015183724538402317, |
| "learning_rate": 4.6315789473684214e-05, |
| "loss": 0.1327, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.18085106382978725, |
| "grad_norm": 0.011214302590448333, |
| "learning_rate": 4.771929824561404e-05, |
| "loss": 0.1339, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.18617021276595744, |
| "grad_norm": 0.011210375598494636, |
| "learning_rate": 4.9122807017543864e-05, |
| "loss": 0.1333, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.19148936170212766, |
| "grad_norm": 0.011892406426104361, |
| "learning_rate": 5.052631578947368e-05, |
| "loss": 0.1257, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.19680851063829788, |
| "grad_norm": 0.011288000710952284, |
| "learning_rate": 5.1929824561403515e-05, |
| "loss": 0.1271, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.20212765957446807, |
| "grad_norm": 0.010492140609106346, |
| "learning_rate": 5.333333333333333e-05, |
| "loss": 0.1271, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.2074468085106383, |
| "grad_norm": 0.01033779471257665, |
| "learning_rate": 5.4736842105263165e-05, |
| "loss": 0.1162, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.2127659574468085, |
| "grad_norm": 0.011149841504085078, |
| "learning_rate": 5.6140350877192984e-05, |
| "loss": 0.1137, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.21808510638297873, |
| "grad_norm": 0.011141635603095867, |
| "learning_rate": 5.7543859649122816e-05, |
| "loss": 0.1175, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.22340425531914893, |
| "grad_norm": 0.010407733066101588, |
| "learning_rate": 5.8947368421052634e-05, |
| "loss": 0.11, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.22872340425531915, |
| "grad_norm": 0.011531135807763208, |
| "learning_rate": 6.035087719298246e-05, |
| "loss": 0.1056, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.23404255319148937, |
| "grad_norm": 0.010983597061530941, |
| "learning_rate": 6.175438596491228e-05, |
| "loss": 0.1111, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.2393617021276596, |
| "grad_norm": 0.011983118403984453, |
| "learning_rate": 6.315789473684212e-05, |
| "loss": 0.1056, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.24468085106382978, |
| "grad_norm": 0.011009586929078287, |
| "learning_rate": 6.456140350877194e-05, |
| "loss": 0.104, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 0.01024427372191449, |
| "learning_rate": 6.596491228070175e-05, |
| "loss": 0.0907, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.2553191489361702, |
| "grad_norm": 0.009735327047319636, |
| "learning_rate": 6.736842105263159e-05, |
| "loss": 0.103, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.26063829787234044, |
| "grad_norm": 0.009901003489880959, |
| "learning_rate": 6.87719298245614e-05, |
| "loss": 0.0921, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.26595744680851063, |
| "grad_norm": 0.009695233843116385, |
| "learning_rate": 7.017543859649124e-05, |
| "loss": 0.0815, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.2712765957446808, |
| "grad_norm": 0.010956603236800123, |
| "learning_rate": 7.157894736842105e-05, |
| "loss": 0.0888, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.2765957446808511, |
| "grad_norm": 0.011083272917479708, |
| "learning_rate": 7.298245614035087e-05, |
| "loss": 0.0858, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.28191489361702127, |
| "grad_norm": 0.010950809209300583, |
| "learning_rate": 7.43859649122807e-05, |
| "loss": 0.0781, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.2872340425531915, |
| "grad_norm": 0.010675185765520238, |
| "learning_rate": 7.578947368421052e-05, |
| "loss": 0.0854, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.2925531914893617, |
| "grad_norm": 0.010915668773410872, |
| "learning_rate": 7.719298245614036e-05, |
| "loss": 0.0847, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.2978723404255319, |
| "grad_norm": 0.010822616560865793, |
| "learning_rate": 7.859649122807017e-05, |
| "loss": 0.081, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.30319148936170215, |
| "grad_norm": 0.0107935601921023, |
| "learning_rate": 8e-05, |
| "loss": 0.0747, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.30851063829787234, |
| "grad_norm": 0.014591153145662837, |
| "learning_rate": 7.99992320862698e-05, |
| "loss": 0.0744, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.31382978723404253, |
| "grad_norm": 0.010284692962658925, |
| "learning_rate": 7.999692837456373e-05, |
| "loss": 0.0767, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.3191489361702128, |
| "grad_norm": 0.012836377596775543, |
| "learning_rate": 7.999308895333442e-05, |
| "loss": 0.06, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.324468085106383, |
| "grad_norm": 0.017491704991111173, |
| "learning_rate": 7.998771396999908e-05, |
| "loss": 0.0822, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.32978723404255317, |
| "grad_norm": 0.012287714359868156, |
| "learning_rate": 7.998080363093387e-05, |
| "loss": 0.0636, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.3351063829787234, |
| "grad_norm": 0.01878993418191391, |
| "learning_rate": 7.9972358201466e-05, |
| "loss": 0.0643, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.3404255319148936, |
| "grad_norm": 0.01448479189635225, |
| "learning_rate": 7.996237800586354e-05, |
| "loss": 0.0642, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.34574468085106386, |
| "grad_norm": 0.013277729537687489, |
| "learning_rate": 7.995086342732296e-05, |
| "loss": 0.0701, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.35106382978723405, |
| "grad_norm": 0.014123573086677482, |
| "learning_rate": 7.99378149079544e-05, |
| "loss": 0.0719, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.35638297872340424, |
| "grad_norm": 0.013123241507956603, |
| "learning_rate": 7.992323294876472e-05, |
| "loss": 0.0604, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.3617021276595745, |
| "grad_norm": 0.014584991003349663, |
| "learning_rate": 7.990711810963825e-05, |
| "loss": 0.0672, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.3670212765957447, |
| "grad_norm": 0.012307848920990693, |
| "learning_rate": 7.98894710093153e-05, |
| "loss": 0.0718, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.3723404255319149, |
| "grad_norm": 0.009528323138503523, |
| "learning_rate": 7.987029232536841e-05, |
| "loss": 0.0583, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.3776595744680851, |
| "grad_norm": 0.016166270367235794, |
| "learning_rate": 7.984958279417631e-05, |
| "loss": 0.0563, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.3829787234042553, |
| "grad_norm": 0.010423282112990056, |
| "learning_rate": 7.982734321089566e-05, |
| "loss": 0.0621, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.3882978723404255, |
| "grad_norm": 0.012746741765982545, |
| "learning_rate": 7.980357442943054e-05, |
| "loss": 0.0614, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.39361702127659576, |
| "grad_norm": 0.012086758277574675, |
| "learning_rate": 7.977827736239966e-05, |
| "loss": 0.061, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.39893617021276595, |
| "grad_norm": 0.008628045103687244, |
| "learning_rate": 7.975145298110121e-05, |
| "loss": 0.0624, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.40425531914893614, |
| "grad_norm": 0.021048414457652535, |
| "learning_rate": 7.972310231547579e-05, |
| "loss": 0.0644, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.4095744680851064, |
| "grad_norm": 0.014191091339863782, |
| "learning_rate": 7.969322645406661e-05, |
| "loss": 0.0547, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.4148936170212766, |
| "grad_norm": 0.011884095111294736, |
| "learning_rate": 7.966182654397792e-05, |
| "loss": 0.0639, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.42021276595744683, |
| "grad_norm": 0.007948739073755548, |
| "learning_rate": 7.962890379083082e-05, |
| "loss": 0.0685, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.425531914893617, |
| "grad_norm": 0.010645064952770983, |
| "learning_rate": 7.9594459458717e-05, |
| "loss": 0.0489, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.4308510638297872, |
| "grad_norm": 0.008765974581248325, |
| "learning_rate": 7.955849487015026e-05, |
| "loss": 0.0529, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.43617021276595747, |
| "grad_norm": 0.008266727410392078, |
| "learning_rate": 7.952101140601566e-05, |
| "loss": 0.0559, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.44148936170212766, |
| "grad_norm": 0.008762924603276562, |
| "learning_rate": 7.948201050551651e-05, |
| "loss": 0.0632, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.44680851063829785, |
| "grad_norm": 0.008536028513454225, |
| "learning_rate": 7.944149366611921e-05, |
| "loss": 0.0711, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.4521276595744681, |
| "grad_norm": 0.007506130196497499, |
| "learning_rate": 7.93994624434956e-05, |
| "loss": 0.0614, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.4574468085106383, |
| "grad_norm": 0.013215586532834878, |
| "learning_rate": 7.935591845146332e-05, |
| "loss": 0.0618, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.4627659574468085, |
| "grad_norm": 0.007354194968520879, |
| "learning_rate": 7.931086336192385e-05, |
| "loss": 0.053, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.46808510638297873, |
| "grad_norm": 0.016244809833768043, |
| "learning_rate": 7.926429890479826e-05, |
| "loss": 0.0591, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.4734042553191489, |
| "grad_norm": 0.006810029557397404, |
| "learning_rate": 7.921622686796088e-05, |
| "loss": 0.0575, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.4787234042553192, |
| "grad_norm": 0.00742457550555756, |
| "learning_rate": 7.916664909717055e-05, |
| "loss": 0.0614, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.48404255319148937, |
| "grad_norm": 0.00880206806081524, |
| "learning_rate": 7.911556749599982e-05, |
| "loss": 0.066, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.48936170212765956, |
| "grad_norm": 0.008684236043180452, |
| "learning_rate": 7.906298402576183e-05, |
| "loss": 0.0542, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.4946808510638298, |
| "grad_norm": 0.007704209928003154, |
| "learning_rate": 7.900890070543503e-05, |
| "loss": 0.0639, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 0.006590582428715168, |
| "learning_rate": 7.895331961158561e-05, |
| "loss": 0.0583, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.5053191489361702, |
| "grad_norm": 0.008366147966217917, |
| "learning_rate": 7.889624287828785e-05, |
| "loss": 0.0614, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.5106382978723404, |
| "grad_norm": 0.008155256525355911, |
| "learning_rate": 7.883767269704209e-05, |
| "loss": 0.0616, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.5159574468085106, |
| "grad_norm": 0.006900966096502813, |
| "learning_rate": 7.877761131669065e-05, |
| "loss": 0.0581, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.5212765957446809, |
| "grad_norm": 0.007853442068788125, |
| "learning_rate": 7.871606104333149e-05, |
| "loss": 0.0535, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.526595744680851, |
| "grad_norm": 0.008083954715164652, |
| "learning_rate": 7.865302424022958e-05, |
| "loss": 0.0609, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.5319148936170213, |
| "grad_norm": 0.007028938410701514, |
| "learning_rate": 7.858850332772625e-05, |
| "loss": 0.0631, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.5372340425531915, |
| "grad_norm": 0.0079742828320416, |
| "learning_rate": 7.852250078314624e-05, |
| "loss": 0.0491, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.5425531914893617, |
| "grad_norm": 0.0071240966142409115, |
| "learning_rate": 7.845501914070257e-05, |
| "loss": 0.0694, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.5478723404255319, |
| "grad_norm": 0.0085749142911446, |
| "learning_rate": 7.83860609913992e-05, |
| "loss": 0.0595, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.5531914893617021, |
| "grad_norm": 0.006164034136135808, |
| "learning_rate": 7.831562898293164e-05, |
| "loss": 0.0548, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.5585106382978723, |
| "grad_norm": 0.007639317361603054, |
| "learning_rate": 7.82437258195852e-05, |
| "loss": 0.0576, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.5638297872340425, |
| "grad_norm": 0.0075059924765771896, |
| "learning_rate": 7.817035426213119e-05, |
| "loss": 0.0546, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.5691489361702128, |
| "grad_norm": 0.006448327808500205, |
| "learning_rate": 7.809551712772094e-05, |
| "loss": 0.0605, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.574468085106383, |
| "grad_norm": 0.006573290280877171, |
| "learning_rate": 7.80192172897776e-05, |
| "loss": 0.0578, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.5797872340425532, |
| "grad_norm": 0.006835636654187989, |
| "learning_rate": 7.794145767788582e-05, |
| "loss": 0.0562, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.5851063829787234, |
| "grad_norm": 0.006764865571110806, |
| "learning_rate": 7.78622412776793e-05, |
| "loss": 0.0579, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.5904255319148937, |
| "grad_norm": 0.006884519326644317, |
| "learning_rate": 7.778157113072609e-05, |
| "loss": 0.065, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.5957446808510638, |
| "grad_norm": 0.007886043916845945, |
| "learning_rate": 7.769945033441187e-05, |
| "loss": 0.0591, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.601063829787234, |
| "grad_norm": 0.007696063393083606, |
| "learning_rate": 7.761588204182099e-05, |
| "loss": 0.0526, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.6063829787234043, |
| "grad_norm": 0.007480083702728038, |
| "learning_rate": 7.753086946161541e-05, |
| "loss": 0.0485, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.6117021276595744, |
| "grad_norm": 0.007222100533336928, |
| "learning_rate": 7.744441585791153e-05, |
| "loss": 0.0509, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.6170212765957447, |
| "grad_norm": 0.0077514372788631825, |
| "learning_rate": 7.735652455015479e-05, |
| "loss": 0.0563, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.6223404255319149, |
| "grad_norm": 0.006877356775872902, |
| "learning_rate": 7.72671989129923e-05, |
| "loss": 0.0575, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.6276595744680851, |
| "grad_norm": 0.007358462447017441, |
| "learning_rate": 7.717644237614321e-05, |
| "loss": 0.0608, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.6329787234042553, |
| "grad_norm": 0.007115399890363866, |
| "learning_rate": 7.708425842426709e-05, |
| "loss": 0.0472, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.6382978723404256, |
| "grad_norm": 0.007262902947298311, |
| "learning_rate": 7.699065059683004e-05, |
| "loss": 0.0625, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.6436170212765957, |
| "grad_norm": 0.007412623383774491, |
| "learning_rate": 7.689562248796883e-05, |
| "loss": 0.0487, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.648936170212766, |
| "grad_norm": 0.006227288002816663, |
| "learning_rate": 7.679917774635298e-05, |
| "loss": 0.06, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.6542553191489362, |
| "grad_norm": 0.006709905429038058, |
| "learning_rate": 7.670132007504452e-05, |
| "loss": 0.0539, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.6595744680851063, |
| "grad_norm": 0.00761976081275206, |
| "learning_rate": 7.660205323135595e-05, |
| "loss": 0.055, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.6648936170212766, |
| "grad_norm": 0.006816417427517607, |
| "learning_rate": 7.650138102670587e-05, |
| "loss": 0.0569, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.6702127659574468, |
| "grad_norm": 0.007078205199271824, |
| "learning_rate": 7.639930732647267e-05, |
| "loss": 0.0622, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.675531914893617, |
| "grad_norm": 0.007592150389525906, |
| "learning_rate": 7.629583604984618e-05, |
| "loss": 0.0649, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.6808510638297872, |
| "grad_norm": 0.006882934144710119, |
| "learning_rate": 7.619097116967707e-05, |
| "loss": 0.059, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.6861702127659575, |
| "grad_norm": 0.00716349908298991, |
| "learning_rate": 7.608471671232443e-05, |
| "loss": 0.0481, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.6914893617021277, |
| "grad_norm": 0.007116087746097427, |
| "learning_rate": 7.597707675750108e-05, |
| "loss": 0.0544, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.6968085106382979, |
| "grad_norm": 0.006529125411797407, |
| "learning_rate": 7.586805543811699e-05, |
| "loss": 0.0614, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.7021276595744681, |
| "grad_norm": 0.006649370184241135, |
| "learning_rate": 7.575765694012055e-05, |
| "loss": 0.0509, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.7074468085106383, |
| "grad_norm": 0.009091831721893998, |
| "learning_rate": 7.564588550233786e-05, |
| "loss": 0.0623, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.7127659574468085, |
| "grad_norm": 0.006852350753434688, |
| "learning_rate": 7.553274541631008e-05, |
| "loss": 0.0521, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.7180851063829787, |
| "grad_norm": 0.012935847955709368, |
| "learning_rate": 7.541824102612839e-05, |
| "loss": 0.0506, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.723404255319149, |
| "grad_norm": 0.006635233166836011, |
| "learning_rate": 7.530237672826755e-05, |
| "loss": 0.0513, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.7287234042553191, |
| "grad_norm": 0.007396511537910537, |
| "learning_rate": 7.518515697141676e-05, |
| "loss": 0.0571, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.7340425531914894, |
| "grad_norm": 0.006272758585952709, |
| "learning_rate": 7.506658625630906e-05, |
| "loss": 0.0577, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.7393617021276596, |
| "grad_norm": 0.007215512440685293, |
| "learning_rate": 7.494666913554847e-05, |
| "loss": 0.0525, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.7446808510638298, |
| "grad_norm": 0.008073236295641481, |
| "learning_rate": 7.482541021343518e-05, |
| "loss": 0.0599, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 0.0075081856867129015, |
| "learning_rate": 7.470281414578871e-05, |
| "loss": 0.0538, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.7553191489361702, |
| "grad_norm": 0.011241759305206718, |
| "learning_rate": 7.457888563976929e-05, |
| "loss": 0.0647, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.7606382978723404, |
| "grad_norm": 0.009272069310553854, |
| "learning_rate": 7.445362945369696e-05, |
| "loss": 0.0547, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.7659574468085106, |
| "grad_norm": 0.007692861669562378, |
| "learning_rate": 7.432705039686896e-05, |
| "loss": 0.0567, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.7712765957446809, |
| "grad_norm": 0.007799025058812548, |
| "learning_rate": 7.41991533293751e-05, |
| "loss": 0.0574, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.776595744680851, |
| "grad_norm": 0.0072290282211692316, |
| "learning_rate": 7.406994316191109e-05, |
| "loss": 0.0636, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.7819148936170213, |
| "grad_norm": 0.008063455133510753, |
| "learning_rate": 7.393942485558998e-05, |
| "loss": 0.0586, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.7872340425531915, |
| "grad_norm": 0.008205838050790625, |
| "learning_rate": 7.38076034217518e-05, |
| "loss": 0.0542, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.7925531914893617, |
| "grad_norm": 0.007823736183597602, |
| "learning_rate": 7.367448392177094e-05, |
| "loss": 0.0623, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.7978723404255319, |
| "grad_norm": 0.007430604257458213, |
| "learning_rate": 7.354007146686203e-05, |
| "loss": 0.0608, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.8031914893617021, |
| "grad_norm": 0.007205687178727836, |
| "learning_rate": 7.340437121788354e-05, |
| "loss": 0.0529, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.8085106382978723, |
| "grad_norm": 0.008118522091005099, |
| "learning_rate": 7.326738838513967e-05, |
| "loss": 0.0545, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.8138297872340425, |
| "grad_norm": 0.009314357301676833, |
| "learning_rate": 7.312912822818035e-05, |
| "loss": 0.0495, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.8191489361702128, |
| "grad_norm": 0.007604144473266805, |
| "learning_rate": 7.298959605559922e-05, |
| "loss": 0.0509, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.824468085106383, |
| "grad_norm": 0.007455592041832291, |
| "learning_rate": 7.284879722482983e-05, |
| "loss": 0.0535, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.8297872340425532, |
| "grad_norm": 0.007506837756117337, |
| "learning_rate": 7.270673714193994e-05, |
| "loss": 0.048, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.8351063829787234, |
| "grad_norm": 0.006679291515171901, |
| "learning_rate": 7.256342126142397e-05, |
| "loss": 0.0637, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.8404255319148937, |
| "grad_norm": 0.007489623089323601, |
| "learning_rate": 7.241885508599353e-05, |
| "loss": 0.0508, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.8457446808510638, |
| "grad_norm": 0.007104773484113012, |
| "learning_rate": 7.227304416636619e-05, |
| "loss": 0.0551, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.851063829787234, |
| "grad_norm": 0.006979020690716367, |
| "learning_rate": 7.21259941010523e-05, |
| "loss": 0.0613, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.8563829787234043, |
| "grad_norm": 0.006744348116220081, |
| "learning_rate": 7.197771053614006e-05, |
| "loss": 0.0561, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.8617021276595744, |
| "grad_norm": 0.0067229803925796024, |
| "learning_rate": 7.182819916507873e-05, |
| "loss": 0.0517, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.8670212765957447, |
| "grad_norm": 0.00931157182556077, |
| "learning_rate": 7.16774657284601e-05, |
| "loss": 0.0544, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.8723404255319149, |
| "grad_norm": 0.008654675730737709, |
| "learning_rate": 7.15255160137979e-05, |
| "loss": 0.0471, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.8776595744680851, |
| "grad_norm": 0.006837962889682489, |
| "learning_rate": 7.137235585530575e-05, |
| "loss": 0.0508, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.8829787234042553, |
| "grad_norm": 0.007334374370839323, |
| "learning_rate": 7.121799113367306e-05, |
| "loss": 0.0496, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.8882978723404256, |
| "grad_norm": 0.006778922572558991, |
| "learning_rate": 7.106242777583933e-05, |
| "loss": 0.0545, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.8936170212765957, |
| "grad_norm": 0.010426789397646885, |
| "learning_rate": 7.090567175476648e-05, |
| "loss": 0.0534, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.898936170212766, |
| "grad_norm": 0.006607076689478627, |
| "learning_rate": 7.074772908920953e-05, |
| "loss": 0.0538, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.9042553191489362, |
| "grad_norm": 0.007436067473091195, |
| "learning_rate": 7.058860584348555e-05, |
| "loss": 0.0504, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.9095744680851063, |
| "grad_norm": 0.007077560789983193, |
| "learning_rate": 7.042830812724081e-05, |
| "loss": 0.0553, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.9148936170212766, |
| "grad_norm": 0.007421291349082297, |
| "learning_rate": 7.026684209521618e-05, |
| "loss": 0.0477, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.9202127659574468, |
| "grad_norm": 0.008267153622335049, |
| "learning_rate": 7.010421394701079e-05, |
| "loss": 0.0546, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.925531914893617, |
| "grad_norm": 0.008486501007180495, |
| "learning_rate": 6.994042992684406e-05, |
| "loss": 0.051, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.9308510638297872, |
| "grad_norm": 0.006791287167312348, |
| "learning_rate": 6.977549632331585e-05, |
| "loss": 0.0494, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.9361702127659575, |
| "grad_norm": 0.007026222867661035, |
| "learning_rate": 6.960941946916512e-05, |
| "loss": 0.0486, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.9414893617021277, |
| "grad_norm": 0.0073427814850310505, |
| "learning_rate": 6.944220574102669e-05, |
| "loss": 0.0495, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.9468085106382979, |
| "grad_norm": 0.00939447240023586, |
| "learning_rate": 6.927386155918646e-05, |
| "loss": 0.0554, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.9521276595744681, |
| "grad_norm": 0.007113435102389512, |
| "learning_rate": 6.910439338733484e-05, |
| "loss": 0.0547, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.9574468085106383, |
| "grad_norm": 0.008197567333853677, |
| "learning_rate": 6.893380773231865e-05, |
| "loss": 0.0541, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.9627659574468085, |
| "grad_norm": 0.0067846258680574, |
| "learning_rate": 6.876211114389123e-05, |
| "loss": 0.0548, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.9680851063829787, |
| "grad_norm": 0.006558858686191774, |
| "learning_rate": 6.858931021446092e-05, |
| "loss": 0.0569, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.973404255319149, |
| "grad_norm": 0.007346927718564663, |
| "learning_rate": 6.841541157883807e-05, |
| "loss": 0.0553, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.9787234042553191, |
| "grad_norm": 0.006628804183352618, |
| "learning_rate": 6.824042191398018e-05, |
| "loss": 0.0461, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.9840425531914894, |
| "grad_norm": 0.006565776930343823, |
| "learning_rate": 6.806434793873555e-05, |
| "loss": 0.0569, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.9893617021276596, |
| "grad_norm": 0.008428687319511245, |
| "learning_rate": 6.788719641358533e-05, |
| "loss": 0.0525, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.9946808510638298, |
| "grad_norm": 0.007231980801782624, |
| "learning_rate": 6.770897414038398e-05, |
| "loss": 0.0564, |
| "step": 187 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.007199071126863927, |
| "learning_rate": 6.7529687962098e-05, |
| "loss": 0.0508, |
| "step": 188 |
| }, |
| { |
| "epoch": 1.0053191489361701, |
| "grad_norm": 0.007698913160393612, |
| "learning_rate": 6.73493447625433e-05, |
| "loss": 0.0449, |
| "step": 189 |
| }, |
| { |
| "epoch": 1.0106382978723405, |
| "grad_norm": 0.007075718521029457, |
| "learning_rate": 6.716795146612082e-05, |
| "loss": 0.0465, |
| "step": 190 |
| }, |
| { |
| "epoch": 1.0159574468085106, |
| "grad_norm": 0.007787560829006243, |
| "learning_rate": 6.69855150375507e-05, |
| "loss": 0.0452, |
| "step": 191 |
| }, |
| { |
| "epoch": 1.0212765957446808, |
| "grad_norm": 0.007146156188810159, |
| "learning_rate": 6.680204248160489e-05, |
| "loss": 0.0557, |
| "step": 192 |
| }, |
| { |
| "epoch": 1.0265957446808511, |
| "grad_norm": 0.006494048050335908, |
| "learning_rate": 6.661754084283813e-05, |
| "loss": 0.0524, |
| "step": 193 |
| }, |
| { |
| "epoch": 1.0319148936170213, |
| "grad_norm": 0.008447897620748324, |
| "learning_rate": 6.643201720531747e-05, |
| "loss": 0.0494, |
| "step": 194 |
| }, |
| { |
| "epoch": 1.0372340425531914, |
| "grad_norm": 0.006687788733037075, |
| "learning_rate": 6.624547869235035e-05, |
| "loss": 0.0487, |
| "step": 195 |
| }, |
| { |
| "epoch": 1.0425531914893618, |
| "grad_norm": 0.007544841508955585, |
| "learning_rate": 6.605793246621105e-05, |
| "loss": 0.0515, |
| "step": 196 |
| }, |
| { |
| "epoch": 1.047872340425532, |
| "grad_norm": 0.007088986909333983, |
| "learning_rate": 6.586938572786565e-05, |
| "loss": 0.0555, |
| "step": 197 |
| }, |
| { |
| "epoch": 1.053191489361702, |
| "grad_norm": 0.008031390701399636, |
| "learning_rate": 6.567984571669564e-05, |
| "loss": 0.0512, |
| "step": 198 |
| }, |
| { |
| "epoch": 1.0585106382978724, |
| "grad_norm": 0.007084650359565095, |
| "learning_rate": 6.548931971021984e-05, |
| "loss": 0.053, |
| "step": 199 |
| }, |
| { |
| "epoch": 1.0638297872340425, |
| "grad_norm": 0.007439919427825884, |
| "learning_rate": 6.529781502381509e-05, |
| "loss": 0.0539, |
| "step": 200 |
| }, |
| { |
| "epoch": 1.0691489361702127, |
| "grad_norm": 0.006701997691768118, |
| "learning_rate": 6.510533901043529e-05, |
| "loss": 0.0484, |
| "step": 201 |
| }, |
| { |
| "epoch": 1.074468085106383, |
| "grad_norm": 0.00731882541153124, |
| "learning_rate": 6.49118990603291e-05, |
| "loss": 0.0463, |
| "step": 202 |
| }, |
| { |
| "epoch": 1.0797872340425532, |
| "grad_norm": 0.006116525693404446, |
| "learning_rate": 6.471750260075619e-05, |
| "loss": 0.0439, |
| "step": 203 |
| }, |
| { |
| "epoch": 1.0851063829787233, |
| "grad_norm": 0.007388241001202413, |
| "learning_rate": 6.452215709570212e-05, |
| "loss": 0.0484, |
| "step": 204 |
| }, |
| { |
| "epoch": 1.0904255319148937, |
| "grad_norm": 0.008666776246601548, |
| "learning_rate": 6.432587004559163e-05, |
| "loss": 0.0543, |
| "step": 205 |
| }, |
| { |
| "epoch": 1.0957446808510638, |
| "grad_norm": 0.007138924190681442, |
| "learning_rate": 6.412864898700078e-05, |
| "loss": 0.0463, |
| "step": 206 |
| }, |
| { |
| "epoch": 1.101063829787234, |
| "grad_norm": 0.00725240803680164, |
| "learning_rate": 6.393050149236751e-05, |
| "loss": 0.051, |
| "step": 207 |
| }, |
| { |
| "epoch": 1.1063829787234043, |
| "grad_norm": 0.006850029160465142, |
| "learning_rate": 6.373143516970088e-05, |
| "loss": 0.0417, |
| "step": 208 |
| }, |
| { |
| "epoch": 1.1117021276595744, |
| "grad_norm": 0.007642565085385908, |
| "learning_rate": 6.353145766228905e-05, |
| "loss": 0.0492, |
| "step": 209 |
| }, |
| { |
| "epoch": 1.1170212765957448, |
| "grad_norm": 0.00668100280420778, |
| "learning_rate": 6.333057664840567e-05, |
| "loss": 0.0548, |
| "step": 210 |
| }, |
| { |
| "epoch": 1.122340425531915, |
| "grad_norm": 0.0067289847920395185, |
| "learning_rate": 6.31287998410152e-05, |
| "loss": 0.0498, |
| "step": 211 |
| }, |
| { |
| "epoch": 1.127659574468085, |
| "grad_norm": 0.006864325578896686, |
| "learning_rate": 6.292613498747665e-05, |
| "loss": 0.0564, |
| "step": 212 |
| }, |
| { |
| "epoch": 1.1329787234042552, |
| "grad_norm": 0.008313073585277162, |
| "learning_rate": 6.272258986924624e-05, |
| "loss": 0.0484, |
| "step": 213 |
| }, |
| { |
| "epoch": 1.1382978723404256, |
| "grad_norm": 0.006855863390220988, |
| "learning_rate": 6.25181723015785e-05, |
| "loss": 0.0528, |
| "step": 214 |
| }, |
| { |
| "epoch": 1.1436170212765957, |
| "grad_norm": 0.007393622334837033, |
| "learning_rate": 6.231289013322628e-05, |
| "loss": 0.0481, |
| "step": 215 |
| }, |
| { |
| "epoch": 1.148936170212766, |
| "grad_norm": 0.007581599816486484, |
| "learning_rate": 6.210675124613936e-05, |
| "loss": 0.0557, |
| "step": 216 |
| }, |
| { |
| "epoch": 1.1542553191489362, |
| "grad_norm": 0.006535831248330529, |
| "learning_rate": 6.189976355516182e-05, |
| "loss": 0.0471, |
| "step": 217 |
| }, |
| { |
| "epoch": 1.1595744680851063, |
| "grad_norm": 0.007104957520970814, |
| "learning_rate": 6.169193500772816e-05, |
| "loss": 0.0539, |
| "step": 218 |
| }, |
| { |
| "epoch": 1.1648936170212765, |
| "grad_norm": 0.006509507833000309, |
| "learning_rate": 6.148327358355815e-05, |
| "loss": 0.0484, |
| "step": 219 |
| }, |
| { |
| "epoch": 1.1702127659574468, |
| "grad_norm": 0.006782884912666521, |
| "learning_rate": 6.12737872943504e-05, |
| "loss": 0.0561, |
| "step": 220 |
| }, |
| { |
| "epoch": 1.175531914893617, |
| "grad_norm": 0.007438938470380351, |
| "learning_rate": 6.10634841834748e-05, |
| "loss": 0.0632, |
| "step": 221 |
| }, |
| { |
| "epoch": 1.1808510638297873, |
| "grad_norm": 0.006850602723658474, |
| "learning_rate": 6.085237232566368e-05, |
| "loss": 0.049, |
| "step": 222 |
| }, |
| { |
| "epoch": 1.1861702127659575, |
| "grad_norm": 0.007481691357552809, |
| "learning_rate": 6.0640459826701735e-05, |
| "loss": 0.0567, |
| "step": 223 |
| }, |
| { |
| "epoch": 1.1914893617021276, |
| "grad_norm": 0.007013277129439232, |
| "learning_rate": 6.042775482311485e-05, |
| "loss": 0.0618, |
| "step": 224 |
| }, |
| { |
| "epoch": 1.196808510638298, |
| "grad_norm": 0.009522833901214152, |
| "learning_rate": 6.0214265481857665e-05, |
| "loss": 0.0493, |
| "step": 225 |
| }, |
| { |
| "epoch": 1.202127659574468, |
| "grad_norm": 0.0074092384476877465, |
| "learning_rate": 6.000000000000001e-05, |
| "loss": 0.0444, |
| "step": 226 |
| }, |
| { |
| "epoch": 1.2074468085106382, |
| "grad_norm": 0.00775172518924885, |
| "learning_rate": 5.9784966604412135e-05, |
| "loss": 0.043, |
| "step": 227 |
| }, |
| { |
| "epoch": 1.2127659574468086, |
| "grad_norm": 0.006352286950445278, |
| "learning_rate": 5.9569173551448886e-05, |
| "loss": 0.0538, |
| "step": 228 |
| }, |
| { |
| "epoch": 1.2180851063829787, |
| "grad_norm": 0.006666800154229632, |
| "learning_rate": 5.9352629126632704e-05, |
| "loss": 0.0426, |
| "step": 229 |
| }, |
| { |
| "epoch": 1.2234042553191489, |
| "grad_norm": 0.006892876900444898, |
| "learning_rate": 5.913534164433542e-05, |
| "loss": 0.0519, |
| "step": 230 |
| }, |
| { |
| "epoch": 1.2287234042553192, |
| "grad_norm": 0.00885266025009227, |
| "learning_rate": 5.891731944745908e-05, |
| "loss": 0.0516, |
| "step": 231 |
| }, |
| { |
| "epoch": 1.2340425531914894, |
| "grad_norm": 0.007426026820379377, |
| "learning_rate": 5.869857090711562e-05, |
| "loss": 0.0481, |
| "step": 232 |
| }, |
| { |
| "epoch": 1.2393617021276595, |
| "grad_norm": 0.007802746789095517, |
| "learning_rate": 5.8479104422305404e-05, |
| "loss": 0.0464, |
| "step": 233 |
| }, |
| { |
| "epoch": 1.2446808510638299, |
| "grad_norm": 0.006943102486379265, |
| "learning_rate": 5.825892841959481e-05, |
| "loss": 0.0487, |
| "step": 234 |
| }, |
| { |
| "epoch": 1.25, |
| "grad_norm": 0.007706761903675802, |
| "learning_rate": 5.803805135279259e-05, |
| "loss": 0.0428, |
| "step": 235 |
| }, |
| { |
| "epoch": 1.2553191489361701, |
| "grad_norm": 0.007211059835003603, |
| "learning_rate": 5.781648170262537e-05, |
| "loss": 0.0494, |
| "step": 236 |
| }, |
| { |
| "epoch": 1.2606382978723405, |
| "grad_norm": 0.007543229328547115, |
| "learning_rate": 5.7594227976411984e-05, |
| "loss": 0.0489, |
| "step": 237 |
| }, |
| { |
| "epoch": 1.2659574468085106, |
| "grad_norm": 0.006314015580350789, |
| "learning_rate": 5.7371298707736806e-05, |
| "loss": 0.0536, |
| "step": 238 |
| }, |
| { |
| "epoch": 1.2712765957446808, |
| "grad_norm": 0.008153750160550395, |
| "learning_rate": 5.714770245612217e-05, |
| "loss": 0.0547, |
| "step": 239 |
| }, |
| { |
| "epoch": 1.2765957446808511, |
| "grad_norm": 0.011511743306987555, |
| "learning_rate": 5.692344780669966e-05, |
| "loss": 0.0451, |
| "step": 240 |
| }, |
| { |
| "epoch": 1.2819148936170213, |
| "grad_norm": 0.00853883204760107, |
| "learning_rate": 5.6698543369880474e-05, |
| "loss": 0.0502, |
| "step": 241 |
| }, |
| { |
| "epoch": 1.2872340425531914, |
| "grad_norm": 0.009057904567549594, |
| "learning_rate": 5.64729977810249e-05, |
| "loss": 0.0445, |
| "step": 242 |
| }, |
| { |
| "epoch": 1.2925531914893618, |
| "grad_norm": 0.007445609352699238, |
| "learning_rate": 5.6246819700110614e-05, |
| "loss": 0.0503, |
| "step": 243 |
| }, |
| { |
| "epoch": 1.297872340425532, |
| "grad_norm": 0.006586201160698914, |
| "learning_rate": 5.602001781140033e-05, |
| "loss": 0.0577, |
| "step": 244 |
| }, |
| { |
| "epoch": 1.3031914893617023, |
| "grad_norm": 0.007472644541338592, |
| "learning_rate": 5.579260082310828e-05, |
| "loss": 0.056, |
| "step": 245 |
| }, |
| { |
| "epoch": 1.3085106382978724, |
| "grad_norm": 0.007257139726080155, |
| "learning_rate": 5.556457746706583e-05, |
| "loss": 0.0468, |
| "step": 246 |
| }, |
| { |
| "epoch": 1.3138297872340425, |
| "grad_norm": 0.009156575447991255, |
| "learning_rate": 5.5335956498386285e-05, |
| "loss": 0.0476, |
| "step": 247 |
| }, |
| { |
| "epoch": 1.3191489361702127, |
| "grad_norm": 0.007327482792302203, |
| "learning_rate": 5.5106746695128706e-05, |
| "loss": 0.0507, |
| "step": 248 |
| }, |
| { |
| "epoch": 1.324468085106383, |
| "grad_norm": 0.007707712383809691, |
| "learning_rate": 5.4876956857960814e-05, |
| "loss": 0.0491, |
| "step": 249 |
| }, |
| { |
| "epoch": 1.3297872340425532, |
| "grad_norm": 0.0076050764592637725, |
| "learning_rate": 5.464659580982117e-05, |
| "loss": 0.0534, |
| "step": 250 |
| }, |
| { |
| "epoch": 1.3351063829787235, |
| "grad_norm": 0.009228254195647411, |
| "learning_rate": 5.441567239558037e-05, |
| "loss": 0.0497, |
| "step": 251 |
| }, |
| { |
| "epoch": 1.3404255319148937, |
| "grad_norm": 0.008317525932420388, |
| "learning_rate": 5.4184195481701425e-05, |
| "loss": 0.0506, |
| "step": 252 |
| }, |
| { |
| "epoch": 1.3457446808510638, |
| "grad_norm": 0.007269726675371962, |
| "learning_rate": 5.3952173955899376e-05, |
| "loss": 0.0507, |
| "step": 253 |
| }, |
| { |
| "epoch": 1.351063829787234, |
| "grad_norm": 0.007655521519862631, |
| "learning_rate": 5.3719616726799965e-05, |
| "loss": 0.0555, |
| "step": 254 |
| }, |
| { |
| "epoch": 1.3563829787234043, |
| "grad_norm": 0.00696845445343109, |
| "learning_rate": 5.3486532723597666e-05, |
| "loss": 0.0469, |
| "step": 255 |
| }, |
| { |
| "epoch": 1.3617021276595744, |
| "grad_norm": 0.007516971162040113, |
| "learning_rate": 5.32529308957128e-05, |
| "loss": 0.0495, |
| "step": 256 |
| }, |
| { |
| "epoch": 1.3670212765957448, |
| "grad_norm": 0.007463622192694667, |
| "learning_rate": 5.301882021244792e-05, |
| "loss": 0.0456, |
| "step": 257 |
| }, |
| { |
| "epoch": 1.372340425531915, |
| "grad_norm": 0.007664409733036738, |
| "learning_rate": 5.278420966264342e-05, |
| "loss": 0.0579, |
| "step": 258 |
| }, |
| { |
| "epoch": 1.377659574468085, |
| "grad_norm": 0.00783016815834082, |
| "learning_rate": 5.254910825433244e-05, |
| "loss": 0.0373, |
| "step": 259 |
| }, |
| { |
| "epoch": 1.3829787234042552, |
| "grad_norm": 0.007172814232848292, |
| "learning_rate": 5.231352501439493e-05, |
| "loss": 0.0516, |
| "step": 260 |
| }, |
| { |
| "epoch": 1.3882978723404256, |
| "grad_norm": 0.007834863284179953, |
| "learning_rate": 5.2077468988211123e-05, |
| "loss": 0.0467, |
| "step": 261 |
| }, |
| { |
| "epoch": 1.3936170212765957, |
| "grad_norm": 0.008569241469350284, |
| "learning_rate": 5.1840949239314205e-05, |
| "loss": 0.0558, |
| "step": 262 |
| }, |
| { |
| "epoch": 1.398936170212766, |
| "grad_norm": 0.008096639339208428, |
| "learning_rate": 5.1603974849042324e-05, |
| "loss": 0.0582, |
| "step": 263 |
| }, |
| { |
| "epoch": 1.4042553191489362, |
| "grad_norm": 0.008124743275729969, |
| "learning_rate": 5.136655491618985e-05, |
| "loss": 0.0577, |
| "step": 264 |
| }, |
| { |
| "epoch": 1.4095744680851063, |
| "grad_norm": 0.007290690635111368, |
| "learning_rate": 5.112869855665811e-05, |
| "loss": 0.056, |
| "step": 265 |
| }, |
| { |
| "epoch": 1.4148936170212765, |
| "grad_norm": 0.00856218575627906, |
| "learning_rate": 5.0890414903105324e-05, |
| "loss": 0.0517, |
| "step": 266 |
| }, |
| { |
| "epoch": 1.4202127659574468, |
| "grad_norm": 0.008206205489349762, |
| "learning_rate": 5.065171310459594e-05, |
| "loss": 0.0463, |
| "step": 267 |
| }, |
| { |
| "epoch": 1.425531914893617, |
| "grad_norm": 0.008238964930652048, |
| "learning_rate": 5.0412602326249395e-05, |
| "loss": 0.0489, |
| "step": 268 |
| }, |
| { |
| "epoch": 1.4308510638297873, |
| "grad_norm": 0.0075185226531628005, |
| "learning_rate": 5.017309174888818e-05, |
| "loss": 0.0463, |
| "step": 269 |
| }, |
| { |
| "epoch": 1.4361702127659575, |
| "grad_norm": 0.008282159907300468, |
| "learning_rate": 4.993319056868533e-05, |
| "loss": 0.051, |
| "step": 270 |
| }, |
| { |
| "epoch": 1.4414893617021276, |
| "grad_norm": 0.008012100107028653, |
| "learning_rate": 4.969290799681135e-05, |
| "loss": 0.0472, |
| "step": 271 |
| }, |
| { |
| "epoch": 1.4468085106382977, |
| "grad_norm": 0.00855605360209428, |
| "learning_rate": 4.9452253259080543e-05, |
| "loss": 0.0432, |
| "step": 272 |
| }, |
| { |
| "epoch": 1.452127659574468, |
| "grad_norm": 0.00792828627678935, |
| "learning_rate": 4.92112355955968e-05, |
| "loss": 0.0468, |
| "step": 273 |
| }, |
| { |
| "epoch": 1.4574468085106382, |
| "grad_norm": 0.008551324952644062, |
| "learning_rate": 4.896986426039874e-05, |
| "loss": 0.0507, |
| "step": 274 |
| }, |
| { |
| "epoch": 1.4627659574468086, |
| "grad_norm": 0.007022798486635891, |
| "learning_rate": 4.87281485211045e-05, |
| "loss": 0.0516, |
| "step": 275 |
| }, |
| { |
| "epoch": 1.4680851063829787, |
| "grad_norm": 0.0072631916477464125, |
| "learning_rate": 4.848609765855583e-05, |
| "loss": 0.0491, |
| "step": 276 |
| }, |
| { |
| "epoch": 1.4734042553191489, |
| "grad_norm": 0.00762645022153394, |
| "learning_rate": 4.824372096646176e-05, |
| "loss": 0.056, |
| "step": 277 |
| }, |
| { |
| "epoch": 1.4787234042553192, |
| "grad_norm": 0.008364966224500458, |
| "learning_rate": 4.8001027751041784e-05, |
| "loss": 0.0493, |
| "step": 278 |
| }, |
| { |
| "epoch": 1.4840425531914894, |
| "grad_norm": 0.007660362479017892, |
| "learning_rate": 4.7758027330668516e-05, |
| "loss": 0.0518, |
| "step": 279 |
| }, |
| { |
| "epoch": 1.4893617021276595, |
| "grad_norm": 0.008741948051137169, |
| "learning_rate": 4.751472903550991e-05, |
| "loss": 0.0424, |
| "step": 280 |
| }, |
| { |
| "epoch": 1.4946808510638299, |
| "grad_norm": 0.009195165835070018, |
| "learning_rate": 4.727114220717106e-05, |
| "loss": 0.0486, |
| "step": 281 |
| }, |
| { |
| "epoch": 1.5, |
| "grad_norm": 0.006870740251298955, |
| "learning_rate": 4.7027276198335445e-05, |
| "loss": 0.055, |
| "step": 282 |
| }, |
| { |
| "epoch": 1.5053191489361701, |
| "grad_norm": 0.008578177799684878, |
| "learning_rate": 4.67831403724059e-05, |
| "loss": 0.0468, |
| "step": 283 |
| }, |
| { |
| "epoch": 1.5106382978723403, |
| "grad_norm": 0.008031472649917649, |
| "learning_rate": 4.6538744103145054e-05, |
| "loss": 0.0523, |
| "step": 284 |
| }, |
| { |
| "epoch": 1.5159574468085106, |
| "grad_norm": 0.006855126393454216, |
| "learning_rate": 4.629409677431545e-05, |
| "loss": 0.0513, |
| "step": 285 |
| }, |
| { |
| "epoch": 1.521276595744681, |
| "grad_norm": 0.008215860815897723, |
| "learning_rate": 4.6049207779319235e-05, |
| "loss": 0.0569, |
| "step": 286 |
| }, |
| { |
| "epoch": 1.5265957446808511, |
| "grad_norm": 0.007243113754961158, |
| "learning_rate": 4.580408652083748e-05, |
| "loss": 0.0455, |
| "step": 287 |
| }, |
| { |
| "epoch": 1.5319148936170213, |
| "grad_norm": 0.008043727977950582, |
| "learning_rate": 4.555874241046921e-05, |
| "loss": 0.0494, |
| "step": 288 |
| }, |
| { |
| "epoch": 1.5372340425531914, |
| "grad_norm": 0.007988806279024173, |
| "learning_rate": 4.531318486836994e-05, |
| "loss": 0.0518, |
| "step": 289 |
| }, |
| { |
| "epoch": 1.5425531914893615, |
| "grad_norm": 0.007436159316271967, |
| "learning_rate": 4.506742332289009e-05, |
| "loss": 0.0443, |
| "step": 290 |
| }, |
| { |
| "epoch": 1.547872340425532, |
| "grad_norm": 0.009266073593499354, |
| "learning_rate": 4.4821467210212924e-05, |
| "loss": 0.0463, |
| "step": 291 |
| }, |
| { |
| "epoch": 1.5531914893617023, |
| "grad_norm": 0.008189804714642554, |
| "learning_rate": 4.4575325973992244e-05, |
| "loss": 0.0488, |
| "step": 292 |
| }, |
| { |
| "epoch": 1.5585106382978724, |
| "grad_norm": 0.007119226833116723, |
| "learning_rate": 4.432900906498977e-05, |
| "loss": 0.0478, |
| "step": 293 |
| }, |
| { |
| "epoch": 1.5638297872340425, |
| "grad_norm": 0.007821289435175521, |
| "learning_rate": 4.408252594071234e-05, |
| "loss": 0.0535, |
| "step": 294 |
| }, |
| { |
| "epoch": 1.5691489361702127, |
| "grad_norm": 0.007390607277386733, |
| "learning_rate": 4.3835886065048715e-05, |
| "loss": 0.0474, |
| "step": 295 |
| }, |
| { |
| "epoch": 1.574468085106383, |
| "grad_norm": 0.006837441801938445, |
| "learning_rate": 4.3589098907906244e-05, |
| "loss": 0.0483, |
| "step": 296 |
| }, |
| { |
| "epoch": 1.5797872340425532, |
| "grad_norm": 0.007308887707271758, |
| "learning_rate": 4.334217394484726e-05, |
| "loss": 0.0447, |
| "step": 297 |
| }, |
| { |
| "epoch": 1.5851063829787235, |
| "grad_norm": 0.007295937581287956, |
| "learning_rate": 4.309512065672522e-05, |
| "loss": 0.0495, |
| "step": 298 |
| }, |
| { |
| "epoch": 1.5904255319148937, |
| "grad_norm": 0.00846551876084445, |
| "learning_rate": 4.2847948529320726e-05, |
| "loss": 0.0612, |
| "step": 299 |
| }, |
| { |
| "epoch": 1.5957446808510638, |
| "grad_norm": 0.007401897536022008, |
| "learning_rate": 4.260066705297732e-05, |
| "loss": 0.0526, |
| "step": 300 |
| }, |
| { |
| "epoch": 1.601063829787234, |
| "grad_norm": 0.00798220663027418, |
| "learning_rate": 4.235328572223701e-05, |
| "loss": 0.0534, |
| "step": 301 |
| }, |
| { |
| "epoch": 1.6063829787234043, |
| "grad_norm": 0.007259380056195186, |
| "learning_rate": 4.2105814035475845e-05, |
| "loss": 0.0538, |
| "step": 302 |
| }, |
| { |
| "epoch": 1.6117021276595744, |
| "grad_norm": 0.007349829765282824, |
| "learning_rate": 4.1858261494539135e-05, |
| "loss": 0.0425, |
| "step": 303 |
| }, |
| { |
| "epoch": 1.6170212765957448, |
| "grad_norm": 0.008042118588200196, |
| "learning_rate": 4.1610637604376614e-05, |
| "loss": 0.0466, |
| "step": 304 |
| }, |
| { |
| "epoch": 1.622340425531915, |
| "grad_norm": 0.007362785461224037, |
| "learning_rate": 4.136295187267756e-05, |
| "loss": 0.0544, |
| "step": 305 |
| }, |
| { |
| "epoch": 1.627659574468085, |
| "grad_norm": 0.00700720200664349, |
| "learning_rate": 4.1115213809505665e-05, |
| "loss": 0.0444, |
| "step": 306 |
| }, |
| { |
| "epoch": 1.6329787234042552, |
| "grad_norm": 0.007022803980757569, |
| "learning_rate": 4.0867432926933953e-05, |
| "loss": 0.0558, |
| "step": 307 |
| }, |
| { |
| "epoch": 1.6382978723404256, |
| "grad_norm": 0.012575450182603807, |
| "learning_rate": 4.061961873867951e-05, |
| "loss": 0.0468, |
| "step": 308 |
| }, |
| { |
| "epoch": 1.6436170212765957, |
| "grad_norm": 0.007191254294197074, |
| "learning_rate": 4.037178075973823e-05, |
| "loss": 0.0469, |
| "step": 309 |
| }, |
| { |
| "epoch": 1.648936170212766, |
| "grad_norm": 0.007534556742102712, |
| "learning_rate": 4.012392850601944e-05, |
| "loss": 0.0497, |
| "step": 310 |
| }, |
| { |
| "epoch": 1.6542553191489362, |
| "grad_norm": 0.006969818752127919, |
| "learning_rate": 3.987607149398057e-05, |
| "loss": 0.0596, |
| "step": 311 |
| }, |
| { |
| "epoch": 1.6595744680851063, |
| "grad_norm": 0.007097540428956701, |
| "learning_rate": 3.962821924026178e-05, |
| "loss": 0.0465, |
| "step": 312 |
| }, |
| { |
| "epoch": 1.6648936170212765, |
| "grad_norm": 0.008519404376125157, |
| "learning_rate": 3.9380381261320494e-05, |
| "loss": 0.0475, |
| "step": 313 |
| }, |
| { |
| "epoch": 1.6702127659574468, |
| "grad_norm": 0.008674073129702996, |
| "learning_rate": 3.9132567073066046e-05, |
| "loss": 0.0551, |
| "step": 314 |
| }, |
| { |
| "epoch": 1.675531914893617, |
| "grad_norm": 0.007695187369607825, |
| "learning_rate": 3.888478619049434e-05, |
| "loss": 0.0414, |
| "step": 315 |
| }, |
| { |
| "epoch": 1.6808510638297873, |
| "grad_norm": 0.007509965084779445, |
| "learning_rate": 3.863704812732245e-05, |
| "loss": 0.0484, |
| "step": 316 |
| }, |
| { |
| "epoch": 1.6861702127659575, |
| "grad_norm": 0.007123230923554642, |
| "learning_rate": 3.8389362395623406e-05, |
| "loss": 0.0501, |
| "step": 317 |
| }, |
| { |
| "epoch": 1.6914893617021276, |
| "grad_norm": 0.010084369976221577, |
| "learning_rate": 3.8141738505460886e-05, |
| "loss": 0.0536, |
| "step": 318 |
| }, |
| { |
| "epoch": 1.6968085106382977, |
| "grad_norm": 0.009145261110231229, |
| "learning_rate": 3.789418596452417e-05, |
| "loss": 0.0533, |
| "step": 319 |
| }, |
| { |
| "epoch": 1.702127659574468, |
| "grad_norm": 0.007196833882217448, |
| "learning_rate": 3.7646714277763006e-05, |
| "loss": 0.0477, |
| "step": 320 |
| }, |
| { |
| "epoch": 1.7074468085106385, |
| "grad_norm": 0.009410482252106153, |
| "learning_rate": 3.73993329470227e-05, |
| "loss": 0.0527, |
| "step": 321 |
| }, |
| { |
| "epoch": 1.7127659574468086, |
| "grad_norm": 0.00717174797623846, |
| "learning_rate": 3.715205147067928e-05, |
| "loss": 0.0502, |
| "step": 322 |
| }, |
| { |
| "epoch": 1.7180851063829787, |
| "grad_norm": 0.008029838692958749, |
| "learning_rate": 3.6904879343274795e-05, |
| "loss": 0.0516, |
| "step": 323 |
| }, |
| { |
| "epoch": 1.7234042553191489, |
| "grad_norm": 0.0071183732774290085, |
| "learning_rate": 3.6657826055152746e-05, |
| "loss": 0.0462, |
| "step": 324 |
| }, |
| { |
| "epoch": 1.728723404255319, |
| "grad_norm": 0.00829856769830592, |
| "learning_rate": 3.641090109209376e-05, |
| "loss": 0.0475, |
| "step": 325 |
| }, |
| { |
| "epoch": 1.7340425531914894, |
| "grad_norm": 0.007343993690542114, |
| "learning_rate": 3.61641139349513e-05, |
| "loss": 0.0499, |
| "step": 326 |
| }, |
| { |
| "epoch": 1.7393617021276597, |
| "grad_norm": 0.008294024215736651, |
| "learning_rate": 3.591747405928767e-05, |
| "loss": 0.0458, |
| "step": 327 |
| }, |
| { |
| "epoch": 1.7446808510638299, |
| "grad_norm": 0.007763046969499534, |
| "learning_rate": 3.567099093501026e-05, |
| "loss": 0.0467, |
| "step": 328 |
| }, |
| { |
| "epoch": 1.75, |
| "grad_norm": 0.007259069002723228, |
| "learning_rate": 3.5424674026007776e-05, |
| "loss": 0.0445, |
| "step": 329 |
| }, |
| { |
| "epoch": 1.7553191489361701, |
| "grad_norm": 0.007555182759201945, |
| "learning_rate": 3.517853278978708e-05, |
| "loss": 0.0474, |
| "step": 330 |
| }, |
| { |
| "epoch": 1.7606382978723403, |
| "grad_norm": 0.012179287759593998, |
| "learning_rate": 3.4932576677109924e-05, |
| "loss": 0.0491, |
| "step": 331 |
| }, |
| { |
| "epoch": 1.7659574468085106, |
| "grad_norm": 0.007444042633552802, |
| "learning_rate": 3.4686815131630074e-05, |
| "loss": 0.0468, |
| "step": 332 |
| }, |
| { |
| "epoch": 1.771276595744681, |
| "grad_norm": 0.007464691217925324, |
| "learning_rate": 3.4441257589530805e-05, |
| "loss": 0.0508, |
| "step": 333 |
| }, |
| { |
| "epoch": 1.7765957446808511, |
| "grad_norm": 0.007977839672549674, |
| "learning_rate": 3.4195913479162525e-05, |
| "loss": 0.0453, |
| "step": 334 |
| }, |
| { |
| "epoch": 1.7819148936170213, |
| "grad_norm": 0.006747884469881696, |
| "learning_rate": 3.395079222068077e-05, |
| "loss": 0.0422, |
| "step": 335 |
| }, |
| { |
| "epoch": 1.7872340425531914, |
| "grad_norm": 0.007034889146744127, |
| "learning_rate": 3.3705903225684556e-05, |
| "loss": 0.0487, |
| "step": 336 |
| }, |
| { |
| "epoch": 1.7925531914893615, |
| "grad_norm": 0.00672190286272762, |
| "learning_rate": 3.346125589685495e-05, |
| "loss": 0.0535, |
| "step": 337 |
| }, |
| { |
| "epoch": 1.797872340425532, |
| "grad_norm": 0.006930155344559242, |
| "learning_rate": 3.3216859627594105e-05, |
| "loss": 0.0481, |
| "step": 338 |
| }, |
| { |
| "epoch": 1.8031914893617023, |
| "grad_norm": 0.007067552128162008, |
| "learning_rate": 3.297272380166457e-05, |
| "loss": 0.0433, |
| "step": 339 |
| }, |
| { |
| "epoch": 1.8085106382978724, |
| "grad_norm": 0.007696980178429048, |
| "learning_rate": 3.272885779282895e-05, |
| "loss": 0.0502, |
| "step": 340 |
| }, |
| { |
| "epoch": 1.8138297872340425, |
| "grad_norm": 0.007574319564968359, |
| "learning_rate": 3.24852709644901e-05, |
| "loss": 0.0472, |
| "step": 341 |
| }, |
| { |
| "epoch": 1.8191489361702127, |
| "grad_norm": 0.006921926170628345, |
| "learning_rate": 3.22419726693315e-05, |
| "loss": 0.0512, |
| "step": 342 |
| }, |
| { |
| "epoch": 1.824468085106383, |
| "grad_norm": 0.0075017973398598635, |
| "learning_rate": 3.199897224895823e-05, |
| "loss": 0.0481, |
| "step": 343 |
| }, |
| { |
| "epoch": 1.8297872340425532, |
| "grad_norm": 0.006909825712198796, |
| "learning_rate": 3.175627903353825e-05, |
| "loss": 0.0526, |
| "step": 344 |
| }, |
| { |
| "epoch": 1.8351063829787235, |
| "grad_norm": 0.007775933195755259, |
| "learning_rate": 3.151390234144418e-05, |
| "loss": 0.058, |
| "step": 345 |
| }, |
| { |
| "epoch": 1.8404255319148937, |
| "grad_norm": 0.007735187419835471, |
| "learning_rate": 3.127185147889551e-05, |
| "loss": 0.0524, |
| "step": 346 |
| }, |
| { |
| "epoch": 1.8457446808510638, |
| "grad_norm": 0.0070363906680244205, |
| "learning_rate": 3.103013573960127e-05, |
| "loss": 0.0465, |
| "step": 347 |
| }, |
| { |
| "epoch": 1.851063829787234, |
| "grad_norm": 0.006942069954003131, |
| "learning_rate": 3.078876440440321e-05, |
| "loss": 0.0509, |
| "step": 348 |
| }, |
| { |
| "epoch": 1.8563829787234043, |
| "grad_norm": 0.008011624404221132, |
| "learning_rate": 3.0547746740919456e-05, |
| "loss": 0.0519, |
| "step": 349 |
| }, |
| { |
| "epoch": 1.8617021276595744, |
| "grad_norm": 0.0067273591805839745, |
| "learning_rate": 3.0307092003188664e-05, |
| "loss": 0.043, |
| "step": 350 |
| }, |
| { |
| "epoch": 1.8670212765957448, |
| "grad_norm": 0.007172208235260311, |
| "learning_rate": 3.006680943131469e-05, |
| "loss": 0.0449, |
| "step": 351 |
| }, |
| { |
| "epoch": 1.872340425531915, |
| "grad_norm": 0.008024510395719497, |
| "learning_rate": 2.982690825111183e-05, |
| "loss": 0.0532, |
| "step": 352 |
| }, |
| { |
| "epoch": 1.877659574468085, |
| "grad_norm": 0.007389548490341114, |
| "learning_rate": 2.9587397673750612e-05, |
| "loss": 0.0467, |
| "step": 353 |
| }, |
| { |
| "epoch": 1.8829787234042552, |
| "grad_norm": 0.007275097089013663, |
| "learning_rate": 2.934828689540407e-05, |
| "loss": 0.0502, |
| "step": 354 |
| }, |
| { |
| "epoch": 1.8882978723404256, |
| "grad_norm": 0.007710632175226575, |
| "learning_rate": 2.9109585096894686e-05, |
| "loss": 0.051, |
| "step": 355 |
| }, |
| { |
| "epoch": 1.8936170212765957, |
| "grad_norm": 0.007252806596524295, |
| "learning_rate": 2.88713014433419e-05, |
| "loss": 0.0556, |
| "step": 356 |
| }, |
| { |
| "epoch": 1.898936170212766, |
| "grad_norm": 0.007428786424139136, |
| "learning_rate": 2.8633445083810162e-05, |
| "loss": 0.0456, |
| "step": 357 |
| }, |
| { |
| "epoch": 1.9042553191489362, |
| "grad_norm": 0.006836357909143644, |
| "learning_rate": 2.8396025150957682e-05, |
| "loss": 0.0431, |
| "step": 358 |
| }, |
| { |
| "epoch": 1.9095744680851063, |
| "grad_norm": 0.0071721300641603035, |
| "learning_rate": 2.815905076068579e-05, |
| "loss": 0.0395, |
| "step": 359 |
| }, |
| { |
| "epoch": 1.9148936170212765, |
| "grad_norm": 0.006928447603829107, |
| "learning_rate": 2.7922531011788883e-05, |
| "loss": 0.0487, |
| "step": 360 |
| }, |
| { |
| "epoch": 1.9202127659574468, |
| "grad_norm": 0.008523718885092514, |
| "learning_rate": 2.7686474985605093e-05, |
| "loss": 0.051, |
| "step": 361 |
| }, |
| { |
| "epoch": 1.925531914893617, |
| "grad_norm": 0.00891085144760899, |
| "learning_rate": 2.7450891745667587e-05, |
| "loss": 0.045, |
| "step": 362 |
| }, |
| { |
| "epoch": 1.9308510638297873, |
| "grad_norm": 0.008389640488944299, |
| "learning_rate": 2.721579033735659e-05, |
| "loss": 0.0571, |
| "step": 363 |
| }, |
| { |
| "epoch": 1.9361702127659575, |
| "grad_norm": 0.008207695773768032, |
| "learning_rate": 2.6981179787552096e-05, |
| "loss": 0.0484, |
| "step": 364 |
| }, |
| { |
| "epoch": 1.9414893617021276, |
| "grad_norm": 0.007079156557469271, |
| "learning_rate": 2.6747069104287217e-05, |
| "loss": 0.0452, |
| "step": 365 |
| }, |
| { |
| "epoch": 1.9468085106382977, |
| "grad_norm": 0.007715298382046255, |
| "learning_rate": 2.6513467276402344e-05, |
| "loss": 0.0466, |
| "step": 366 |
| }, |
| { |
| "epoch": 1.952127659574468, |
| "grad_norm": 0.008121735936676599, |
| "learning_rate": 2.628038327320005e-05, |
| "loss": 0.0451, |
| "step": 367 |
| }, |
| { |
| "epoch": 1.9574468085106385, |
| "grad_norm": 0.007148539443633158, |
| "learning_rate": 2.6047826044100634e-05, |
| "loss": 0.0443, |
| "step": 368 |
| }, |
| { |
| "epoch": 1.9627659574468086, |
| "grad_norm": 0.007518474975656456, |
| "learning_rate": 2.5815804518298575e-05, |
| "loss": 0.0462, |
| "step": 369 |
| }, |
| { |
| "epoch": 1.9680851063829787, |
| "grad_norm": 0.007694403765899983, |
| "learning_rate": 2.5584327604419637e-05, |
| "loss": 0.0433, |
| "step": 370 |
| }, |
| { |
| "epoch": 1.9734042553191489, |
| "grad_norm": 0.007570891321851904, |
| "learning_rate": 2.535340419017883e-05, |
| "loss": 0.0479, |
| "step": 371 |
| }, |
| { |
| "epoch": 1.978723404255319, |
| "grad_norm": 0.00802025866586826, |
| "learning_rate": 2.5123043142039206e-05, |
| "loss": 0.0498, |
| "step": 372 |
| }, |
| { |
| "epoch": 1.9840425531914894, |
| "grad_norm": 0.0067897658824606745, |
| "learning_rate": 2.489325330487131e-05, |
| "loss": 0.0471, |
| "step": 373 |
| }, |
| { |
| "epoch": 1.9893617021276597, |
| "grad_norm": 0.007205355447198536, |
| "learning_rate": 2.466404350161372e-05, |
| "loss": 0.0543, |
| "step": 374 |
| }, |
| { |
| "epoch": 1.9946808510638299, |
| "grad_norm": 0.00815752426628928, |
| "learning_rate": 2.4435422532934184e-05, |
| "loss": 0.0515, |
| "step": 375 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 0.006969869441843902, |
| "learning_rate": 2.420739917689173e-05, |
| "loss": 0.0387, |
| "step": 376 |
| }, |
| { |
| "epoch": 2.00531914893617, |
| "grad_norm": 0.007641821101807204, |
| "learning_rate": 2.3979982188599677e-05, |
| "loss": 0.043, |
| "step": 377 |
| }, |
| { |
| "epoch": 2.0106382978723403, |
| "grad_norm": 0.0069585505525113145, |
| "learning_rate": 2.37531802998894e-05, |
| "loss": 0.0468, |
| "step": 378 |
| }, |
| { |
| "epoch": 2.015957446808511, |
| "grad_norm": 0.0075195061411692556, |
| "learning_rate": 2.3527002218975116e-05, |
| "loss": 0.0455, |
| "step": 379 |
| }, |
| { |
| "epoch": 2.021276595744681, |
| "grad_norm": 0.00813720400709436, |
| "learning_rate": 2.3301456630119522e-05, |
| "loss": 0.0422, |
| "step": 380 |
| }, |
| { |
| "epoch": 2.026595744680851, |
| "grad_norm": 0.007634520737450374, |
| "learning_rate": 2.307655219330035e-05, |
| "loss": 0.0463, |
| "step": 381 |
| }, |
| { |
| "epoch": 2.0319148936170213, |
| "grad_norm": 0.008195984663012319, |
| "learning_rate": 2.285229754387783e-05, |
| "loss": 0.0486, |
| "step": 382 |
| }, |
| { |
| "epoch": 2.0372340425531914, |
| "grad_norm": 0.00649339655363922, |
| "learning_rate": 2.2628701292263208e-05, |
| "loss": 0.0489, |
| "step": 383 |
| }, |
| { |
| "epoch": 2.0425531914893615, |
| "grad_norm": 0.007267282560587648, |
| "learning_rate": 2.2405772023588046e-05, |
| "loss": 0.0376, |
| "step": 384 |
| }, |
| { |
| "epoch": 2.047872340425532, |
| "grad_norm": 0.007429823140616243, |
| "learning_rate": 2.2183518297374644e-05, |
| "loss": 0.0453, |
| "step": 385 |
| }, |
| { |
| "epoch": 2.0531914893617023, |
| "grad_norm": 0.007458587644752268, |
| "learning_rate": 2.1961948647207418e-05, |
| "loss": 0.046, |
| "step": 386 |
| }, |
| { |
| "epoch": 2.0585106382978724, |
| "grad_norm": 0.00714444202545274, |
| "learning_rate": 2.1741071580405212e-05, |
| "loss": 0.0493, |
| "step": 387 |
| }, |
| { |
| "epoch": 2.0638297872340425, |
| "grad_norm": 0.007610912778577067, |
| "learning_rate": 2.1520895577694602e-05, |
| "loss": 0.0454, |
| "step": 388 |
| }, |
| { |
| "epoch": 2.0691489361702127, |
| "grad_norm": 0.007106107083579986, |
| "learning_rate": 2.1301429092884395e-05, |
| "loss": 0.0526, |
| "step": 389 |
| }, |
| { |
| "epoch": 2.074468085106383, |
| "grad_norm": 0.00742256244822804, |
| "learning_rate": 2.1082680552540936e-05, |
| "loss": 0.0374, |
| "step": 390 |
| }, |
| { |
| "epoch": 2.0797872340425534, |
| "grad_norm": 0.007135154313474544, |
| "learning_rate": 2.0864658355664593e-05, |
| "loss": 0.0496, |
| "step": 391 |
| }, |
| { |
| "epoch": 2.0851063829787235, |
| "grad_norm": 0.007520715390985864, |
| "learning_rate": 2.06473708733673e-05, |
| "loss": 0.0451, |
| "step": 392 |
| }, |
| { |
| "epoch": 2.0904255319148937, |
| "grad_norm": 0.007988806137504299, |
| "learning_rate": 2.0430826448551104e-05, |
| "loss": 0.0475, |
| "step": 393 |
| }, |
| { |
| "epoch": 2.095744680851064, |
| "grad_norm": 0.008077394194271862, |
| "learning_rate": 2.0215033395587892e-05, |
| "loss": 0.0485, |
| "step": 394 |
| }, |
| { |
| "epoch": 2.101063829787234, |
| "grad_norm": 0.0072320841603180554, |
| "learning_rate": 2.0000000000000012e-05, |
| "loss": 0.0454, |
| "step": 395 |
| }, |
| { |
| "epoch": 2.106382978723404, |
| "grad_norm": 0.0080966147887127, |
| "learning_rate": 1.978573451814234e-05, |
| "loss": 0.0408, |
| "step": 396 |
| }, |
| { |
| "epoch": 2.1117021276595747, |
| "grad_norm": 0.0076982164769785425, |
| "learning_rate": 1.9572245176885167e-05, |
| "loss": 0.0527, |
| "step": 397 |
| }, |
| { |
| "epoch": 2.117021276595745, |
| "grad_norm": 0.00729887150694859, |
| "learning_rate": 1.935954017329828e-05, |
| "loss": 0.0464, |
| "step": 398 |
| }, |
| { |
| "epoch": 2.122340425531915, |
| "grad_norm": 0.0070619503426544855, |
| "learning_rate": 1.9147627674336333e-05, |
| "loss": 0.0498, |
| "step": 399 |
| }, |
| { |
| "epoch": 2.127659574468085, |
| "grad_norm": 0.007420155412071844, |
| "learning_rate": 1.8936515816525218e-05, |
| "loss": 0.0487, |
| "step": 400 |
| }, |
| { |
| "epoch": 2.132978723404255, |
| "grad_norm": 0.007820534557012846, |
| "learning_rate": 1.872621270564962e-05, |
| "loss": 0.0477, |
| "step": 401 |
| }, |
| { |
| "epoch": 2.1382978723404253, |
| "grad_norm": 0.007218411792762793, |
| "learning_rate": 1.8516726416441857e-05, |
| "loss": 0.052, |
| "step": 402 |
| }, |
| { |
| "epoch": 2.143617021276596, |
| "grad_norm": 0.007296897671548496, |
| "learning_rate": 1.830806499227185e-05, |
| "loss": 0.0488, |
| "step": 403 |
| }, |
| { |
| "epoch": 2.148936170212766, |
| "grad_norm": 0.007922574415107773, |
| "learning_rate": 1.8100236444838192e-05, |
| "loss": 0.0441, |
| "step": 404 |
| }, |
| { |
| "epoch": 2.154255319148936, |
| "grad_norm": 0.007045423651472144, |
| "learning_rate": 1.7893248753860666e-05, |
| "loss": 0.042, |
| "step": 405 |
| }, |
| { |
| "epoch": 2.1595744680851063, |
| "grad_norm": 0.00800781372853001, |
| "learning_rate": 1.7687109866773738e-05, |
| "loss": 0.0451, |
| "step": 406 |
| }, |
| { |
| "epoch": 2.1648936170212765, |
| "grad_norm": 0.0068107841608532685, |
| "learning_rate": 1.7481827698421525e-05, |
| "loss": 0.0496, |
| "step": 407 |
| }, |
| { |
| "epoch": 2.1702127659574466, |
| "grad_norm": 0.007008890657305554, |
| "learning_rate": 1.7277410130753775e-05, |
| "loss": 0.0475, |
| "step": 408 |
| }, |
| { |
| "epoch": 2.175531914893617, |
| "grad_norm": 0.007923877620411671, |
| "learning_rate": 1.7073865012523355e-05, |
| "loss": 0.0394, |
| "step": 409 |
| }, |
| { |
| "epoch": 2.1808510638297873, |
| "grad_norm": 0.007772358651908587, |
| "learning_rate": 1.6871200158984823e-05, |
| "loss": 0.0522, |
| "step": 410 |
| }, |
| { |
| "epoch": 2.1861702127659575, |
| "grad_norm": 0.00742861948687516, |
| "learning_rate": 1.666942335159434e-05, |
| "loss": 0.0471, |
| "step": 411 |
| }, |
| { |
| "epoch": 2.1914893617021276, |
| "grad_norm": 0.007155063584429264, |
| "learning_rate": 1.6468542337710957e-05, |
| "loss": 0.0509, |
| "step": 412 |
| }, |
| { |
| "epoch": 2.1968085106382977, |
| "grad_norm": 0.007321991214983998, |
| "learning_rate": 1.6268564830299127e-05, |
| "loss": 0.0507, |
| "step": 413 |
| }, |
| { |
| "epoch": 2.202127659574468, |
| "grad_norm": 0.0072748688346314745, |
| "learning_rate": 1.60694985076325e-05, |
| "loss": 0.0538, |
| "step": 414 |
| }, |
| { |
| "epoch": 2.2074468085106385, |
| "grad_norm": 0.008656752935932507, |
| "learning_rate": 1.587135101299922e-05, |
| "loss": 0.0507, |
| "step": 415 |
| }, |
| { |
| "epoch": 2.2127659574468086, |
| "grad_norm": 0.00924542200939376, |
| "learning_rate": 1.5674129954408375e-05, |
| "loss": 0.0464, |
| "step": 416 |
| }, |
| { |
| "epoch": 2.2180851063829787, |
| "grad_norm": 0.007745356691939634, |
| "learning_rate": 1.5477842904297898e-05, |
| "loss": 0.0633, |
| "step": 417 |
| }, |
| { |
| "epoch": 2.223404255319149, |
| "grad_norm": 0.007860243946587438, |
| "learning_rate": 1.5282497399243816e-05, |
| "loss": 0.0401, |
| "step": 418 |
| }, |
| { |
| "epoch": 2.228723404255319, |
| "grad_norm": 0.007138094306896187, |
| "learning_rate": 1.5088100939670912e-05, |
| "loss": 0.0377, |
| "step": 419 |
| }, |
| { |
| "epoch": 2.2340425531914896, |
| "grad_norm": 0.007449639511012579, |
| "learning_rate": 1.4894660989564727e-05, |
| "loss": 0.0454, |
| "step": 420 |
| }, |
| { |
| "epoch": 2.2393617021276597, |
| "grad_norm": 0.00863829635053251, |
| "learning_rate": 1.4702184976184915e-05, |
| "loss": 0.0408, |
| "step": 421 |
| }, |
| { |
| "epoch": 2.24468085106383, |
| "grad_norm": 0.008050264193689701, |
| "learning_rate": 1.4510680289780154e-05, |
| "loss": 0.0433, |
| "step": 422 |
| }, |
| { |
| "epoch": 2.25, |
| "grad_norm": 0.008330281272365877, |
| "learning_rate": 1.4320154283304372e-05, |
| "loss": 0.0404, |
| "step": 423 |
| }, |
| { |
| "epoch": 2.25531914893617, |
| "grad_norm": 0.007946622399455964, |
| "learning_rate": 1.4130614272134349e-05, |
| "loss": 0.048, |
| "step": 424 |
| }, |
| { |
| "epoch": 2.2606382978723403, |
| "grad_norm": 0.007123582123563734, |
| "learning_rate": 1.3942067533788955e-05, |
| "loss": 0.0478, |
| "step": 425 |
| }, |
| { |
| "epoch": 2.2659574468085104, |
| "grad_norm": 0.0074418286987616164, |
| "learning_rate": 1.3754521307649657e-05, |
| "loss": 0.0543, |
| "step": 426 |
| }, |
| { |
| "epoch": 2.271276595744681, |
| "grad_norm": 0.007960751003786235, |
| "learning_rate": 1.3567982794682552e-05, |
| "loss": 0.0427, |
| "step": 427 |
| }, |
| { |
| "epoch": 2.276595744680851, |
| "grad_norm": 0.007567966749385251, |
| "learning_rate": 1.3382459157161888e-05, |
| "loss": 0.039, |
| "step": 428 |
| }, |
| { |
| "epoch": 2.2819148936170213, |
| "grad_norm": 0.007716592908574747, |
| "learning_rate": 1.3197957518395108e-05, |
| "loss": 0.0491, |
| "step": 429 |
| }, |
| { |
| "epoch": 2.2872340425531914, |
| "grad_norm": 0.0082928249358766, |
| "learning_rate": 1.3014484962449303e-05, |
| "loss": 0.0405, |
| "step": 430 |
| }, |
| { |
| "epoch": 2.2925531914893615, |
| "grad_norm": 0.007784102466322947, |
| "learning_rate": 1.2832048533879196e-05, |
| "loss": 0.0405, |
| "step": 431 |
| }, |
| { |
| "epoch": 2.297872340425532, |
| "grad_norm": 0.007009556798014148, |
| "learning_rate": 1.2650655237456713e-05, |
| "loss": 0.0493, |
| "step": 432 |
| }, |
| { |
| "epoch": 2.3031914893617023, |
| "grad_norm": 0.0077700307231731375, |
| "learning_rate": 1.247031203790201e-05, |
| "loss": 0.0443, |
| "step": 433 |
| }, |
| { |
| "epoch": 2.3085106382978724, |
| "grad_norm": 0.007614954353149061, |
| "learning_rate": 1.2291025859616026e-05, |
| "loss": 0.0501, |
| "step": 434 |
| }, |
| { |
| "epoch": 2.3138297872340425, |
| "grad_norm": 0.0077957631930139935, |
| "learning_rate": 1.2112803586414659e-05, |
| "loss": 0.0378, |
| "step": 435 |
| }, |
| { |
| "epoch": 2.3191489361702127, |
| "grad_norm": 0.007688997462919743, |
| "learning_rate": 1.1935652061264462e-05, |
| "loss": 0.0451, |
| "step": 436 |
| }, |
| { |
| "epoch": 2.324468085106383, |
| "grad_norm": 0.007537182804493993, |
| "learning_rate": 1.1759578086019828e-05, |
| "loss": 0.0473, |
| "step": 437 |
| }, |
| { |
| "epoch": 2.329787234042553, |
| "grad_norm": 0.007814525035613123, |
| "learning_rate": 1.1584588421161942e-05, |
| "loss": 0.0479, |
| "step": 438 |
| }, |
| { |
| "epoch": 2.3351063829787235, |
| "grad_norm": 0.008107300034686318, |
| "learning_rate": 1.1410689785539088e-05, |
| "loss": 0.0427, |
| "step": 439 |
| }, |
| { |
| "epoch": 2.3404255319148937, |
| "grad_norm": 0.00797543313708522, |
| "learning_rate": 1.1237888856108797e-05, |
| "loss": 0.049, |
| "step": 440 |
| }, |
| { |
| "epoch": 2.345744680851064, |
| "grad_norm": 0.007457273214884273, |
| "learning_rate": 1.1066192267681353e-05, |
| "loss": 0.0497, |
| "step": 441 |
| }, |
| { |
| "epoch": 2.351063829787234, |
| "grad_norm": 0.008730312811656617, |
| "learning_rate": 1.0895606612665156e-05, |
| "loss": 0.0466, |
| "step": 442 |
| }, |
| { |
| "epoch": 2.356382978723404, |
| "grad_norm": 0.007640959140015427, |
| "learning_rate": 1.0726138440813552e-05, |
| "loss": 0.0419, |
| "step": 443 |
| }, |
| { |
| "epoch": 2.3617021276595747, |
| "grad_norm": 0.00804034501032493, |
| "learning_rate": 1.0557794258973311e-05, |
| "loss": 0.0504, |
| "step": 444 |
| }, |
| { |
| "epoch": 2.367021276595745, |
| "grad_norm": 0.007206012074055382, |
| "learning_rate": 1.0390580530834881e-05, |
| "loss": 0.047, |
| "step": 445 |
| }, |
| { |
| "epoch": 2.372340425531915, |
| "grad_norm": 0.008224236412003825, |
| "learning_rate": 1.0224503676684162e-05, |
| "loss": 0.042, |
| "step": 446 |
| }, |
| { |
| "epoch": 2.377659574468085, |
| "grad_norm": 0.007589580554041486, |
| "learning_rate": 1.0059570073155953e-05, |
| "loss": 0.0503, |
| "step": 447 |
| }, |
| { |
| "epoch": 2.382978723404255, |
| "grad_norm": 0.00769738321425147, |
| "learning_rate": 9.895786052989207e-06, |
| "loss": 0.0508, |
| "step": 448 |
| }, |
| { |
| "epoch": 2.3882978723404253, |
| "grad_norm": 0.0076871651718464174, |
| "learning_rate": 9.733157904783841e-06, |
| "loss": 0.0502, |
| "step": 449 |
| }, |
| { |
| "epoch": 2.393617021276596, |
| "grad_norm": 0.008023067838390211, |
| "learning_rate": 9.571691872759202e-06, |
| "loss": 0.0392, |
| "step": 450 |
| }, |
| { |
| "epoch": 2.398936170212766, |
| "grad_norm": 0.0071530917733077, |
| "learning_rate": 9.41139415651446e-06, |
| "loss": 0.0456, |
| "step": 451 |
| }, |
| { |
| "epoch": 2.404255319148936, |
| "grad_norm": 0.008421758545468476, |
| "learning_rate": 9.252270910790494e-06, |
| "loss": 0.0502, |
| "step": 452 |
| }, |
| { |
| "epoch": 2.4095744680851063, |
| "grad_norm": 0.007786267383422607, |
| "learning_rate": 9.094328245233535e-06, |
| "loss": 0.0461, |
| "step": 453 |
| }, |
| { |
| "epoch": 2.4148936170212765, |
| "grad_norm": 0.007493396020047423, |
| "learning_rate": 8.937572224160665e-06, |
| "loss": 0.0449, |
| "step": 454 |
| }, |
| { |
| "epoch": 2.420212765957447, |
| "grad_norm": 0.007854079490505598, |
| "learning_rate": 8.782008866326937e-06, |
| "loss": 0.0449, |
| "step": 455 |
| }, |
| { |
| "epoch": 2.425531914893617, |
| "grad_norm": 0.007700652069375009, |
| "learning_rate": 8.627644144694272e-06, |
| "loss": 0.0448, |
| "step": 456 |
| }, |
| { |
| "epoch": 2.4308510638297873, |
| "grad_norm": 0.008265942886963144, |
| "learning_rate": 8.474483986202116e-06, |
| "loss": 0.0445, |
| "step": 457 |
| }, |
| { |
| "epoch": 2.4361702127659575, |
| "grad_norm": 0.007760458129907413, |
| "learning_rate": 8.322534271539906e-06, |
| "loss": 0.0439, |
| "step": 458 |
| }, |
| { |
| "epoch": 2.4414893617021276, |
| "grad_norm": 0.007591266958286359, |
| "learning_rate": 8.171800834921266e-06, |
| "loss": 0.0517, |
| "step": 459 |
| }, |
| { |
| "epoch": 2.4468085106382977, |
| "grad_norm": 0.00838634154686241, |
| "learning_rate": 8.022289463859963e-06, |
| "loss": 0.0447, |
| "step": 460 |
| }, |
| { |
| "epoch": 2.452127659574468, |
| "grad_norm": 0.008396404666797915, |
| "learning_rate": 7.874005898947717e-06, |
| "loss": 0.0514, |
| "step": 461 |
| }, |
| { |
| "epoch": 2.4574468085106385, |
| "grad_norm": 0.007978502130917645, |
| "learning_rate": 7.726955833633827e-06, |
| "loss": 0.0426, |
| "step": 462 |
| }, |
| { |
| "epoch": 2.4627659574468086, |
| "grad_norm": 0.007702552101854744, |
| "learning_rate": 7.5811449140064775e-06, |
| "loss": 0.0523, |
| "step": 463 |
| }, |
| { |
| "epoch": 2.4680851063829787, |
| "grad_norm": 0.007750915035280797, |
| "learning_rate": 7.436578738576039e-06, |
| "loss": 0.0441, |
| "step": 464 |
| }, |
| { |
| "epoch": 2.473404255319149, |
| "grad_norm": 0.0078023184043162575, |
| "learning_rate": 7.293262858060073e-06, |
| "loss": 0.0484, |
| "step": 465 |
| }, |
| { |
| "epoch": 2.478723404255319, |
| "grad_norm": 0.007185255606789849, |
| "learning_rate": 7.151202775170181e-06, |
| "loss": 0.0418, |
| "step": 466 |
| }, |
| { |
| "epoch": 2.4840425531914896, |
| "grad_norm": 0.007436257195950586, |
| "learning_rate": 7.0104039444007835e-06, |
| "loss": 0.0409, |
| "step": 467 |
| }, |
| { |
| "epoch": 2.4893617021276597, |
| "grad_norm": 0.007326583992310838, |
| "learning_rate": 6.870871771819656e-06, |
| "loss": 0.0459, |
| "step": 468 |
| }, |
| { |
| "epoch": 2.49468085106383, |
| "grad_norm": 0.007463106319396288, |
| "learning_rate": 6.732611614860332e-06, |
| "loss": 0.0501, |
| "step": 469 |
| }, |
| { |
| "epoch": 2.5, |
| "grad_norm": 0.007533662319905821, |
| "learning_rate": 6.5956287821164675e-06, |
| "loss": 0.0495, |
| "step": 470 |
| }, |
| { |
| "epoch": 2.50531914893617, |
| "grad_norm": 0.008161602838507689, |
| "learning_rate": 6.45992853313798e-06, |
| "loss": 0.0453, |
| "step": 471 |
| }, |
| { |
| "epoch": 2.5106382978723403, |
| "grad_norm": 0.007155861763314661, |
| "learning_rate": 6.325516078229071e-06, |
| "loss": 0.0431, |
| "step": 472 |
| }, |
| { |
| "epoch": 2.5159574468085104, |
| "grad_norm": 0.007539919209014622, |
| "learning_rate": 6.1923965782482165e-06, |
| "loss": 0.0447, |
| "step": 473 |
| }, |
| { |
| "epoch": 2.521276595744681, |
| "grad_norm": 0.00775502744864421, |
| "learning_rate": 6.060575144410013e-06, |
| "loss": 0.0457, |
| "step": 474 |
| }, |
| { |
| "epoch": 2.526595744680851, |
| "grad_norm": 0.0067872630900419366, |
| "learning_rate": 5.930056838088925e-06, |
| "loss": 0.044, |
| "step": 475 |
| }, |
| { |
| "epoch": 2.5319148936170213, |
| "grad_norm": 0.008713866806807285, |
| "learning_rate": 5.800846670624904e-06, |
| "loss": 0.0467, |
| "step": 476 |
| }, |
| { |
| "epoch": 2.5372340425531914, |
| "grad_norm": 0.008247908388252265, |
| "learning_rate": 5.672949603131042e-06, |
| "loss": 0.0409, |
| "step": 477 |
| }, |
| { |
| "epoch": 2.5425531914893615, |
| "grad_norm": 0.007989890031089488, |
| "learning_rate": 5.546370546303057e-06, |
| "loss": 0.0465, |
| "step": 478 |
| }, |
| { |
| "epoch": 2.547872340425532, |
| "grad_norm": 0.008051278773763755, |
| "learning_rate": 5.4211143602307174e-06, |
| "loss": 0.0438, |
| "step": 479 |
| }, |
| { |
| "epoch": 2.5531914893617023, |
| "grad_norm": 0.007860818231782947, |
| "learning_rate": 5.297185854211284e-06, |
| "loss": 0.0445, |
| "step": 480 |
| }, |
| { |
| "epoch": 2.5585106382978724, |
| "grad_norm": 0.008279264973211398, |
| "learning_rate": 5.174589786564834e-06, |
| "loss": 0.0405, |
| "step": 481 |
| }, |
| { |
| "epoch": 2.5638297872340425, |
| "grad_norm": 0.00787279384702668, |
| "learning_rate": 5.05333086445154e-06, |
| "loss": 0.0441, |
| "step": 482 |
| }, |
| { |
| "epoch": 2.5691489361702127, |
| "grad_norm": 0.0075983988471285385, |
| "learning_rate": 4.933413743690953e-06, |
| "loss": 0.0523, |
| "step": 483 |
| }, |
| { |
| "epoch": 2.574468085106383, |
| "grad_norm": 0.007217261825527728, |
| "learning_rate": 4.81484302858326e-06, |
| "loss": 0.0581, |
| "step": 484 |
| }, |
| { |
| "epoch": 2.579787234042553, |
| "grad_norm": 0.007847446240448823, |
| "learning_rate": 4.69762327173247e-06, |
| "loss": 0.0505, |
| "step": 485 |
| }, |
| { |
| "epoch": 2.5851063829787235, |
| "grad_norm": 0.007344178233679974, |
| "learning_rate": 4.581758973871609e-06, |
| "loss": 0.0426, |
| "step": 486 |
| }, |
| { |
| "epoch": 2.5904255319148937, |
| "grad_norm": 0.007473733417833468, |
| "learning_rate": 4.467254583689938e-06, |
| "loss": 0.048, |
| "step": 487 |
| }, |
| { |
| "epoch": 2.595744680851064, |
| "grad_norm": 0.007579010065366221, |
| "learning_rate": 4.354114497662138e-06, |
| "loss": 0.0501, |
| "step": 488 |
| }, |
| { |
| "epoch": 2.601063829787234, |
| "grad_norm": 0.007980796639723483, |
| "learning_rate": 4.242343059879468e-06, |
| "loss": 0.0385, |
| "step": 489 |
| }, |
| { |
| "epoch": 2.6063829787234045, |
| "grad_norm": 0.007944736518486387, |
| "learning_rate": 4.1319445618830254e-06, |
| "loss": 0.0444, |
| "step": 490 |
| }, |
| { |
| "epoch": 2.6117021276595747, |
| "grad_norm": 0.008152339598695301, |
| "learning_rate": 4.022923242498933e-06, |
| "loss": 0.0399, |
| "step": 491 |
| }, |
| { |
| "epoch": 2.617021276595745, |
| "grad_norm": 0.008105279313269747, |
| "learning_rate": 3.915283287675573e-06, |
| "loss": 0.0466, |
| "step": 492 |
| }, |
| { |
| "epoch": 2.622340425531915, |
| "grad_norm": 0.008173872546795465, |
| "learning_rate": 3.809028830322934e-06, |
| "loss": 0.0469, |
| "step": 493 |
| }, |
| { |
| "epoch": 2.627659574468085, |
| "grad_norm": 0.008055721870866051, |
| "learning_rate": 3.7041639501538275e-06, |
| "loss": 0.0448, |
| "step": 494 |
| }, |
| { |
| "epoch": 2.632978723404255, |
| "grad_norm": 0.007861015403246664, |
| "learning_rate": 3.6006926735273353e-06, |
| "loss": 0.0406, |
| "step": 495 |
| }, |
| { |
| "epoch": 2.6382978723404253, |
| "grad_norm": 0.007590616830080324, |
| "learning_rate": 3.4986189732941457e-06, |
| "loss": 0.0502, |
| "step": 496 |
| }, |
| { |
| "epoch": 2.6436170212765955, |
| "grad_norm": 0.008120208186594521, |
| "learning_rate": 3.3979467686440537e-06, |
| "loss": 0.0411, |
| "step": 497 |
| }, |
| { |
| "epoch": 2.648936170212766, |
| "grad_norm": 0.007949792288387625, |
| "learning_rate": 3.2986799249554857e-06, |
| "loss": 0.0433, |
| "step": 498 |
| }, |
| { |
| "epoch": 2.654255319148936, |
| "grad_norm": 0.007844755384001931, |
| "learning_rate": 3.200822253647031e-06, |
| "loss": 0.0523, |
| "step": 499 |
| }, |
| { |
| "epoch": 2.6595744680851063, |
| "grad_norm": 0.008136360914120786, |
| "learning_rate": 3.104377512031174e-06, |
| "loss": 0.051, |
| "step": 500 |
| }, |
| { |
| "epoch": 2.6648936170212765, |
| "grad_norm": 0.008366635075960189, |
| "learning_rate": 3.0093494031699786e-06, |
| "loss": 0.0431, |
| "step": 501 |
| }, |
| { |
| "epoch": 2.670212765957447, |
| "grad_norm": 0.007664684957493284, |
| "learning_rate": 2.9157415757329156e-06, |
| "loss": 0.0632, |
| "step": 502 |
| }, |
| { |
| "epoch": 2.675531914893617, |
| "grad_norm": 0.00825024592131537, |
| "learning_rate": 2.8235576238567853e-06, |
| "loss": 0.0425, |
| "step": 503 |
| }, |
| { |
| "epoch": 2.6808510638297873, |
| "grad_norm": 0.00755888470323572, |
| "learning_rate": 2.732801087007708e-06, |
| "loss": 0.0486, |
| "step": 504 |
| }, |
| { |
| "epoch": 2.6861702127659575, |
| "grad_norm": 0.007727613923429901, |
| "learning_rate": 2.6434754498452232e-06, |
| "loss": 0.0425, |
| "step": 505 |
| }, |
| { |
| "epoch": 2.6914893617021276, |
| "grad_norm": 0.007853383876233723, |
| "learning_rate": 2.555584142088483e-06, |
| "loss": 0.0503, |
| "step": 506 |
| }, |
| { |
| "epoch": 2.6968085106382977, |
| "grad_norm": 0.008197291840186867, |
| "learning_rate": 2.469130538384592e-06, |
| "loss": 0.0513, |
| "step": 507 |
| }, |
| { |
| "epoch": 2.702127659574468, |
| "grad_norm": 0.007854509644886805, |
| "learning_rate": 2.38411795817902e-06, |
| "loss": 0.0468, |
| "step": 508 |
| }, |
| { |
| "epoch": 2.7074468085106385, |
| "grad_norm": 0.008188036636652614, |
| "learning_rate": 2.300549665588139e-06, |
| "loss": 0.0474, |
| "step": 509 |
| }, |
| { |
| "epoch": 2.7127659574468086, |
| "grad_norm": 0.007295361408424019, |
| "learning_rate": 2.2184288692739163e-06, |
| "loss": 0.0568, |
| "step": 510 |
| }, |
| { |
| "epoch": 2.7180851063829787, |
| "grad_norm": 0.007970226427094782, |
| "learning_rate": 2.1377587223207062e-06, |
| "loss": 0.0459, |
| "step": 511 |
| }, |
| { |
| "epoch": 2.723404255319149, |
| "grad_norm": 0.0070737488891118114, |
| "learning_rate": 2.0585423221141807e-06, |
| "loss": 0.056, |
| "step": 512 |
| }, |
| { |
| "epoch": 2.728723404255319, |
| "grad_norm": 0.007656308636782512, |
| "learning_rate": 1.9807827102224e-06, |
| "loss": 0.0424, |
| "step": 513 |
| }, |
| { |
| "epoch": 2.7340425531914896, |
| "grad_norm": 0.007476789030171374, |
| "learning_rate": 1.9044828722790631e-06, |
| "loss": 0.0446, |
| "step": 514 |
| }, |
| { |
| "epoch": 2.7393617021276597, |
| "grad_norm": 0.007784187955794537, |
| "learning_rate": 1.829645737868817e-06, |
| "loss": 0.0462, |
| "step": 515 |
| }, |
| { |
| "epoch": 2.74468085106383, |
| "grad_norm": 0.007737371218302239, |
| "learning_rate": 1.7562741804148098e-06, |
| "loss": 0.0406, |
| "step": 516 |
| }, |
| { |
| "epoch": 2.75, |
| "grad_norm": 0.008303721871683187, |
| "learning_rate": 1.684371017068367e-06, |
| "loss": 0.0365, |
| "step": 517 |
| }, |
| { |
| "epoch": 2.75531914893617, |
| "grad_norm": 0.007951986536488485, |
| "learning_rate": 1.613939008600811e-06, |
| "loss": 0.0468, |
| "step": 518 |
| }, |
| { |
| "epoch": 2.7606382978723403, |
| "grad_norm": 0.008361965641519975, |
| "learning_rate": 1.5449808592974491e-06, |
| "loss": 0.0592, |
| "step": 519 |
| }, |
| { |
| "epoch": 2.7659574468085104, |
| "grad_norm": 0.007692570064497456, |
| "learning_rate": 1.4774992168537662e-06, |
| "loss": 0.0478, |
| "step": 520 |
| }, |
| { |
| "epoch": 2.771276595744681, |
| "grad_norm": 0.007788921289162489, |
| "learning_rate": 1.4114966722737644e-06, |
| "loss": 0.0462, |
| "step": 521 |
| }, |
| { |
| "epoch": 2.776595744680851, |
| "grad_norm": 0.00853946835662155, |
| "learning_rate": 1.3469757597704347e-06, |
| "loss": 0.0403, |
| "step": 522 |
| }, |
| { |
| "epoch": 2.7819148936170213, |
| "grad_norm": 0.006968028970357744, |
| "learning_rate": 1.283938956668518e-06, |
| "loss": 0.0554, |
| "step": 523 |
| }, |
| { |
| "epoch": 2.7872340425531914, |
| "grad_norm": 0.007766308530455441, |
| "learning_rate": 1.22238868330935e-06, |
| "loss": 0.0458, |
| "step": 524 |
| }, |
| { |
| "epoch": 2.7925531914893615, |
| "grad_norm": 0.0076113733286044035, |
| "learning_rate": 1.1623273029579195e-06, |
| "loss": 0.043, |
| "step": 525 |
| }, |
| { |
| "epoch": 2.797872340425532, |
| "grad_norm": 0.008257562564150014, |
| "learning_rate": 1.1037571217121657e-06, |
| "loss": 0.049, |
| "step": 526 |
| }, |
| { |
| "epoch": 2.8031914893617023, |
| "grad_norm": 0.008067313850038924, |
| "learning_rate": 1.0466803884144006e-06, |
| "loss": 0.0439, |
| "step": 527 |
| }, |
| { |
| "epoch": 2.8085106382978724, |
| "grad_norm": 0.007998925168002408, |
| "learning_rate": 9.910992945649878e-07, |
| "loss": 0.0534, |
| "step": 528 |
| }, |
| { |
| "epoch": 2.8138297872340425, |
| "grad_norm": 0.00817617049130359, |
| "learning_rate": 9.37015974238178e-07, |
| "loss": 0.0495, |
| "step": 529 |
| }, |
| { |
| "epoch": 2.8191489361702127, |
| "grad_norm": 0.007662776866522958, |
| "learning_rate": 8.844325040001877e-07, |
| "loss": 0.0466, |
| "step": 530 |
| }, |
| { |
| "epoch": 2.824468085106383, |
| "grad_norm": 0.007219957227385873, |
| "learning_rate": 8.33350902829455e-07, |
| "loss": 0.0463, |
| "step": 531 |
| }, |
| { |
| "epoch": 2.829787234042553, |
| "grad_norm": 0.007646907666451822, |
| "learning_rate": 7.837731320391228e-07, |
| "loss": 0.049, |
| "step": 532 |
| }, |
| { |
| "epoch": 2.8351063829787235, |
| "grad_norm": 0.007962815962301627, |
| "learning_rate": 7.35701095201744e-07, |
| "loss": 0.0472, |
| "step": 533 |
| }, |
| { |
| "epoch": 2.8404255319148937, |
| "grad_norm": 0.007968390463292509, |
| "learning_rate": 6.891366380761666e-07, |
| "loss": 0.0411, |
| "step": 534 |
| }, |
| { |
| "epoch": 2.845744680851064, |
| "grad_norm": 0.008151810235593781, |
| "learning_rate": 6.44081548536688e-07, |
| "loss": 0.0497, |
| "step": 535 |
| }, |
| { |
| "epoch": 2.851063829787234, |
| "grad_norm": 0.008120173888367008, |
| "learning_rate": 6.00537556504408e-07, |
| "loss": 0.0494, |
| "step": 536 |
| }, |
| { |
| "epoch": 2.8563829787234045, |
| "grad_norm": 0.008124494949718132, |
| "learning_rate": 5.585063338807927e-07, |
| "loss": 0.0491, |
| "step": 537 |
| }, |
| { |
| "epoch": 2.8617021276595747, |
| "grad_norm": 0.008126109866352144, |
| "learning_rate": 5.179894944834863e-07, |
| "loss": 0.0469, |
| "step": 538 |
| }, |
| { |
| "epoch": 2.867021276595745, |
| "grad_norm": 0.007859457846739715, |
| "learning_rate": 4.789885939843553e-07, |
| "loss": 0.0459, |
| "step": 539 |
| }, |
| { |
| "epoch": 2.872340425531915, |
| "grad_norm": 0.00784570861119501, |
| "learning_rate": 4.415051298497508e-07, |
| "loss": 0.0407, |
| "step": 540 |
| }, |
| { |
| "epoch": 2.877659574468085, |
| "grad_norm": 0.00838711843836882, |
| "learning_rate": 4.055405412830027e-07, |
| "loss": 0.0558, |
| "step": 541 |
| }, |
| { |
| "epoch": 2.882978723404255, |
| "grad_norm": 0.007863153167928544, |
| "learning_rate": 3.710962091691883e-07, |
| "loss": 0.0482, |
| "step": 542 |
| }, |
| { |
| "epoch": 2.8882978723404253, |
| "grad_norm": 0.008105905332148732, |
| "learning_rate": 3.381734560220862e-07, |
| "loss": 0.0448, |
| "step": 543 |
| }, |
| { |
| "epoch": 2.8936170212765955, |
| "grad_norm": 0.008361699664247291, |
| "learning_rate": 3.0677354593339424e-07, |
| "loss": 0.042, |
| "step": 544 |
| }, |
| { |
| "epoch": 2.898936170212766, |
| "grad_norm": 0.007562511854461672, |
| "learning_rate": 2.768976845242266e-07, |
| "loss": 0.0441, |
| "step": 545 |
| }, |
| { |
| "epoch": 2.904255319148936, |
| "grad_norm": 0.00821191943277541, |
| "learning_rate": 2.485470188987904e-07, |
| "loss": 0.044, |
| "step": 546 |
| }, |
| { |
| "epoch": 2.9095744680851063, |
| "grad_norm": 0.008000437860898272, |
| "learning_rate": 2.2172263760035452e-07, |
| "loss": 0.0465, |
| "step": 547 |
| }, |
| { |
| "epoch": 2.9148936170212765, |
| "grad_norm": 0.007613170697726605, |
| "learning_rate": 1.9642557056945177e-07, |
| "loss": 0.0478, |
| "step": 548 |
| }, |
| { |
| "epoch": 2.920212765957447, |
| "grad_norm": 0.008236168486075241, |
| "learning_rate": 1.7265678910433737e-07, |
| "loss": 0.0415, |
| "step": 549 |
| }, |
| { |
| "epoch": 2.925531914893617, |
| "grad_norm": 0.007696489803714274, |
| "learning_rate": 1.50417205823703e-07, |
| "loss": 0.0433, |
| "step": 550 |
| }, |
| { |
| "epoch": 2.9308510638297873, |
| "grad_norm": 0.007562609433880933, |
| "learning_rate": 1.2970767463160284e-07, |
| "loss": 0.0434, |
| "step": 551 |
| }, |
| { |
| "epoch": 2.9361702127659575, |
| "grad_norm": 0.007732507310194832, |
| "learning_rate": 1.1052899068471068e-07, |
| "loss": 0.0464, |
| "step": 552 |
| }, |
| { |
| "epoch": 2.9414893617021276, |
| "grad_norm": 0.007428197342957649, |
| "learning_rate": 9.288189036176231e-08, |
| "loss": 0.0446, |
| "step": 553 |
| }, |
| { |
| "epoch": 2.9468085106382977, |
| "grad_norm": 0.007741699415017989, |
| "learning_rate": 7.676705123528916e-08, |
| "loss": 0.0426, |
| "step": 554 |
| }, |
| { |
| "epoch": 2.952127659574468, |
| "grad_norm": 0.0076164845974366424, |
| "learning_rate": 6.218509204560796e-08, |
| "loss": 0.0527, |
| "step": 555 |
| }, |
| { |
| "epoch": 2.9574468085106385, |
| "grad_norm": 0.007833663927276755, |
| "learning_rate": 4.91365726770443e-08, |
| "loss": 0.0469, |
| "step": 556 |
| }, |
| { |
| "epoch": 2.9627659574468086, |
| "grad_norm": 0.007673579914876504, |
| "learning_rate": 3.7621994136460835e-08, |
| "loss": 0.0467, |
| "step": 557 |
| }, |
| { |
| "epoch": 2.9680851063829787, |
| "grad_norm": 0.007829180023256685, |
| "learning_rate": 2.764179853400606e-08, |
| "loss": 0.055, |
| "step": 558 |
| }, |
| { |
| "epoch": 2.973404255319149, |
| "grad_norm": 0.007350125386937092, |
| "learning_rate": 1.9196369066141197e-08, |
| "loss": 0.0468, |
| "step": 559 |
| }, |
| { |
| "epoch": 2.978723404255319, |
| "grad_norm": 0.007546491636896886, |
| "learning_rate": 1.228603000092754e-08, |
| "loss": 0.0437, |
| "step": 560 |
| }, |
| { |
| "epoch": 2.9840425531914896, |
| "grad_norm": 0.007716277351876769, |
| "learning_rate": 6.911046665578625e-09, |
| "loss": 0.0513, |
| "step": 561 |
| }, |
| { |
| "epoch": 2.9893617021276597, |
| "grad_norm": 0.00767958290724334, |
| "learning_rate": 3.0716254362683774e-09, |
| "loss": 0.0475, |
| "step": 562 |
| }, |
| { |
| "epoch": 2.99468085106383, |
| "grad_norm": 0.008148970740552009, |
| "learning_rate": 7.679137302085693e-10, |
| "loss": 0.0531, |
| "step": 563 |
| }, |
| { |
| "epoch": 3.0, |
| "grad_norm": 0.007953012041425112, |
| "learning_rate": 0.0, |
| "loss": 0.0377, |
| "step": 564 |
| }, |
| { |
| "epoch": 3.0, |
| "step": 564, |
| "total_flos": 424964463656960.0, |
| "train_loss": 0.05965587804556316, |
| "train_runtime": 4685.7686, |
| "train_samples_per_second": 0.96, |
| "train_steps_per_second": 0.12 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 564, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": false, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 424964463656960.0, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |