{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.988354430379747,
  "eval_steps": 500,
  "global_step": 1230,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004050632911392405,
      "grad_norm": 6.738137424432949,
      "learning_rate": 3.2520325203252037e-07,
      "loss": 0.9668,
      "step": 1
    },
    {
      "epoch": 0.00810126582278481,
      "grad_norm": 6.704480183106894,
      "learning_rate": 6.504065040650407e-07,
      "loss": 0.9715,
      "step": 2
    },
    {
      "epoch": 0.012151898734177215,
      "grad_norm": 6.649852168234561,
      "learning_rate": 9.75609756097561e-07,
      "loss": 0.9555,
      "step": 3
    },
    {
      "epoch": 0.01620253164556962,
      "grad_norm": 6.586384339989349,
      "learning_rate": 1.3008130081300815e-06,
      "loss": 0.965,
      "step": 4
    },
    {
      "epoch": 0.020253164556962026,
      "grad_norm": 6.009424153534045,
      "learning_rate": 1.6260162601626018e-06,
      "loss": 0.9484,
      "step": 5
    },
    {
      "epoch": 0.02430379746835443,
      "grad_norm": 5.851554206902821,
      "learning_rate": 1.951219512195122e-06,
      "loss": 0.9727,
      "step": 6
    },
    {
      "epoch": 0.028354430379746835,
      "grad_norm": 4.220419241345155,
      "learning_rate": 2.2764227642276426e-06,
      "loss": 0.8843,
      "step": 7
    },
    {
      "epoch": 0.03240506329113924,
      "grad_norm": 3.977587799836835,
      "learning_rate": 2.601626016260163e-06,
      "loss": 0.8994,
      "step": 8
    },
    {
      "epoch": 0.03645569620253165,
      "grad_norm": 2.9500264035888115,
      "learning_rate": 2.926829268292683e-06,
      "loss": 0.8499,
      "step": 9
    },
    {
      "epoch": 0.04050632911392405,
      "grad_norm": 2.74529493974799,
      "learning_rate": 3.2520325203252037e-06,
      "loss": 0.8951,
      "step": 10
    },
    {
      "epoch": 0.044556962025316456,
      "grad_norm": 2.2388546719948255,
      "learning_rate": 3.577235772357724e-06,
      "loss": 0.8459,
      "step": 11
    },
    {
      "epoch": 0.04860759493670886,
      "grad_norm": 4.605664435522847,
      "learning_rate": 3.902439024390244e-06,
      "loss": 0.881,
      "step": 12
    },
    {
      "epoch": 0.052658227848101265,
      "grad_norm": 4.42657753494484,
      "learning_rate": 4.227642276422765e-06,
      "loss": 0.8385,
      "step": 13
    },
    {
      "epoch": 0.05670886075949367,
      "grad_norm": 4.1547475177727335,
      "learning_rate": 4.552845528455285e-06,
      "loss": 0.8076,
      "step": 14
    },
    {
      "epoch": 0.060759493670886074,
      "grad_norm": 3.774279488756445,
      "learning_rate": 4.8780487804878055e-06,
      "loss": 0.8323,
      "step": 15
    },
    {
      "epoch": 0.06481012658227848,
      "grad_norm": 2.148514458280813,
      "learning_rate": 5.203252032520326e-06,
      "loss": 0.7746,
      "step": 16
    },
    {
      "epoch": 0.06886075949367089,
      "grad_norm": 2.089569715105327,
      "learning_rate": 5.528455284552846e-06,
      "loss": 0.8118,
      "step": 17
    },
    {
      "epoch": 0.0729113924050633,
      "grad_norm": 1.8128797101669885,
      "learning_rate": 5.853658536585366e-06,
      "loss": 0.7438,
      "step": 18
    },
    {
      "epoch": 0.0769620253164557,
      "grad_norm": 1.3929441683392823,
      "learning_rate": 6.178861788617887e-06,
      "loss": 0.7345,
      "step": 19
    },
    {
      "epoch": 0.0810126582278481,
      "grad_norm": 1.3435509653560478,
      "learning_rate": 6.504065040650407e-06,
      "loss": 0.717,
      "step": 20
    },
    {
      "epoch": 0.08506329113924051,
      "grad_norm": 1.1518944179113615,
      "learning_rate": 6.829268292682928e-06,
      "loss": 0.7285,
      "step": 21
    },
    {
      "epoch": 0.08911392405063291,
      "grad_norm": 1.037800881018344,
      "learning_rate": 7.154471544715448e-06,
      "loss": 0.6999,
      "step": 22
    },
    {
      "epoch": 0.09316455696202532,
      "grad_norm": 1.0406570785553142,
      "learning_rate": 7.4796747967479676e-06,
      "loss": 0.715,
      "step": 23
    },
    {
      "epoch": 0.09721518987341772,
      "grad_norm": 1.1054781909533145,
      "learning_rate": 7.804878048780489e-06,
      "loss": 0.6935,
      "step": 24
    },
    {
      "epoch": 0.10126582278481013,
      "grad_norm": 1.0808832018750802,
      "learning_rate": 8.130081300813009e-06,
      "loss": 0.6748,
      "step": 25
    },
    {
      "epoch": 0.10531645569620253,
      "grad_norm": 0.8120786833078844,
      "learning_rate": 8.45528455284553e-06,
      "loss": 0.6848,
      "step": 26
    },
    {
      "epoch": 0.10936708860759493,
      "grad_norm": 0.8022044098431059,
      "learning_rate": 8.78048780487805e-06,
      "loss": 0.6677,
      "step": 27
    },
    {
      "epoch": 0.11341772151898734,
      "grad_norm": 0.8202570499156832,
      "learning_rate": 9.10569105691057e-06,
      "loss": 0.6676,
      "step": 28
    },
    {
      "epoch": 0.11746835443037974,
      "grad_norm": 0.9122335465172019,
      "learning_rate": 9.43089430894309e-06,
      "loss": 0.6656,
      "step": 29
    },
    {
      "epoch": 0.12151898734177215,
      "grad_norm": 0.8128208709238237,
      "learning_rate": 9.756097560975611e-06,
      "loss": 0.6648,
      "step": 30
    },
    {
      "epoch": 0.12556962025316457,
      "grad_norm": 0.6857485662725991,
      "learning_rate": 1.008130081300813e-05,
      "loss": 0.68,
      "step": 31
    },
    {
      "epoch": 0.12962025316455697,
      "grad_norm": 0.6743229086446373,
      "learning_rate": 1.0406504065040652e-05,
      "loss": 0.6533,
      "step": 32
    },
    {
      "epoch": 0.13367088607594937,
      "grad_norm": 0.8179942317888155,
      "learning_rate": 1.0731707317073172e-05,
      "loss": 0.6633,
      "step": 33
    },
    {
      "epoch": 0.13772151898734178,
      "grad_norm": 0.7923267760833331,
      "learning_rate": 1.1056910569105692e-05,
      "loss": 0.6475,
      "step": 34
    },
    {
      "epoch": 0.14177215189873418,
      "grad_norm": 0.580942452996161,
      "learning_rate": 1.1382113821138213e-05,
      "loss": 0.6614,
      "step": 35
    },
    {
      "epoch": 0.1458227848101266,
      "grad_norm": 0.7405932422874135,
      "learning_rate": 1.1707317073170731e-05,
      "loss": 0.6458,
      "step": 36
    },
    {
      "epoch": 0.149873417721519,
      "grad_norm": 0.6935200212548842,
      "learning_rate": 1.2032520325203254e-05,
      "loss": 0.6487,
      "step": 37
    },
    {
      "epoch": 0.1539240506329114,
      "grad_norm": 0.6151757390966838,
      "learning_rate": 1.2357723577235774e-05,
      "loss": 0.6444,
      "step": 38
    },
    {
      "epoch": 0.1579746835443038,
      "grad_norm": 0.593954913303251,
      "learning_rate": 1.2682926829268294e-05,
      "loss": 0.6422,
      "step": 39
    },
    {
      "epoch": 0.1620253164556962,
      "grad_norm": 0.6049660429457115,
      "learning_rate": 1.3008130081300815e-05,
      "loss": 0.6232,
      "step": 40
    },
    {
      "epoch": 0.1660759493670886,
      "grad_norm": 0.6202598276442614,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.6469,
      "step": 41
    },
    {
      "epoch": 0.17012658227848101,
      "grad_norm": 0.5252543366810118,
      "learning_rate": 1.3658536585365855e-05,
      "loss": 0.6273,
      "step": 42
    },
    {
      "epoch": 0.17417721518987342,
      "grad_norm": 0.5990662753459397,
      "learning_rate": 1.3983739837398376e-05,
      "loss": 0.6385,
      "step": 43
    },
    {
      "epoch": 0.17822784810126582,
      "grad_norm": 0.5555298700793329,
      "learning_rate": 1.4308943089430896e-05,
      "loss": 0.6335,
      "step": 44
    },
    {
      "epoch": 0.18227848101265823,
      "grad_norm": 0.5397270311831047,
      "learning_rate": 1.4634146341463415e-05,
      "loss": 0.6529,
      "step": 45
    },
    {
      "epoch": 0.18632911392405063,
      "grad_norm": 0.547448981025657,
      "learning_rate": 1.4959349593495935e-05,
      "loss": 0.6366,
      "step": 46
    },
    {
      "epoch": 0.19037974683544304,
      "grad_norm": 0.5465929255940167,
      "learning_rate": 1.528455284552846e-05,
      "loss": 0.6169,
      "step": 47
    },
    {
      "epoch": 0.19443037974683544,
      "grad_norm": 0.5968501964145153,
      "learning_rate": 1.5609756097560978e-05,
      "loss": 0.6544,
      "step": 48
    },
    {
      "epoch": 0.19848101265822785,
      "grad_norm": 0.5649668464239493,
      "learning_rate": 1.5934959349593496e-05,
      "loss": 0.6249,
      "step": 49
    },
    {
      "epoch": 0.20253164556962025,
      "grad_norm": 0.5862494161295905,
      "learning_rate": 1.6260162601626018e-05,
      "loss": 0.6553,
      "step": 50
    },
    {
      "epoch": 0.20658227848101265,
      "grad_norm": 0.5585290861876406,
      "learning_rate": 1.6585365853658537e-05,
      "loss": 0.6364,
      "step": 51
    },
    {
      "epoch": 0.21063291139240506,
      "grad_norm": 0.5656095503736616,
      "learning_rate": 1.691056910569106e-05,
      "loss": 0.6427,
      "step": 52
    },
    {
      "epoch": 0.21468354430379746,
      "grad_norm": 0.7090104270042087,
      "learning_rate": 1.7235772357723578e-05,
      "loss": 0.6337,
      "step": 53
    },
    {
      "epoch": 0.21873417721518987,
      "grad_norm": 0.5220301052114755,
      "learning_rate": 1.75609756097561e-05,
      "loss": 0.6381,
      "step": 54
    },
    {
      "epoch": 0.22278481012658227,
      "grad_norm": 0.6632143725654617,
      "learning_rate": 1.788617886178862e-05,
      "loss": 0.6222,
      "step": 55
    },
    {
      "epoch": 0.22683544303797468,
      "grad_norm": 0.5650459842153049,
      "learning_rate": 1.821138211382114e-05,
      "loss": 0.6087,
      "step": 56
    },
    {
      "epoch": 0.23088607594936708,
      "grad_norm": 0.6353465575731497,
      "learning_rate": 1.8536585365853663e-05,
      "loss": 0.6412,
      "step": 57
    },
    {
      "epoch": 0.23493670886075949,
      "grad_norm": 0.5298639288059516,
      "learning_rate": 1.886178861788618e-05,
      "loss": 0.6029,
      "step": 58
    },
    {
      "epoch": 0.2389873417721519,
      "grad_norm": 0.6408073698377098,
      "learning_rate": 1.91869918699187e-05,
      "loss": 0.6314,
      "step": 59
    },
    {
      "epoch": 0.2430379746835443,
      "grad_norm": 0.6663725445327395,
      "learning_rate": 1.9512195121951222e-05,
      "loss": 0.6222,
      "step": 60
    },
    {
      "epoch": 0.2470886075949367,
      "grad_norm": 0.619667014775608,
      "learning_rate": 1.983739837398374e-05,
      "loss": 0.6148,
      "step": 61
    },
    {
      "epoch": 0.25113924050632913,
      "grad_norm": 0.6522160972568782,
      "learning_rate": 2.016260162601626e-05,
      "loss": 0.6211,
      "step": 62
    },
    {
      "epoch": 0.25518987341772154,
      "grad_norm": 0.6588528929532699,
      "learning_rate": 2.048780487804878e-05,
      "loss": 0.6293,
      "step": 63
    },
    {
      "epoch": 0.25924050632911394,
      "grad_norm": 0.6085291038558257,
      "learning_rate": 2.0813008130081303e-05,
      "loss": 0.6069,
      "step": 64
    },
    {
      "epoch": 0.26329113924050634,
      "grad_norm": 0.7361879054546313,
      "learning_rate": 2.1138211382113822e-05,
      "loss": 0.6299,
      "step": 65
    },
    {
      "epoch": 0.26734177215189875,
      "grad_norm": 0.5114808132070221,
      "learning_rate": 2.1463414634146344e-05,
      "loss": 0.5976,
      "step": 66
    },
    {
      "epoch": 0.27139240506329115,
      "grad_norm": 0.6556791255027764,
      "learning_rate": 2.1788617886178863e-05,
      "loss": 0.5968,
      "step": 67
    },
    {
      "epoch": 0.27544303797468356,
      "grad_norm": 0.6120042447557519,
      "learning_rate": 2.2113821138211385e-05,
      "loss": 0.6091,
      "step": 68
    },
    {
      "epoch": 0.27949367088607596,
      "grad_norm": 0.6364528311090728,
      "learning_rate": 2.2439024390243907e-05,
      "loss": 0.595,
      "step": 69
    },
    {
      "epoch": 0.28354430379746837,
      "grad_norm": 0.545332925458142,
      "learning_rate": 2.2764227642276426e-05,
      "loss": 0.6195,
      "step": 70
    },
    {
      "epoch": 0.28759493670886077,
      "grad_norm": 0.6446808684081377,
      "learning_rate": 2.3089430894308948e-05,
      "loss": 0.605,
      "step": 71
    },
    {
      "epoch": 0.2916455696202532,
      "grad_norm": 0.526482946232368,
      "learning_rate": 2.3414634146341463e-05,
      "loss": 0.5987,
      "step": 72
    },
    {
      "epoch": 0.2956962025316456,
      "grad_norm": 0.6843137029671412,
      "learning_rate": 2.3739837398373985e-05,
      "loss": 0.61,
      "step": 73
    },
    {
      "epoch": 0.299746835443038,
      "grad_norm": 0.5313501957668688,
      "learning_rate": 2.4065040650406507e-05,
      "loss": 0.5983,
      "step": 74
    },
    {
      "epoch": 0.3037974683544304,
      "grad_norm": 0.6229037916787954,
      "learning_rate": 2.4390243902439026e-05,
      "loss": 0.6045,
      "step": 75
    },
    {
      "epoch": 0.3078481012658228,
      "grad_norm": 0.7587650172852438,
      "learning_rate": 2.4715447154471548e-05,
      "loss": 0.6049,
      "step": 76
    },
    {
      "epoch": 0.3118987341772152,
      "grad_norm": 0.5235923200776036,
      "learning_rate": 2.5040650406504066e-05,
      "loss": 0.6048,
      "step": 77
    },
    {
      "epoch": 0.3159493670886076,
      "grad_norm": 0.6116443836101014,
      "learning_rate": 2.536585365853659e-05,
      "loss": 0.6048,
      "step": 78
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.57070110102058,
      "learning_rate": 2.569105691056911e-05,
      "loss": 0.6319,
      "step": 79
    },
    {
      "epoch": 0.3240506329113924,
      "grad_norm": 0.6622733709006261,
      "learning_rate": 2.601626016260163e-05,
      "loss": 0.5958,
      "step": 80
    },
    {
      "epoch": 0.3281012658227848,
      "grad_norm": 0.6061409289778379,
      "learning_rate": 2.634146341463415e-05,
      "loss": 0.6077,
      "step": 81
    },
    {
      "epoch": 0.3321518987341772,
      "grad_norm": 0.5608443801246532,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 0.6189,
      "step": 82
    },
    {
      "epoch": 0.3362025316455696,
      "grad_norm": 0.6407790595134794,
      "learning_rate": 2.699186991869919e-05,
      "loss": 0.6122,
      "step": 83
    },
    {
      "epoch": 0.34025316455696203,
      "grad_norm": 0.5013268060202937,
      "learning_rate": 2.731707317073171e-05,
      "loss": 0.6147,
      "step": 84
    },
    {
      "epoch": 0.34430379746835443,
      "grad_norm": 0.673211258614305,
      "learning_rate": 2.764227642276423e-05,
      "loss": 0.6224,
      "step": 85
    },
    {
      "epoch": 0.34835443037974684,
      "grad_norm": 0.6500998849186943,
      "learning_rate": 2.796747967479675e-05,
      "loss": 0.6219,
      "step": 86
    },
    {
      "epoch": 0.35240506329113924,
      "grad_norm": 0.6881382375633367,
      "learning_rate": 2.829268292682927e-05,
      "loss": 0.5877,
      "step": 87
    },
    {
      "epoch": 0.35645569620253165,
      "grad_norm": 0.6538399166277753,
      "learning_rate": 2.8617886178861792e-05,
      "loss": 0.6127,
      "step": 88
    },
    {
      "epoch": 0.36050632911392405,
      "grad_norm": 0.6673693527332356,
      "learning_rate": 2.8943089430894314e-05,
      "loss": 0.6064,
      "step": 89
    },
    {
      "epoch": 0.36455696202531646,
      "grad_norm": 0.6435865622653156,
      "learning_rate": 2.926829268292683e-05,
      "loss": 0.6227,
      "step": 90
    },
    {
      "epoch": 0.36860759493670886,
      "grad_norm": 0.6061801625589748,
      "learning_rate": 2.959349593495935e-05,
      "loss": 0.5998,
      "step": 91
    },
    {
      "epoch": 0.37265822784810126,
      "grad_norm": 0.7004898619528622,
      "learning_rate": 2.991869918699187e-05,
      "loss": 0.6088,
      "step": 92
    },
    {
      "epoch": 0.37670886075949367,
      "grad_norm": 0.6329491215213612,
      "learning_rate": 3.0243902439024392e-05,
      "loss": 0.5816,
      "step": 93
    },
    {
      "epoch": 0.3807594936708861,
      "grad_norm": 0.6291953434236165,
      "learning_rate": 3.056910569105692e-05,
      "loss": 0.6135,
      "step": 94
    },
    {
      "epoch": 0.3848101265822785,
      "grad_norm": 0.5646486186711192,
      "learning_rate": 3.089430894308943e-05,
      "loss": 0.6254,
      "step": 95
    },
    {
      "epoch": 0.3888607594936709,
      "grad_norm": 0.5588405585306142,
      "learning_rate": 3.1219512195121955e-05,
      "loss": 0.6181,
      "step": 96
    },
    {
      "epoch": 0.3929113924050633,
      "grad_norm": 0.6090569622349508,
      "learning_rate": 3.154471544715447e-05,
      "loss": 0.6241,
      "step": 97
    },
    {
      "epoch": 0.3969620253164557,
      "grad_norm": 0.6592102214276453,
      "learning_rate": 3.186991869918699e-05,
      "loss": 0.6159,
      "step": 98
    },
    {
      "epoch": 0.4010126582278481,
      "grad_norm": 0.5840309246985822,
      "learning_rate": 3.2195121951219514e-05,
      "loss": 0.5796,
      "step": 99
    },
    {
      "epoch": 0.4050632911392405,
      "grad_norm": 0.7467970959605532,
      "learning_rate": 3.2520325203252037e-05,
      "loss": 0.5922,
      "step": 100
    },
    {
      "epoch": 0.4091139240506329,
      "grad_norm": 0.8194736760940756,
      "learning_rate": 3.284552845528456e-05,
      "loss": 0.6133,
      "step": 101
    },
    {
      "epoch": 0.4131645569620253,
      "grad_norm": 1.0144578977657448,
      "learning_rate": 3.3170731707317074e-05,
      "loss": 0.6216,
      "step": 102
    },
    {
      "epoch": 0.4172151898734177,
      "grad_norm": 0.6775048187699853,
      "learning_rate": 3.3495934959349596e-05,
      "loss": 0.6182,
      "step": 103
    },
    {
      "epoch": 0.4212658227848101,
      "grad_norm": 0.6113068331939774,
      "learning_rate": 3.382113821138212e-05,
      "loss": 0.6052,
      "step": 104
    },
    {
      "epoch": 0.4253164556962025,
      "grad_norm": 0.8456062724729609,
      "learning_rate": 3.414634146341463e-05,
      "loss": 0.6112,
      "step": 105
    },
    {
      "epoch": 0.4293670886075949,
      "grad_norm": 1.0825740906837211,
      "learning_rate": 3.4471544715447155e-05,
      "loss": 0.6077,
      "step": 106
    },
    {
      "epoch": 0.43341772151898733,
      "grad_norm": 0.6541651225804224,
      "learning_rate": 3.479674796747968e-05,
      "loss": 0.609,
      "step": 107
    },
    {
      "epoch": 0.43746835443037974,
      "grad_norm": 0.8519757827412257,
      "learning_rate": 3.51219512195122e-05,
      "loss": 0.5937,
      "step": 108
    },
    {
      "epoch": 0.44151898734177214,
      "grad_norm": 0.9211472979410629,
      "learning_rate": 3.544715447154472e-05,
      "loss": 0.6461,
      "step": 109
    },
    {
      "epoch": 0.44556962025316454,
      "grad_norm": 0.6137722565175325,
      "learning_rate": 3.577235772357724e-05,
      "loss": 0.5939,
      "step": 110
    },
    {
      "epoch": 0.44962025316455695,
      "grad_norm": 0.656857990479076,
      "learning_rate": 3.609756097560976e-05,
      "loss": 0.6096,
      "step": 111
    },
    {
      "epoch": 0.45367088607594935,
      "grad_norm": 0.7852666926770623,
      "learning_rate": 3.642276422764228e-05,
      "loss": 0.6143,
      "step": 112
    },
    {
      "epoch": 0.45772151898734176,
      "grad_norm": 0.6580716052028316,
      "learning_rate": 3.67479674796748e-05,
      "loss": 0.6154,
      "step": 113
    },
    {
      "epoch": 0.46177215189873416,
      "grad_norm": 0.5947979326827149,
      "learning_rate": 3.7073170731707325e-05,
      "loss": 0.6113,
      "step": 114
    },
    {
      "epoch": 0.46582278481012657,
      "grad_norm": 0.7770493177926533,
      "learning_rate": 3.739837398373984e-05,
      "loss": 0.6013,
      "step": 115
    },
    {
      "epoch": 0.46987341772151897,
      "grad_norm": 0.7024616318584296,
      "learning_rate": 3.772357723577236e-05,
      "loss": 0.6151,
      "step": 116
    },
    {
      "epoch": 0.4739240506329114,
      "grad_norm": 0.6346037172305418,
      "learning_rate": 3.804878048780488e-05,
      "loss": 0.6277,
      "step": 117
    },
    {
      "epoch": 0.4779746835443038,
      "grad_norm": 0.5744007160048461,
      "learning_rate": 3.83739837398374e-05,
      "loss": 0.6188,
      "step": 118
    },
    {
      "epoch": 0.4820253164556962,
      "grad_norm": 0.6725047890493829,
      "learning_rate": 3.869918699186992e-05,
      "loss": 0.5887,
      "step": 119
    },
    {
      "epoch": 0.4860759493670886,
      "grad_norm": 0.6852570006283522,
      "learning_rate": 3.9024390243902444e-05,
      "loss": 0.5932,
      "step": 120
    },
    {
      "epoch": 0.490126582278481,
      "grad_norm": 0.6710149270584966,
      "learning_rate": 3.9349593495934966e-05,
      "loss": 0.6097,
      "step": 121
    },
    {
      "epoch": 0.4941772151898734,
      "grad_norm": 0.6636621367931815,
      "learning_rate": 3.967479674796748e-05,
      "loss": 0.6143,
      "step": 122
    },
    {
      "epoch": 0.4982278481012658,
      "grad_norm": 0.710755017071099,
      "learning_rate": 4e-05,
      "loss": 0.6269,
      "step": 123
    },
    {
      "epoch": 0.5022784810126583,
      "grad_norm": 0.6275540909964789,
      "learning_rate": 3.999991946137476e-05,
      "loss": 0.6113,
      "step": 124
    },
    {
      "epoch": 0.5063291139240507,
      "grad_norm": 0.6901366809135769,
      "learning_rate": 3.999967784614766e-05,
      "loss": 0.6073,
      "step": 125
    },
    {
      "epoch": 0.5103797468354431,
      "grad_norm": 0.7346298849301377,
      "learning_rate": 3.9999275156264656e-05,
      "loss": 0.6011,
      "step": 126
    },
    {
      "epoch": 0.5144303797468355,
      "grad_norm": 0.9800665767350758,
      "learning_rate": 3.999871139496895e-05,
      "loss": 0.5966,
      "step": 127
    },
    {
      "epoch": 0.5184810126582279,
      "grad_norm": 0.5884804993063429,
      "learning_rate": 3.9997986566800995e-05,
      "loss": 0.5944,
      "step": 128
    },
    {
      "epoch": 0.5225316455696203,
      "grad_norm": 0.7508414192755807,
      "learning_rate": 3.999710067759846e-05,
      "loss": 0.6028,
      "step": 129
    },
    {
      "epoch": 0.5265822784810127,
      "grad_norm": 0.6871060770932548,
      "learning_rate": 3.999605373449617e-05,
      "loss": 0.5926,
      "step": 130
    },
    {
      "epoch": 0.5306329113924051,
      "grad_norm": 0.5916655495006354,
      "learning_rate": 3.9994845745926075e-05,
      "loss": 0.5751,
      "step": 131
    },
    {
      "epoch": 0.5346835443037975,
      "grad_norm": 0.7826157784275275,
      "learning_rate": 3.999347672161713e-05,
      "loss": 0.6117,
      "step": 132
    },
    {
      "epoch": 0.5387341772151899,
      "grad_norm": 0.6642512515630379,
      "learning_rate": 3.999194667259528e-05,
      "loss": 0.6148,
      "step": 133
    },
    {
      "epoch": 0.5427848101265823,
      "grad_norm": 0.7617489932819406,
      "learning_rate": 3.999025561118334e-05,
      "loss": 0.5945,
      "step": 134
    },
    {
      "epoch": 0.5468354430379747,
      "grad_norm": 0.6189617390006067,
      "learning_rate": 3.998840355100086e-05,
      "loss": 0.6049,
      "step": 135
    },
    {
      "epoch": 0.5508860759493671,
      "grad_norm": 0.6031115478369041,
      "learning_rate": 3.998639050696409e-05,
      "loss": 0.5958,
      "step": 136
    },
    {
      "epoch": 0.5549367088607595,
      "grad_norm": 0.7201004473334499,
      "learning_rate": 3.998421649528582e-05,
      "loss": 0.5927,
      "step": 137
    },
    {
      "epoch": 0.5589873417721519,
      "grad_norm": 0.5379927300985372,
      "learning_rate": 3.9981881533475234e-05,
      "loss": 0.5965,
      "step": 138
    },
    {
      "epoch": 0.5630379746835443,
      "grad_norm": 0.6563389837085589,
      "learning_rate": 3.997938564033779e-05,
      "loss": 0.6243,
      "step": 139
    },
    {
      "epoch": 0.5670886075949367,
      "grad_norm": 0.6753157024054424,
      "learning_rate": 3.9976728835975064e-05,
      "loss": 0.6146,
      "step": 140
    },
    {
      "epoch": 0.5711392405063291,
      "grad_norm": 0.6877767099181129,
      "learning_rate": 3.9973911141784605e-05,
      "loss": 0.6296,
      "step": 141
    },
    {
      "epoch": 0.5751898734177215,
      "grad_norm": 0.5752605559701961,
      "learning_rate": 3.997093258045973e-05,
      "loss": 0.5883,
      "step": 142
    },
    {
      "epoch": 0.579240506329114,
      "grad_norm": 0.8374236313404673,
      "learning_rate": 3.996779317598936e-05,
      "loss": 0.6307,
      "step": 143
    },
    {
      "epoch": 0.5832911392405064,
      "grad_norm": 0.9151232703658515,
      "learning_rate": 3.996449295365782e-05,
      "loss": 0.604,
      "step": 144
    },
    {
      "epoch": 0.5873417721518988,
      "grad_norm": 0.6536304616997746,
      "learning_rate": 3.996103194004467e-05,
      "loss": 0.5889,
      "step": 145
    },
    {
      "epoch": 0.5913924050632912,
      "grad_norm": 0.770032852859139,
      "learning_rate": 3.995741016302441e-05,
      "loss": 0.5813,
      "step": 146
    },
    {
      "epoch": 0.5954430379746836,
      "grad_norm": 0.6242371711219942,
      "learning_rate": 3.9953627651766364e-05,
      "loss": 0.6197,
      "step": 147
    },
    {
      "epoch": 0.599493670886076,
      "grad_norm": 0.7500252297814209,
      "learning_rate": 3.9949684436734325e-05,
      "loss": 0.6094,
      "step": 148
    },
    {
      "epoch": 0.6035443037974684,
      "grad_norm": 0.6493090127303927,
      "learning_rate": 3.994558054968643e-05,
      "loss": 0.6057,
      "step": 149
    },
    {
      "epoch": 0.6075949367088608,
      "grad_norm": 0.6453297509560996,
      "learning_rate": 3.994131602367481e-05,
      "loss": 0.6181,
      "step": 150
    },
    {
      "epoch": 0.6116455696202532,
      "grad_norm": 0.5307546520149223,
      "learning_rate": 3.9936890893045376e-05,
      "loss": 0.5879,
      "step": 151
    },
    {
      "epoch": 0.6156962025316456,
      "grad_norm": 0.7430731696636991,
      "learning_rate": 3.993230519343752e-05,
      "loss": 0.6191,
      "step": 152
    },
    {
      "epoch": 0.619746835443038,
      "grad_norm": 0.5322885618585647,
      "learning_rate": 3.992755896178383e-05,
      "loss": 0.5736,
      "step": 153
    },
    {
      "epoch": 0.6237974683544304,
      "grad_norm": 0.6884472258792911,
      "learning_rate": 3.992265223630981e-05,
      "loss": 0.5956,
      "step": 154
    },
    {
      "epoch": 0.6278481012658228,
      "grad_norm": 0.5263316895719893,
      "learning_rate": 3.991758505653355e-05,
      "loss": 0.5843,
      "step": 155
    },
    {
      "epoch": 0.6318987341772152,
      "grad_norm": 0.5908621809303349,
      "learning_rate": 3.991235746326543e-05,
      "loss": 0.6174,
      "step": 156
    },
    {
      "epoch": 0.6359493670886076,
      "grad_norm": 0.5307657018899118,
      "learning_rate": 3.9906969498607745e-05,
      "loss": 0.6233,
      "step": 157
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.5581620129178141,
      "learning_rate": 3.990142120595444e-05,
      "loss": 0.6286,
      "step": 158
    },
    {
      "epoch": 0.6440506329113924,
      "grad_norm": 0.6852498281327223,
      "learning_rate": 3.98957126299907e-05,
      "loss": 0.5937,
      "step": 159
    },
    {
      "epoch": 0.6481012658227848,
      "grad_norm": 0.4888967534305476,
      "learning_rate": 3.9889843816692596e-05,
      "loss": 0.6164,
      "step": 160
    },
    {
      "epoch": 0.6521518987341772,
      "grad_norm": 0.6899908678833494,
      "learning_rate": 3.9883814813326766e-05,
      "loss": 0.607,
      "step": 161
    },
    {
      "epoch": 0.6562025316455696,
      "grad_norm": 0.7245805460275944,
      "learning_rate": 3.9877625668449956e-05,
      "loss": 0.6324,
      "step": 162
    },
    {
      "epoch": 0.660253164556962,
      "grad_norm": 0.5788193119252569,
      "learning_rate": 3.98712764319087e-05,
      "loss": 0.6169,
      "step": 163
    },
    {
      "epoch": 0.6643037974683544,
      "grad_norm": 0.48773449638781474,
      "learning_rate": 3.9864767154838864e-05,
      "loss": 0.5661,
      "step": 164
    },
    {
      "epoch": 0.6683544303797468,
      "grad_norm": 0.5580113881533004,
      "learning_rate": 3.9858097889665277e-05,
      "loss": 0.6074,
      "step": 165
    },
    {
      "epoch": 0.6724050632911392,
      "grad_norm": 0.5349589030169938,
      "learning_rate": 3.985126869010129e-05,
      "loss": 0.6152,
      "step": 166
    },
    {
      "epoch": 0.6764556962025317,
      "grad_norm": 0.49175258866167015,
      "learning_rate": 3.984427961114833e-05,
      "loss": 0.5573,
      "step": 167
    },
    {
      "epoch": 0.6805063291139241,
      "grad_norm": 0.5079252854704773,
      "learning_rate": 3.9837130709095475e-05,
      "loss": 0.5933,
      "step": 168
    },
    {
      "epoch": 0.6845569620253165,
      "grad_norm": 0.498785004422796,
      "learning_rate": 3.982982204151901e-05,
      "loss": 0.622,
      "step": 169
    },
    {
      "epoch": 0.6886075949367089,
      "grad_norm": 0.4755082809002897,
      "learning_rate": 3.982235366728193e-05,
      "loss": 0.6062,
      "step": 170
    },
    {
      "epoch": 0.6926582278481013,
      "grad_norm": 0.5348528734662116,
      "learning_rate": 3.9814725646533505e-05,
      "loss": 0.6117,
      "step": 171
    },
    {
      "epoch": 0.6967088607594937,
      "grad_norm": 0.5547833146001612,
      "learning_rate": 3.9806938040708746e-05,
      "loss": 0.6146,
      "step": 172
    },
    {
      "epoch": 0.7007594936708861,
      "grad_norm": 0.5885419275139901,
      "learning_rate": 3.9798990912527976e-05,
      "loss": 0.5892,
      "step": 173
    },
    {
      "epoch": 0.7048101265822785,
      "grad_norm": 0.4995308120697694,
      "learning_rate": 3.979088432599627e-05,
      "loss": 0.6069,
      "step": 174
    },
    {
      "epoch": 0.7088607594936709,
      "grad_norm": 0.5491403011308585,
      "learning_rate": 3.9782618346402964e-05,
      "loss": 0.632,
      "step": 175
    },
    {
      "epoch": 0.7129113924050633,
      "grad_norm": 0.5286876157633286,
      "learning_rate": 3.977419304032111e-05,
      "loss": 0.5971,
      "step": 176
    },
    {
      "epoch": 0.7169620253164557,
      "grad_norm": 0.5345013170992413,
      "learning_rate": 3.976560847560697e-05,
      "loss": 0.6115,
      "step": 177
    },
    {
      "epoch": 0.7210126582278481,
      "grad_norm": 0.5568082783726575,
      "learning_rate": 3.9756864721399456e-05,
      "loss": 0.6034,
      "step": 178
    },
    {
      "epoch": 0.7250632911392405,
      "grad_norm": 0.5254289375831895,
      "learning_rate": 3.974796184811956e-05,
      "loss": 0.5741,
      "step": 179
    },
    {
      "epoch": 0.7291139240506329,
      "grad_norm": 0.5545690505018157,
      "learning_rate": 3.973889992746979e-05,
      "loss": 0.581,
      "step": 180
    },
    {
      "epoch": 0.7331645569620253,
      "grad_norm": 0.5051485704407143,
      "learning_rate": 3.972967903243361e-05,
      "loss": 0.59,
      "step": 181
    },
    {
      "epoch": 0.7372151898734177,
      "grad_norm": 0.5277848667754391,
      "learning_rate": 3.972029923727486e-05,
      "loss": 0.6101,
      "step": 182
    },
    {
      "epoch": 0.7412658227848101,
      "grad_norm": 0.5084676229680698,
      "learning_rate": 3.971076061753709e-05,
      "loss": 0.6131,
      "step": 183
    },
    {
      "epoch": 0.7453164556962025,
      "grad_norm": 0.60984490654569,
      "learning_rate": 3.9701063250043066e-05,
      "loss": 0.6105,
      "step": 184
    },
    {
      "epoch": 0.7493670886075949,
      "grad_norm": 0.5591147215011563,
      "learning_rate": 3.969120721289402e-05,
      "loss": 0.6062,
      "step": 185
    },
    {
      "epoch": 0.7534177215189873,
      "grad_norm": 0.5010226640977942,
      "learning_rate": 3.9681192585469146e-05,
      "loss": 0.5988,
      "step": 186
    },
    {
      "epoch": 0.7574683544303797,
      "grad_norm": 0.48361277255168245,
      "learning_rate": 3.9671019448424865e-05,
      "loss": 0.5968,
      "step": 187
    },
    {
      "epoch": 0.7615189873417721,
      "grad_norm": 0.5186851571985629,
      "learning_rate": 3.966068788369422e-05,
      "loss": 0.6003,
      "step": 188
    },
    {
      "epoch": 0.7655696202531646,
      "grad_norm": 0.5478305860906268,
      "learning_rate": 3.965019797448622e-05,
      "loss": 0.6009,
      "step": 189
    },
    {
      "epoch": 0.769620253164557,
      "grad_norm": 0.4956522694001798,
      "learning_rate": 3.963954980528515e-05,
      "loss": 0.6016,
      "step": 190
    },
    {
      "epoch": 0.7736708860759494,
      "grad_norm": 0.6015328330076325,
      "learning_rate": 3.9628743461849905e-05,
      "loss": 0.5872,
      "step": 191
    },
    {
      "epoch": 0.7777215189873418,
      "grad_norm": 0.584511705426499,
      "learning_rate": 3.961777903121329e-05,
      "loss": 0.5964,
      "step": 192
    },
    {
      "epoch": 0.7817721518987342,
      "grad_norm": 0.574769932895746,
      "learning_rate": 3.960665660168131e-05,
      "loss": 0.6301,
      "step": 193
    },
    {
      "epoch": 0.7858227848101266,
      "grad_norm": 0.7597426887703222,
      "learning_rate": 3.9595376262832485e-05,
      "loss": 0.6443,
      "step": 194
    },
    {
      "epoch": 0.789873417721519,
      "grad_norm": 0.5802595370489321,
      "learning_rate": 3.9583938105517127e-05,
      "loss": 0.5915,
      "step": 195
    },
    {
      "epoch": 0.7939240506329114,
      "grad_norm": 0.6741671606982922,
      "learning_rate": 3.957234222185657e-05,
      "loss": 0.6108,
      "step": 196
    },
    {
      "epoch": 0.7979746835443038,
      "grad_norm": 0.6779656992456008,
      "learning_rate": 3.9560588705242474e-05,
      "loss": 0.6172,
      "step": 197
    },
    {
      "epoch": 0.8020253164556962,
      "grad_norm": 0.46610432599374396,
      "learning_rate": 3.954867765033605e-05,
      "loss": 0.602,
      "step": 198
    },
    {
      "epoch": 0.8060759493670886,
      "grad_norm": 0.5860503271918165,
      "learning_rate": 3.953660915306728e-05,
      "loss": 0.582,
      "step": 199
    },
    {
      "epoch": 0.810126582278481,
      "grad_norm": 0.47741833706033293,
      "learning_rate": 3.952438331063419e-05,
      "loss": 0.5862,
      "step": 200
    },
    {
      "epoch": 0.8141772151898734,
      "grad_norm": 0.5916555935168274,
      "learning_rate": 3.951200022150205e-05,
      "loss": 0.6036,
      "step": 201
    },
    {
      "epoch": 0.8182278481012658,
      "grad_norm": 0.5030102582206382,
      "learning_rate": 3.949945998540253e-05,
      "loss": 0.5816,
      "step": 202
    },
    {
      "epoch": 0.8222784810126582,
      "grad_norm": 0.5102149560705204,
      "learning_rate": 3.9486762703332993e-05,
      "loss": 0.5922,
      "step": 203
    },
    {
      "epoch": 0.8263291139240506,
      "grad_norm": 0.5148338451792978,
      "learning_rate": 3.947390847755559e-05,
      "loss": 0.5757,
      "step": 204
    },
    {
      "epoch": 0.830379746835443,
      "grad_norm": 0.46217041373978324,
      "learning_rate": 3.946089741159648e-05,
      "loss": 0.5835,
      "step": 205
    },
    {
      "epoch": 0.8344303797468354,
      "grad_norm": 0.4976071018127545,
      "learning_rate": 3.944772961024501e-05,
      "loss": 0.5806,
      "step": 206
    },
    {
      "epoch": 0.8384810126582278,
      "grad_norm": 0.4732137221621373,
      "learning_rate": 3.943440517955285e-05,
      "loss": 0.5848,
      "step": 207
    },
    {
      "epoch": 0.8425316455696202,
      "grad_norm": 0.5081718159431623,
      "learning_rate": 3.9420924226833126e-05,
      "loss": 0.5901,
      "step": 208
    },
    {
      "epoch": 0.8465822784810126,
      "grad_norm": 0.6346184408951029,
      "learning_rate": 3.9407286860659566e-05,
      "loss": 0.6291,
      "step": 209
    },
    {
      "epoch": 0.850632911392405,
      "grad_norm": 0.5018102279464927,
      "learning_rate": 3.9393493190865657e-05,
      "loss": 0.6144,
      "step": 210
    },
    {
      "epoch": 0.8546835443037974,
      "grad_norm": 0.501579611506324,
      "learning_rate": 3.937954332854371e-05,
      "loss": 0.5979,
      "step": 211
    },
    {
      "epoch": 0.8587341772151899,
      "grad_norm": 0.5629385199921488,
      "learning_rate": 3.9365437386044016e-05,
      "loss": 0.6334,
      "step": 212
    },
    {
      "epoch": 0.8627848101265823,
      "grad_norm": 0.58033578996432,
      "learning_rate": 3.935117547697387e-05,
      "loss": 0.5984,
      "step": 213
    },
    {
      "epoch": 0.8668354430379747,
      "grad_norm": 0.44027330863922365,
      "learning_rate": 3.933675771619675e-05,
      "loss": 0.5732,
      "step": 214
    },
    {
      "epoch": 0.8708860759493671,
      "grad_norm": 0.594747973179513,
      "learning_rate": 3.932218421983131e-05,
      "loss": 0.5971,
      "step": 215
    },
    {
      "epoch": 0.8749367088607595,
      "grad_norm": 0.4853521947125498,
      "learning_rate": 3.9307455105250484e-05,
      "loss": 0.6289,
      "step": 216
    },
    {
      "epoch": 0.8789873417721519,
      "grad_norm": 0.44834791621734155,
      "learning_rate": 3.929257049108054e-05,
      "loss": 0.585,
      "step": 217
    },
    {
      "epoch": 0.8830379746835443,
      "grad_norm": 0.4589296718851956,
      "learning_rate": 3.927753049720011e-05,
      "loss": 0.5856,
      "step": 218
    },
    {
      "epoch": 0.8870886075949367,
      "grad_norm": 0.5304748376249275,
      "learning_rate": 3.9262335244739234e-05,
      "loss": 0.6053,
      "step": 219
    },
    {
      "epoch": 0.8911392405063291,
      "grad_norm": 0.465515468585629,
      "learning_rate": 3.92469848560784e-05,
      "loss": 0.5771,
      "step": 220
    },
    {
      "epoch": 0.8951898734177215,
      "grad_norm": 0.5187602764975598,
      "learning_rate": 3.923147945484751e-05,
      "loss": 0.6214,
      "step": 221
    },
    {
      "epoch": 0.8992405063291139,
      "grad_norm": 0.5157609367399433,
      "learning_rate": 3.9215819165924956e-05,
      "loss": 0.5738,
      "step": 222
    },
    {
      "epoch": 0.9032911392405063,
      "grad_norm": 0.4947264924900894,
      "learning_rate": 3.920000411543654e-05,
      "loss": 0.6205,
      "step": 223
    },
    {
      "epoch": 0.9073417721518987,
      "grad_norm": 0.5657917813269095,
      "learning_rate": 3.9184034430754495e-05,
      "loss": 0.5876,
      "step": 224
    },
    {
      "epoch": 0.9113924050632911,
      "grad_norm": 0.5197020692561348,
      "learning_rate": 3.916791024049648e-05,
      "loss": 0.5653,
      "step": 225
    },
    {
      "epoch": 0.9154430379746835,
      "grad_norm": 0.48195701413116193,
      "learning_rate": 3.91516316745245e-05,
      "loss": 0.5868,
      "step": 226
    },
    {
      "epoch": 0.9194936708860759,
      "grad_norm": 0.5024574769925578,
      "learning_rate": 3.913519886394389e-05,
      "loss": 0.5717,
      "step": 227
    },
    {
      "epoch": 0.9235443037974683,
      "grad_norm": 0.4567493541715026,
      "learning_rate": 3.911861194110225e-05,
      "loss": 0.5657,
      "step": 228
    },
    {
      "epoch": 0.9275949367088607,
      "grad_norm": 0.5450795603433926,
      "learning_rate": 3.910187103958837e-05,
      "loss": 0.6141,
      "step": 229
    },
    {
      "epoch": 0.9316455696202531,
      "grad_norm": 0.45535282712309183,
      "learning_rate": 3.908497629423117e-05,
      "loss": 0.5804,
      "step": 230
    },
    {
      "epoch": 0.9356962025316455,
      "grad_norm": 0.49484589406920376,
      "learning_rate": 3.9067927841098614e-05,
      "loss": 0.5788,
      "step": 231
    },
    {
      "epoch": 0.9397468354430379,
      "grad_norm": 0.554836307601814,
      "learning_rate": 3.9050725817496594e-05,
      "loss": 0.5863,
      "step": 232
    },
    {
      "epoch": 0.9437974683544303,
      "grad_norm": 0.4213365910438387,
      "learning_rate": 3.9033370361967844e-05,
      "loss": 0.5718,
      "step": 233
    },
    {
      "epoch": 0.9478481012658228,
      "grad_norm": 0.5864085685294956,
      "learning_rate": 3.901586161429081e-05,
      "loss": 0.6015,
      "step": 234
    },
    {
      "epoch": 0.9518987341772152,
      "grad_norm": 0.45369661093829344,
      "learning_rate": 3.8998199715478545e-05,
      "loss": 0.5977,
      "step": 235
    },
    {
      "epoch": 0.9559493670886076,
      "grad_norm": 0.5420358730818624,
      "learning_rate": 3.8980384807777564e-05,
      "loss": 0.6019,
      "step": 236
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.4556418754035017,
      "learning_rate": 3.896241703466667e-05,
      "loss": 0.574,
      "step": 237
    },
    {
      "epoch": 0.9640506329113924,
      "grad_norm": 0.5059351259331573,
      "learning_rate": 3.894429654085585e-05,
      "loss": 0.6248,
      "step": 238
    },
    {
      "epoch": 0.9681012658227848,
      "grad_norm": 0.44513647511365634,
      "learning_rate": 3.892602347228505e-05,
      "loss": 0.6076,
      "step": 239
    },
    {
      "epoch": 0.9721518987341772,
      "grad_norm": 0.4854613196415651,
      "learning_rate": 3.890759797612307e-05,
      "loss": 0.5855,
      "step": 240
    },
    {
      "epoch": 0.9762025316455696,
      "grad_norm": 0.42021487057433615,
      "learning_rate": 3.888902020076632e-05,
      "loss": 0.5791,
      "step": 241
    },
    {
      "epoch": 0.980253164556962,
      "grad_norm": 0.4765056042972292,
      "learning_rate": 3.887029029583764e-05,
      "loss": 0.5641,
      "step": 242
    },
    {
      "epoch": 0.9843037974683544,
      "grad_norm": 0.4677621916597309,
      "learning_rate": 3.8851408412185125e-05,
      "loss": 0.584,
      "step": 243
    },
    {
      "epoch": 0.9883544303797468,
      "grad_norm": 0.4897677782905455,
      "learning_rate": 3.8832374701880855e-05,
      "loss": 0.5817,
      "step": 244
    },
    {
      "epoch": 0.9924050632911392,
      "grad_norm": 0.54823939819082,
      "learning_rate": 3.881318931821972e-05,
      "loss": 0.5918,
      "step": 245
    },
    {
      "epoch": 0.9964556962025316,
      "grad_norm": 0.46854387807150577,
      "learning_rate": 3.879385241571817e-05,
      "loss": 0.5586,
      "step": 246
    },
    {
      "epoch": 1.0020253164556963,
      "grad_norm": 0.5633863557720329,
      "learning_rate": 3.8774364150112955e-05,
      "loss": 0.5219,
      "step": 247
    },
    {
      "epoch": 1.0060759493670886,
      "grad_norm": 0.5188609809682287,
      "learning_rate": 3.8754724678359884e-05,
      "loss": 0.4431,
      "step": 248
    },
    {
      "epoch": 1.010126582278481,
      "grad_norm": 0.5547705682261194,
      "learning_rate": 3.873493415863256e-05,
      "loss": 0.4446,
      "step": 249
    },
    {
      "epoch": 1.0141772151898734,
      "grad_norm": 0.7445849040323272,
      "learning_rate": 3.871499275032111e-05,
      "loss": 0.4646,
      "step": 250
    },
    {
      "epoch": 1.0182278481012659,
      "grad_norm": 0.714040220560507,
      "learning_rate": 3.869490061403091e-05,
      "loss": 0.4564,
      "step": 251
    },
    {
      "epoch": 1.0222784810126582,
      "grad_norm": 0.75956906899657,
      "learning_rate": 3.867465791158124e-05,
      "loss": 0.447,
      "step": 252
    },
    {
      "epoch": 1.0263291139240507,
      "grad_norm": 0.5331684410653749,
      "learning_rate": 3.865426480600407e-05,
      "loss": 0.4363,
      "step": 253
    },
    {
      "epoch": 1.030379746835443,
      "grad_norm": 0.8099532384256963,
      "learning_rate": 3.863372146154264e-05,
      "loss": 0.4531,
      "step": 254
    },
    {
      "epoch": 1.0344303797468355,
      "grad_norm": 0.768728346767181,
      "learning_rate": 3.861302804365024e-05,
      "loss": 0.4798,
      "step": 255
    },
    {
      "epoch": 1.0384810126582278,
      "grad_norm": 0.7586718198939353,
      "learning_rate": 3.85921847189888e-05,
      "loss": 0.4393,
      "step": 256
    },
    {
      "epoch": 1.0425316455696203,
      "grad_norm": 0.5074153304577635,
      "learning_rate": 3.85711916554276e-05,
      "loss": 0.4413,
      "step": 257
    },
    {
      "epoch": 1.0465822784810126,
      "grad_norm": 0.8203666231165918,
      "learning_rate": 3.85500490220419e-05,
      "loss": 0.443,
      "step": 258
    },
    {
      "epoch": 1.0506329113924051,
      "grad_norm": 0.5644041730740964,
      "learning_rate": 3.852875698911154e-05,
      "loss": 0.4438,
      "step": 259
    },
    {
      "epoch": 1.0546835443037974,
      "grad_norm": 0.6421064685223954,
      "learning_rate": 3.850731572811963e-05,
      "loss": 0.4586,
      "step": 260
    },
    {
      "epoch": 1.05873417721519,
      "grad_norm": 0.6920875272272764,
      "learning_rate": 3.848572541175116e-05,
      "loss": 0.4329,
      "step": 261
    },
    {
      "epoch": 1.0627848101265822,
      "grad_norm": 0.5470370442846337,
      "learning_rate": 3.846398621389154e-05,
      "loss": 0.4407,
      "step": 262
    },
    {
      "epoch": 1.0668354430379747,
      "grad_norm": 0.6373835166651246,
      "learning_rate": 3.84420983096253e-05,
      "loss": 0.443,
      "step": 263
    },
    {
      "epoch": 1.070886075949367,
      "grad_norm": 0.7033504585266319,
      "learning_rate": 3.8420061875234606e-05,
      "loss": 0.4427,
      "step": 264
    },
    {
      "epoch": 1.0749367088607595,
      "grad_norm": 0.4856772162082972,
      "learning_rate": 3.839787708819787e-05,
      "loss": 0.4465,
      "step": 265
    },
    {
      "epoch": 1.0789873417721518,
      "grad_norm": 0.7306608438137587,
      "learning_rate": 3.8375544127188325e-05,
      "loss": 0.4424,
      "step": 266
    },
    {
      "epoch": 1.0830379746835443,
      "grad_norm": 0.5840008775376311,
      "learning_rate": 3.8353063172072564e-05,
      "loss": 0.44,
      "step": 267
    },
    {
      "epoch": 1.0870886075949366,
      "grad_norm": 0.5263219910705594,
      "learning_rate": 3.8330434403909105e-05,
      "loss": 0.4573,
      "step": 268
    },
    {
      "epoch": 1.0911392405063292,
      "grad_norm": 0.6172437077847818,
      "learning_rate": 3.8307658004946934e-05,
      "loss": 0.4459,
      "step": 269
    },
    {
      "epoch": 1.0951898734177214,
      "grad_norm": 0.48889895571295977,
      "learning_rate": 3.8284734158624046e-05,
      "loss": 0.4604,
      "step": 270
    },
    {
      "epoch": 1.099240506329114,
      "grad_norm": 0.5352382594845302,
      "learning_rate": 3.826166304956594e-05,
      "loss": 0.4537,
      "step": 271
    },
    {
      "epoch": 1.1032911392405063,
      "grad_norm": 0.6234904958325614,
      "learning_rate": 3.8238444863584164e-05,
      "loss": 0.4307,
      "step": 272
    },
    {
      "epoch": 1.1073417721518988,
      "grad_norm": 0.45936000688763257,
      "learning_rate": 3.821507978767479e-05,
      "loss": 0.4453,
      "step": 273
    },
    {
      "epoch": 1.111392405063291,
      "grad_norm": 0.6956214833856849,
      "learning_rate": 3.819156801001693e-05,
      "loss": 0.4577,
      "step": 274
    },
    {
      "epoch": 1.1154430379746836,
      "grad_norm": 0.5619224841540308,
      "learning_rate": 3.816790971997121e-05,
      "loss": 0.4453,
      "step": 275
    },
    {
      "epoch": 1.1194936708860759,
      "grad_norm": 0.4564171928605993,
      "learning_rate": 3.8144105108078246e-05,
      "loss": 0.4287,
      "step": 276
    },
    {
      "epoch": 1.1235443037974684,
      "grad_norm": 0.502058528731428,
      "learning_rate": 3.81201543660571e-05,
      "loss": 0.4099,
      "step": 277
    },
    {
      "epoch": 1.1275949367088607,
      "grad_norm": 0.4310338912861414,
      "learning_rate": 3.809605768680377e-05,
      "loss": 0.4334,
      "step": 278
    },
    {
      "epoch": 1.1316455696202532,
      "grad_norm": 0.49592284721689966,
      "learning_rate": 3.807181526438958e-05,
      "loss": 0.4435,
      "step": 279
    },
    {
      "epoch": 1.1356962025316455,
      "grad_norm": 0.474191544758235,
      "learning_rate": 3.8047427294059697e-05,
      "loss": 0.4626,
      "step": 280
    },
    {
      "epoch": 1.139746835443038,
      "grad_norm": 0.4478609087781942,
      "learning_rate": 3.802289397223145e-05,
      "loss": 0.4482,
      "step": 281
    },
    {
      "epoch": 1.1437974683544303,
      "grad_norm": 0.4966344590039205,
      "learning_rate": 3.7998215496492854e-05,
      "loss": 0.4534,
      "step": 282
    },
    {
      "epoch": 1.1478481012658228,
      "grad_norm": 0.502151197830293,
      "learning_rate": 3.797339206560096e-05,
      "loss": 0.4377,
      "step": 283
    },
    {
      "epoch": 1.1518987341772151,
      "grad_norm": 0.5390038357940732,
      "learning_rate": 3.794842387948027e-05,
      "loss": 0.4536,
      "step": 284
    },
    {
      "epoch": 1.1559493670886076,
      "grad_norm": 0.48948266370451937,
      "learning_rate": 3.7923311139221114e-05,
      "loss": 0.4254,
      "step": 285
    },
    {
      "epoch": 1.16,
      "grad_norm": 0.5793884819174924,
      "learning_rate": 3.7898054047078054e-05,
      "loss": 0.4661,
      "step": 286
    },
    {
      "epoch": 1.1640506329113924,
      "grad_norm": 0.5630897459045852,
      "learning_rate": 3.787265280646825e-05,
      "loss": 0.4675,
      "step": 287
    },
    {
      "epoch": 1.1681012658227847,
      "grad_norm": 0.49622049641019533,
      "learning_rate": 3.7847107621969786e-05,
      "loss": 0.4491,
      "step": 288
    },
    {
      "epoch": 1.1721518987341772,
      "grad_norm": 0.5108146999686072,
      "learning_rate": 3.7821418699320064e-05,
      "loss": 0.4332,
      "step": 289
    },
    {
      "epoch": 1.1762025316455695,
      "grad_norm": 0.5825240746219692,
      "learning_rate": 3.7795586245414145e-05,
      "loss": 0.4564,
      "step": 290
    },
    {
      "epoch": 1.180253164556962,
      "grad_norm": 0.48447503726621205,
      "learning_rate": 3.776961046830306e-05,
      "loss": 0.467,
      "step": 291
    },
    {
      "epoch": 1.1843037974683543,
      "grad_norm": 0.5193482826349367,
      "learning_rate": 3.774349157719215e-05,
      "loss": 0.4557,
      "step": 292
    },
    {
      "epoch": 1.1883544303797469,
      "grad_norm": 0.4853480064064051,
      "learning_rate": 3.7717229782439365e-05,
      "loss": 0.4684,
      "step": 293
    },
    {
      "epoch": 1.1924050632911392,
      "grad_norm": 0.5151775463593634,
      "learning_rate": 3.769082529555359e-05,
      "loss": 0.4309,
      "step": 294
    },
    {
      "epoch": 1.1964556962025317,
      "grad_norm": 0.42011065690869204,
      "learning_rate": 3.766427832919294e-05,
      "loss": 0.4415,
      "step": 295
    },
    {
      "epoch": 1.200506329113924,
      "grad_norm": 0.5144041584733956,
      "learning_rate": 3.7637589097163024e-05,
      "loss": 0.4429,
      "step": 296
    },
    {
      "epoch": 1.2045569620253165,
      "grad_norm": 0.4636271891458896,
      "learning_rate": 3.761075781441526e-05,
      "loss": 0.4385,
      "step": 297
    },
    {
      "epoch": 1.2086075949367088,
      "grad_norm": 0.50328932470835,
      "learning_rate": 3.75837846970451e-05,
      "loss": 0.432,
      "step": 298
    },
    {
      "epoch": 1.2126582278481013,
      "grad_norm": 0.4656628257324611,
      "learning_rate": 3.755666996229032e-05,
      "loss": 0.4531,
      "step": 299
    },
    {
      "epoch": 1.2167088607594936,
      "grad_norm": 0.44976410243264225,
      "learning_rate": 3.752941382852927e-05,
      "loss": 0.4463,
      "step": 300
    },
    {
      "epoch": 1.220759493670886,
      "grad_norm": 0.5198217282151804,
      "learning_rate": 3.7502016515279115e-05,
      "loss": 0.4573,
      "step": 301
    },
    {
      "epoch": 1.2248101265822784,
      "grad_norm": 0.4568922256249722,
      "learning_rate": 3.7474478243194043e-05,
      "loss": 0.4603,
      "step": 302
    },
    {
      "epoch": 1.228860759493671,
      "grad_norm": 0.5631822047739654,
      "learning_rate": 3.744679923406351e-05,
      "loss": 0.4388,
      "step": 303
    },
    {
      "epoch": 1.2329113924050632,
      "grad_norm": 0.44137016357884423,
      "learning_rate": 3.741897971081043e-05,
      "loss": 0.4807,
      "step": 304
    },
    {
      "epoch": 1.2369620253164557,
      "grad_norm": 0.5229933917790297,
      "learning_rate": 3.739101989748946e-05,
      "loss": 0.4381,
      "step": 305
    },
    {
      "epoch": 1.241012658227848,
      "grad_norm": 0.47522144522921916,
      "learning_rate": 3.7362920019285066e-05,
      "loss": 0.4568,
      "step": 306
    },
    {
      "epoch": 1.2450632911392405,
      "grad_norm": 0.5124681465336983,
      "learning_rate": 3.73346803025098e-05,
      "loss": 0.433,
      "step": 307
    },
    {
      "epoch": 1.2491139240506328,
      "grad_norm": 0.4400194876893733,
      "learning_rate": 3.730630097460247e-05,
      "loss": 0.4575,
      "step": 308
    },
    {
      "epoch": 1.2531645569620253,
      "grad_norm": 0.511099663546347,
      "learning_rate": 3.727778226412628e-05,
      "loss": 0.4463,
      "step": 309
    },
    {
      "epoch": 1.2572151898734178,
      "grad_norm": 0.4794562174128532,
      "learning_rate": 3.7249124400767006e-05,
      "loss": 0.4628,
      "step": 310
    },
    {
      "epoch": 1.2612658227848101,
      "grad_norm": 0.5003092145524097,
      "learning_rate": 3.722032761533114e-05,
      "loss": 0.4499,
      "step": 311
    },
    {
      "epoch": 1.2653164556962024,
      "grad_norm": 0.5559576602021349,
      "learning_rate": 3.719139213974403e-05,
      "loss": 0.4506,
      "step": 312
    },
    {
      "epoch": 1.269367088607595,
      "grad_norm": 0.5379625062679073,
      "learning_rate": 3.7162318207048006e-05,
      "loss": 0.4578,
      "step": 313
    },
    {
      "epoch": 1.2734177215189875,
      "grad_norm": 0.5268942142243712,
      "learning_rate": 3.713310605140055e-05,
      "loss": 0.4596,
      "step": 314
    },
    {
      "epoch": 1.2774683544303798,
      "grad_norm": 0.6285710810473106,
      "learning_rate": 3.710375590807233e-05,
      "loss": 0.4648,
      "step": 315
    },
    {
      "epoch": 1.281518987341772,
      "grad_norm": 0.5072507376263062,
      "learning_rate": 3.7074268013445365e-05,
      "loss": 0.4616,
      "step": 316
    },
    {
      "epoch": 1.2855696202531646,
      "grad_norm": 0.6338686214760938,
      "learning_rate": 3.7044642605011114e-05,
      "loss": 0.4542,
      "step": 317
    },
    {
      "epoch": 1.289620253164557,
      "grad_norm": 0.6082728042768922,
      "learning_rate": 3.701487992136854e-05,
      "loss": 0.4469,
      "step": 318
    },
    {
      "epoch": 1.2936708860759494,
      "grad_norm": 0.47337758511090083,
      "learning_rate": 3.69849802022222e-05,
      "loss": 0.446,
      "step": 319
    },
    {
      "epoch": 1.2977215189873417,
      "grad_norm": 0.5787152705955461,
      "learning_rate": 3.6954943688380334e-05,
      "loss": 0.4547,
      "step": 320
    },
    {
      "epoch": 1.3017721518987342,
      "grad_norm": 0.4414592539103977,
      "learning_rate": 3.692477062175289e-05,
      "loss": 0.4574,
      "step": 321
    },
    {
      "epoch": 1.3058227848101267,
      "grad_norm": 0.5300335852391126,
      "learning_rate": 3.689446124534958e-05,
      "loss": 0.4682,
      "step": 322
    },
    {
      "epoch": 1.309873417721519,
      "grad_norm": 0.4550622068242876,
      "learning_rate": 3.686401580327799e-05,
      "loss": 0.4563,
      "step": 323
    },
    {
      "epoch": 1.3139240506329113,
      "grad_norm": 0.5786825903558119,
      "learning_rate": 3.683343454074149e-05,
      "loss": 0.4416,
      "step": 324
    },
    {
      "epoch": 1.3179746835443038,
      "grad_norm": 0.5270953149436474,
      "learning_rate": 3.6802717704037386e-05,
      "loss": 0.4439,
      "step": 325
    },
    {
      "epoch": 1.3220253164556963,
      "grad_norm": 0.486133170243594,
      "learning_rate": 3.6771865540554855e-05,
      "loss": 0.45,
      "step": 326
    },
    {
      "epoch": 1.3260759493670886,
      "grad_norm": 0.5122278579874243,
      "learning_rate": 3.674087829877297e-05,
      "loss": 0.4508,
      "step": 327
    },
    {
      "epoch": 1.330126582278481,
      "grad_norm": 0.5008354188235864,
      "learning_rate": 3.6709756228258735e-05,
      "loss": 0.431,
      "step": 328
    },
    {
      "epoch": 1.3341772151898734,
      "grad_norm": 0.4593932528816152,
      "learning_rate": 3.667849957966501e-05,
      "loss": 0.4335,
      "step": 329
    },
    {
      "epoch": 1.338227848101266,
      "grad_norm": 0.4311811629427631,
      "learning_rate": 3.6647108604728546e-05,
      "loss": 0.4428,
      "step": 330
    },
    {
      "epoch": 1.3422784810126582,
      "grad_norm": 0.4697963768853903,
      "learning_rate": 3.661558355626795e-05,
      "loss": 0.4599,
      "step": 331
    },
    {
      "epoch": 1.3463291139240505,
      "grad_norm": 0.47566450638451785,
      "learning_rate": 3.658392468818163e-05,
      "loss": 0.4615,
      "step": 332
    },
    {
      "epoch": 1.350379746835443,
      "grad_norm": 0.43231101368824704,
      "learning_rate": 3.655213225544574e-05,
      "loss": 0.4619,
      "step": 333
    },
    {
      "epoch": 1.3544303797468356,
      "grad_norm": 0.5406729548176163,
      "learning_rate": 3.652020651411218e-05,
      "loss": 0.4355,
      "step": 334
    },
    {
      "epoch": 1.3584810126582278,
      "grad_norm": 0.49014672381817576,
      "learning_rate": 3.6488147721306474e-05,
      "loss": 0.4629,
      "step": 335
    },
    {
      "epoch": 1.3625316455696201,
      "grad_norm": 0.573910962527949,
      "learning_rate": 3.645595613522574e-05,
      "loss": 0.4599,
      "step": 336
    },
    {
      "epoch": 1.3665822784810127,
      "grad_norm": 0.4852929494727567,
      "learning_rate": 3.642363201513657e-05,
      "loss": 0.4543,
      "step": 337
    },
    {
      "epoch": 1.3706329113924052,
      "grad_norm": 0.49502879101106434,
      "learning_rate": 3.6391175621373006e-05,
      "loss": 0.4629,
      "step": 338
    },
    {
      "epoch": 1.3746835443037975,
      "grad_norm": 0.4714402271438486,
      "learning_rate": 3.6358587215334355e-05,
      "loss": 0.4251,
      "step": 339
    },
    {
      "epoch": 1.3787341772151898,
      "grad_norm": 0.4800007691991582,
      "learning_rate": 3.632586705948318e-05,
      "loss": 0.4596,
      "step": 340
    },
    {
      "epoch": 1.3827848101265823,
      "grad_norm": 0.4629135179733209,
| "learning_rate": 3.629301541734311e-05, | |
| "loss": 0.4623, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.3868354430379748, | |
| "grad_norm": 0.45008604944508745, | |
| "learning_rate": 3.626003255349676e-05, | |
| "loss": 0.4714, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.390886075949367, | |
| "grad_norm": 0.4850469389371198, | |
| "learning_rate": 3.622691873358357e-05, | |
| "loss": 0.4497, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.3949367088607594, | |
| "grad_norm": 0.5692926276664114, | |
| "learning_rate": 3.61936742242977e-05, | |
| "loss": 0.4678, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.398987341772152, | |
| "grad_norm": 0.5939729042005103, | |
| "learning_rate": 3.6160299293385864e-05, | |
| "loss": 0.4489, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.4030379746835444, | |
| "grad_norm": 0.49412739463752553, | |
| "learning_rate": 3.612679420964516e-05, | |
| "loss": 0.4702, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.4070886075949367, | |
| "grad_norm": 0.5306905007129946, | |
| "learning_rate": 3.609315924292092e-05, | |
| "loss": 0.4542, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.411139240506329, | |
| "grad_norm": 0.4707680382867705, | |
| "learning_rate": 3.6059394664104554e-05, | |
| "loss": 0.4585, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.4151898734177215, | |
| "grad_norm": 0.4877057568878019, | |
| "learning_rate": 3.602550074513133e-05, | |
| "loss": 0.4622, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.419240506329114, | |
| "grad_norm": 0.4762358792917313, | |
| "learning_rate": 3.599147775897822e-05, | |
| "loss": 0.4602, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.4232911392405063, | |
| "grad_norm": 0.5109987134418685, | |
| "learning_rate": 3.595732597966167e-05, | |
| "loss": 0.4535, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.4273417721518986, | |
| "grad_norm": 0.491271194085416, | |
| "learning_rate": 3.592304568223542e-05, | |
| "loss": 0.4429, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.4313924050632911, | |
| "grad_norm": 0.4967172590213714, | |
| "learning_rate": 3.588863714278826e-05, | |
| "loss": 0.4437, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.4354430379746836, | |
| "grad_norm": 0.5327104559400961, | |
| "learning_rate": 3.585410063844186e-05, | |
| "loss": 0.4652, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.439493670886076, | |
| "grad_norm": 0.49324808623388516, | |
| "learning_rate": 3.581943644734846e-05, | |
| "loss": 0.4644, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.4435443037974682, | |
| "grad_norm": 0.6047695627817109, | |
| "learning_rate": 3.578464484868869e-05, | |
| "loss": 0.4664, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.4475949367088607, | |
| "grad_norm": 0.42114739613079893, | |
| "learning_rate": 3.5749726122669316e-05, | |
| "loss": 0.435, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.4516455696202533, | |
| "grad_norm": 0.506209229858558, | |
| "learning_rate": 3.5714680550520943e-05, | |
| "loss": 0.4432, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.4556962025316456, | |
| "grad_norm": 0.5238171396310222, | |
| "learning_rate": 3.5679508414495794e-05, | |
| "loss": 0.4602, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.4597468354430378, | |
| "grad_norm": 0.49871844135254173, | |
| "learning_rate": 3.564420999786543e-05, | |
| "loss": 0.4653, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.4637974683544304, | |
| "grad_norm": 0.5309438670903763, | |
| "learning_rate": 3.560878558491842e-05, | |
| "loss": 0.4479, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.4678481012658229, | |
| "grad_norm": 0.4539900027066011, | |
| "learning_rate": 3.5573235460958145e-05, | |
| "loss": 0.4593, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.4718987341772152, | |
| "grad_norm": 0.4767470613263526, | |
| "learning_rate": 3.553755991230039e-05, | |
| "loss": 0.4595, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.4759493670886075, | |
| "grad_norm": 0.4269770880856819, | |
| "learning_rate": 3.5501759226271144e-05, | |
| "loss": 0.4601, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "grad_norm": 0.45107684957366373, | |
| "learning_rate": 3.546583369120419e-05, | |
| "loss": 0.4334, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.4840506329113925, | |
| "grad_norm": 0.4636441270482069, | |
| "learning_rate": 3.5429783596438864e-05, | |
| "loss": 0.4537, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.4881012658227848, | |
| "grad_norm": 0.43440612282715996, | |
| "learning_rate": 3.539360923231766e-05, | |
| "loss": 0.449, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.492151898734177, | |
| "grad_norm": 0.4517059782322685, | |
| "learning_rate": 3.535731089018394e-05, | |
| "loss": 0.454, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.4962025316455696, | |
| "grad_norm": 0.4429472843587689, | |
| "learning_rate": 3.532088886237956e-05, | |
| "loss": 0.452, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.5002531645569621, | |
| "grad_norm": 0.46940435265045627, | |
| "learning_rate": 3.528434344224253e-05, | |
| "loss": 0.4519, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.5043037974683544, | |
| "grad_norm": 0.4623462098056322, | |
| "learning_rate": 3.524767492410464e-05, | |
| "loss": 0.4598, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.5083544303797467, | |
| "grad_norm": 0.4765572294797722, | |
| "learning_rate": 3.521088360328908e-05, | |
| "loss": 0.4691, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.5124050632911392, | |
| "grad_norm": 0.4680130805601337, | |
| "learning_rate": 3.517396977610811e-05, | |
| "loss": 0.4609, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.5164556962025317, | |
| "grad_norm": 0.5114083808862041, | |
| "learning_rate": 3.5136933739860595e-05, | |
| "loss": 0.4531, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.520506329113924, | |
| "grad_norm": 0.4756799865874779, | |
| "learning_rate": 3.509977579282971e-05, | |
| "loss": 0.4824, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.5245569620253163, | |
| "grad_norm": 0.5243241690660272, | |
| "learning_rate": 3.5062496234280424e-05, | |
| "loss": 0.4598, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.5286075949367088, | |
| "grad_norm": 0.49529062534122525, | |
| "learning_rate": 3.502509536445719e-05, | |
| "loss": 0.4636, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.5326582278481014, | |
| "grad_norm": 0.4439672604767949, | |
| "learning_rate": 3.498757348458147e-05, | |
| "loss": 0.4542, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.5367088607594936, | |
| "grad_norm": 0.506578381776089, | |
| "learning_rate": 3.4949930896849324e-05, | |
| "loss": 0.4648, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.540759493670886, | |
| "grad_norm": 0.4295681418092351, | |
| "learning_rate": 3.491216790442899e-05, | |
| "loss": 0.4382, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.5448101265822785, | |
| "grad_norm": 0.4663286363124973, | |
| "learning_rate": 3.487428481145839e-05, | |
| "loss": 0.4628, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.548860759493671, | |
| "grad_norm": 0.44597237772341775, | |
| "learning_rate": 3.483628192304278e-05, | |
| "loss": 0.4665, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.5529113924050633, | |
| "grad_norm": 0.4855517491083876, | |
| "learning_rate": 3.479815954525219e-05, | |
| "loss": 0.4562, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.5569620253164556, | |
| "grad_norm": 0.4384263959094648, | |
| "learning_rate": 3.475991798511899e-05, | |
| "loss": 0.4526, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.561012658227848, | |
| "grad_norm": 0.41392434443482745, | |
| "learning_rate": 3.4721557550635464e-05, | |
| "loss": 0.4676, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.5650632911392406, | |
| "grad_norm": 0.5195043906093884, | |
| "learning_rate": 3.468307855075128e-05, | |
| "loss": 0.4605, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.5691139240506329, | |
| "grad_norm": 0.41103984129677196, | |
| "learning_rate": 3.4644481295371005e-05, | |
| "loss": 0.4453, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.5731645569620252, | |
| "grad_norm": 0.5209008372111822, | |
| "learning_rate": 3.460576609535163e-05, | |
| "loss": 0.4811, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.5772151898734177, | |
| "grad_norm": 0.4215880796155846, | |
| "learning_rate": 3.456693326250006e-05, | |
| "loss": 0.4547, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.5812658227848102, | |
| "grad_norm": 0.4653220020236176, | |
| "learning_rate": 3.452798310957058e-05, | |
| "loss": 0.4569, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.5853164556962025, | |
| "grad_norm": 0.4638887660948843, | |
| "learning_rate": 3.4488915950262386e-05, | |
| "loss": 0.4433, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.5893670886075948, | |
| "grad_norm": 0.43326948880282945, | |
| "learning_rate": 3.4449732099216985e-05, | |
| "loss": 0.4662, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.5934177215189873, | |
| "grad_norm": 0.4535814544804793, | |
| "learning_rate": 3.441043187201574e-05, | |
| "loss": 0.4539, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.5974683544303798, | |
| "grad_norm": 0.45924505641389524, | |
| "learning_rate": 3.437101558517728e-05, | |
| "loss": 0.4614, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.6015189873417721, | |
| "grad_norm": 0.45541308514596684, | |
| "learning_rate": 3.433148355615496e-05, | |
| "loss": 0.474, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.6055696202531644, | |
| "grad_norm": 0.4428179209505052, | |
| "learning_rate": 3.4291836103334294e-05, | |
| "loss": 0.4541, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.609620253164557, | |
| "grad_norm": 0.43482329180374724, | |
| "learning_rate": 3.425207354603043e-05, | |
| "loss": 0.4546, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.6136708860759494, | |
| "grad_norm": 0.44876333541280805, | |
| "learning_rate": 3.421219620448553e-05, | |
| "loss": 0.4596, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.6177215189873417, | |
| "grad_norm": 0.43019113540814125, | |
| "learning_rate": 3.417220439986623e-05, | |
| "loss": 0.4434, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.621772151898734, | |
| "grad_norm": 0.44856499894666524, | |
| "learning_rate": 3.4132098454261024e-05, | |
| "loss": 0.4435, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.6258227848101265, | |
| "grad_norm": 0.43820750413743054, | |
| "learning_rate": 3.4091878690677676e-05, | |
| "loss": 0.4503, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.629873417721519, | |
| "grad_norm": 0.47391853497725966, | |
| "learning_rate": 3.405154543304065e-05, | |
| "loss": 0.4338, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.6339240506329114, | |
| "grad_norm": 0.43003980024059846, | |
| "learning_rate": 3.401109900618843e-05, | |
| "loss": 0.4648, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.6379746835443036, | |
| "grad_norm": 0.4874708733022434, | |
| "learning_rate": 3.3970539735870996e-05, | |
| "loss": 0.4488, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.6420253164556962, | |
| "grad_norm": 0.43812083582408085, | |
| "learning_rate": 3.392986794874714e-05, | |
| "loss": 0.4679, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.6460759493670887, | |
| "grad_norm": 0.43930099331225564, | |
| "learning_rate": 3.388908397238184e-05, | |
| "loss": 0.452, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.650126582278481, | |
| "grad_norm": 0.47214406664877, | |
| "learning_rate": 3.384818813524362e-05, | |
| "loss": 0.4468, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.6541772151898733, | |
| "grad_norm": 0.43919344919148295, | |
| "learning_rate": 3.380718076670195e-05, | |
| "loss": 0.4496, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.6582278481012658, | |
| "grad_norm": 0.44603543789737365, | |
| "learning_rate": 3.376606219702454e-05, | |
| "loss": 0.459, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.6622784810126583, | |
| "grad_norm": 0.4404855779437762, | |
| "learning_rate": 3.372483275737468e-05, | |
| "loss": 0.4459, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.6663291139240506, | |
| "grad_norm": 0.42436801220420267, | |
| "learning_rate": 3.368349277980861e-05, | |
| "loss": 0.4626, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.6703797468354429, | |
| "grad_norm": 0.44749319863306847, | |
| "learning_rate": 3.3642042597272844e-05, | |
| "loss": 0.4569, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.6744303797468354, | |
| "grad_norm": 0.4952571555558035, | |
| "learning_rate": 3.360048254360144e-05, | |
| "loss": 0.4491, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.678481012658228, | |
| "grad_norm": 0.46748168797632306, | |
| "learning_rate": 3.355881295351336e-05, | |
| "loss": 0.4655, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.6825316455696202, | |
| "grad_norm": 0.5727876468946981, | |
| "learning_rate": 3.351703416260975e-05, | |
| "loss": 0.465, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.6865822784810125, | |
| "grad_norm": 0.4347978718167872, | |
| "learning_rate": 3.347514650737126e-05, | |
| "loss": 0.4592, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.690632911392405, | |
| "grad_norm": 0.4435845503073326, | |
| "learning_rate": 3.3433150325155295e-05, | |
| "loss": 0.455, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.6946835443037975, | |
| "grad_norm": 0.4158224580776832, | |
| "learning_rate": 3.339104595419334e-05, | |
| "loss": 0.4472, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.6987341772151898, | |
| "grad_norm": 0.4478745541443723, | |
| "learning_rate": 3.3348833733588204e-05, | |
| "loss": 0.4494, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.7027848101265821, | |
| "grad_norm": 0.4129900649103557, | |
| "learning_rate": 3.3306514003311305e-05, | |
| "loss": 0.4415, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.7068354430379746, | |
| "grad_norm": 0.4831878934303196, | |
| "learning_rate": 3.326408710419996e-05, | |
| "loss": 0.4621, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.7108860759493671, | |
| "grad_norm": 0.42428741570828865, | |
| "learning_rate": 3.322155337795454e-05, | |
| "loss": 0.4583, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.7149367088607594, | |
| "grad_norm": 0.4570411194195891, | |
| "learning_rate": 3.317891316713587e-05, | |
| "loss": 0.4411, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.7189873417721517, | |
| "grad_norm": 0.42553195113640907, | |
| "learning_rate": 3.313616681516231e-05, | |
| "loss": 0.4595, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.7230379746835442, | |
| "grad_norm": 0.45539003711904885, | |
| "learning_rate": 3.309331466630713e-05, | |
| "loss": 0.4583, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.7270886075949368, | |
| "grad_norm": 0.4351079798004603, | |
| "learning_rate": 3.305035706569563e-05, | |
| "loss": 0.4576, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.7311392405063293, | |
| "grad_norm": 0.4219817705375046, | |
| "learning_rate": 3.3007294359302433e-05, | |
| "loss": 0.4557, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.7351898734177216, | |
| "grad_norm": 0.5272735002301088, | |
| "learning_rate": 3.296412689394864e-05, | |
| "loss": 0.4408, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.7392405063291139, | |
| "grad_norm": 0.4737049647975755, | |
| "learning_rate": 3.292085501729909e-05, | |
| "loss": 0.4589, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.7432911392405064, | |
| "grad_norm": 0.49473794488911804, | |
| "learning_rate": 3.2877479077859534e-05, | |
| "loss": 0.4439, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.747341772151899, | |
| "grad_norm": 0.4643682283016952, | |
| "learning_rate": 3.283399942497381e-05, | |
| "loss": 0.4704, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.7513924050632912, | |
| "grad_norm": 0.4357168056810496, | |
| "learning_rate": 3.279041640882108e-05, | |
| "loss": 0.4618, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.7554430379746835, | |
| "grad_norm": 0.4491691603154195, | |
| "learning_rate": 3.2746730380412964e-05, | |
| "loss": 0.4591, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.759493670886076, | |
| "grad_norm": 0.4681970573954517, | |
| "learning_rate": 3.2702941691590726e-05, | |
| "loss": 0.4456, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.7635443037974685, | |
| "grad_norm": 0.4394205977094379, | |
| "learning_rate": 3.265905069502244e-05, | |
| "loss": 0.4663, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.7675949367088608, | |
| "grad_norm": 0.43439412551988177, | |
| "learning_rate": 3.261505774420016e-05, | |
| "loss": 0.454, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.771645569620253, | |
| "grad_norm": 0.4477708968259312, | |
| "learning_rate": 3.257096319343707e-05, | |
| "loss": 0.4395, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.7756962025316456, | |
| "grad_norm": 0.43845935059967445, | |
| "learning_rate": 3.2526767397864614e-05, | |
| "loss": 0.4523, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.7797468354430381, | |
| "grad_norm": 0.446495679262483, | |
| "learning_rate": 3.248247071342966e-05, | |
| "loss": 0.4748, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.7837974683544304, | |
| "grad_norm": 0.4661263167468186, | |
| "learning_rate": 3.243807349689161e-05, | |
| "loss": 0.455, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.7878481012658227, | |
| "grad_norm": 0.42408841809510944, | |
| "learning_rate": 3.2393576105819544e-05, | |
| "loss": 0.4641, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.7918987341772152, | |
| "grad_norm": 0.45567877440852483, | |
| "learning_rate": 3.2348978898589333e-05, | |
| "loss": 0.464, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.7959493670886078, | |
| "grad_norm": 0.4468376555201501, | |
| "learning_rate": 3.230428223438075e-05, | |
| "loss": 0.4609, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "grad_norm": 0.4718183132512513, | |
| "learning_rate": 3.225948647317459e-05, | |
| "loss": 0.438, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 1.8040506329113923, | |
| "grad_norm": 0.42337369145963594, | |
| "learning_rate": 3.2214591975749745e-05, | |
| "loss": 0.4377, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 1.8081012658227849, | |
| "grad_norm": 0.47856531600688323, | |
| "learning_rate": 3.216959910368034e-05, | |
| "loss": 0.4579, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 1.8121518987341774, | |
| "grad_norm": 0.4360984135190318, | |
| "learning_rate": 3.212450821933277e-05, | |
| "loss": 0.4471, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 1.8162025316455697, | |
| "grad_norm": 0.4442290277389359, | |
| "learning_rate": 3.207931968586281e-05, | |
| "loss": 0.4444, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.820253164556962, | |
| "grad_norm": 0.4131124126315819, | |
| "learning_rate": 3.203403386721272e-05, | |
| "loss": 0.4647, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.8243037974683545, | |
| "grad_norm": 0.44953520073882774, | |
| "learning_rate": 3.1988651128108245e-05, | |
| "loss": 0.4685, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.828354430379747, | |
| "grad_norm": 0.40941125401912676, | |
| "learning_rate": 3.194317183405573e-05, | |
| "loss": 0.472, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.8324050632911393, | |
| "grad_norm": 0.4735092993184471, | |
| "learning_rate": 3.189759635133914e-05, | |
| "loss": 0.452, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.8364556962025316, | |
| "grad_norm": 0.4247818089797212, | |
| "learning_rate": 3.185192504701718e-05, | |
| "loss": 0.4585, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.840506329113924, | |
| "grad_norm": 0.48078805828342847, | |
| "learning_rate": 3.1806158288920234e-05, | |
| "loss": 0.4688, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.8445569620253166, | |
| "grad_norm": 0.44447642491894224, | |
| "learning_rate": 3.1760296445647477e-05, | |
| "loss": 0.465, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.848607594936709, | |
| "grad_norm": 0.4714152855182472, | |
| "learning_rate": 3.1714339886563896e-05, | |
| "loss": 0.4549, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.8526582278481012, | |
| "grad_norm": 0.45894251730168717, | |
| "learning_rate": 3.166828898179731e-05, | |
| "loss": 0.4558, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.8567088607594937, | |
| "grad_norm": 0.43950829711740313, | |
| "learning_rate": 3.162214410223536e-05, | |
| "loss": 0.4556, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.8607594936708862, | |
| "grad_norm": 0.4889459407202916, | |
| "learning_rate": 3.157590561952257e-05, | |
| "loss": 0.4405, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.8648101265822785, | |
| "grad_norm": 0.4393466663256847, | |
| "learning_rate": 3.152957390605732e-05, | |
| "loss": 0.44, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.8688607594936708, | |
| "grad_norm": 0.5086927007474396, | |
| "learning_rate": 3.148314933498886e-05, | |
| "loss": 0.454, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.8729113924050633, | |
| "grad_norm": 0.47419912012984017, | |
| "learning_rate": 3.143663228021431e-05, | |
| "loss": 0.4734, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.8769620253164558, | |
| "grad_norm": 0.4309235410855153, | |
| "learning_rate": 3.1390023116375624e-05, | |
| "loss": 0.4628, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.8810126582278481, | |
| "grad_norm": 0.4575207430224126, | |
| "learning_rate": 3.134332221885661e-05, | |
| "loss": 0.4539, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.8850632911392404, | |
| "grad_norm": 0.475994213750644, | |
| "learning_rate": 3.129652996377987e-05, | |
| "loss": 0.4588, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.889113924050633, | |
| "grad_norm": 0.46076457077095717, | |
| "learning_rate": 3.12496467280038e-05, | |
| "loss": 0.442, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.8931645569620255, | |
| "grad_norm": 0.497514459401866, | |
| "learning_rate": 3.120267288911952e-05, | |
| "loss": 0.4511, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.8972151898734178, | |
| "grad_norm": 0.42541323729098723, | |
| "learning_rate": 3.11556088254479e-05, | |
| "loss": 0.4593, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.90126582278481, | |
| "grad_norm": 0.4720109625021154, | |
| "learning_rate": 3.11084549160364e-05, | |
| "loss": 0.4606, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.9053164556962026, | |
| "grad_norm": 0.4446298839503674, | |
| "learning_rate": 3.106121154065615e-05, | |
| "loss": 0.4458, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.909367088607595, | |
| "grad_norm": 0.47428217959295255, | |
| "learning_rate": 3.1013879079798805e-05, | |
| "loss": 0.4617, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.9134177215189874, | |
| "grad_norm": 0.40940714090223096, | |
| "learning_rate": 3.096645791467348e-05, | |
| "loss": 0.4336, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.9174683544303797, | |
| "grad_norm": 0.5233991800769894, | |
| "learning_rate": 3.091894842720373e-05, | |
| "loss": 0.4497, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.9215189873417722, | |
| "grad_norm": 0.42471419218443895, | |
| "learning_rate": 3.0871351000024425e-05, | |
| "loss": 0.4607, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.9255696202531647, | |
| "grad_norm": 0.4966401604773226, | |
| "learning_rate": 3.0823666016478716e-05, | |
| "loss": 0.4868, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.929620253164557, | |
| "grad_norm": 0.4878366198257101, | |
| "learning_rate": 3.0775893860614896e-05, | |
| "loss": 0.4623, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.9336708860759493, | |
| "grad_norm": 0.4660042280347889, | |
| "learning_rate": 3.0728034917183336e-05, | |
| "loss": 0.4389, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.9377215189873418, | |
| "grad_norm": 0.4910785438015133, | |
| "learning_rate": 3.06800895716334e-05, | |
| "loss": 0.4576, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.9417721518987343, | |
| "grad_norm": 0.4448322951417005, | |
| "learning_rate": 3.063205821011029e-05, | |
| "loss": 0.4701, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.9458227848101266, | |
| "grad_norm": 0.46291023850748136, | |
| "learning_rate": 3.0583941219452016e-05, | |
| "loss": 0.4617, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.949873417721519, | |
| "grad_norm": 0.46636419055788425, | |
| "learning_rate": 3.053573898718618e-05, | |
| "loss": 0.4679, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.9539240506329114, | |
| "grad_norm": 0.4841778524316119, | |
| "learning_rate": 3.0487451901526956e-05, | |
| "loss": 0.4672, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.957974683544304, | |
| "grad_norm": 0.49723537633003567, | |
| "learning_rate": 3.0439080351371875e-05, | |
| "loss": 0.4583, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.9620253164556962, | |
| "grad_norm": 0.4599579440129499, | |
| "learning_rate": 3.0390624726298764e-05, | |
| "loss": 0.4393, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.9660759493670885, | |
| "grad_norm": 0.5177015884252069, | |
| "learning_rate": 3.034208541656255e-05, | |
| "loss": 0.4562, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.970126582278481, | |
| "grad_norm": 0.44692843897407725, | |
| "learning_rate": 3.029346281309218e-05, | |
| "loss": 0.4557, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.9741772151898735, | |
| "grad_norm": 0.44270875705277035, | |
| "learning_rate": 3.0244757307487415e-05, | |
| "loss": 0.4575, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.9782278481012658, | |
| "grad_norm": 0.49249455920515695, | |
| "learning_rate": 3.019596929201569e-05, | |
| "loss": 0.4499, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.9822784810126581, | |
| "grad_norm": 0.42727915940760763, | |
| "learning_rate": 3.0147099159608985e-05, | |
| "loss": 0.4418, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.9863291139240506, | |
| "grad_norm": 0.46514600813093215, | |
| "learning_rate": 3.0098147303860616e-05, | |
| "loss": 0.442, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.9903797468354432, | |
| "grad_norm": 0.42257263128823197, | |
| "learning_rate": 3.0049114119022117e-05, | |
| "loss": 0.4449, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.9944303797468355, | |
| "grad_norm": 0.4389879095470197, | |
| "learning_rate": 3.0000000000000004e-05, | |
| "loss": 0.4503, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.9984810126582278, | |
| "grad_norm": 0.4058092089418687, | |
| "learning_rate": 2.995080534235264e-05, | |
| "loss": 0.4477, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 2.0040506329113925, | |
| "grad_norm": 0.887807355398491, | |
| "learning_rate": 2.9901530542287044e-05, | |
| "loss": 0.2962, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 2.008101265822785, | |
| "grad_norm": 0.5435283196155324, | |
| "learning_rate": 2.9852175996655676e-05, | |
| "loss": 0.2562, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 2.012151898734177, | |
| "grad_norm": 1.2945345590490893, | |
| "learning_rate": 2.980274210295326e-05, | |
| "loss": 0.2893, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 2.0162025316455696, | |
| "grad_norm": 0.4997376817195279, | |
| "learning_rate": 2.9753229259313578e-05, | |
| "loss": 0.2928, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 2.020253164556962, | |
| "grad_norm": 0.5761779473394475, | |
| "learning_rate": 2.9703637864506274e-05, | |
| "loss": 0.2957, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 2.0243037974683546, | |
| "grad_norm": 0.5649309049483469, | |
| "learning_rate": 2.965396831793362e-05, | |
| "loss": 0.2835, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 2.0283544303797467, | |
| "grad_norm": 0.4895935777559413, | |
| "learning_rate": 2.9604221019627316e-05, | |
| "loss": 0.2741, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 2.0324050632911392, | |
| "grad_norm": 0.49822439691553094, | |
| "learning_rate": 2.955439637024526e-05, | |
| "loss": 0.2798, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 2.0364556962025318, | |
| "grad_norm": 0.5264037679747599, | |
| "learning_rate": 2.9504494771068334e-05, | |
| "loss": 0.2806, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 2.0405063291139243, | |
| "grad_norm": 0.5538597683460899, | |
| "learning_rate": 2.9454516623997156e-05, | |
| "loss": 0.2828, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 2.0445569620253163, | |
| "grad_norm": 0.4520760213597788, | |
| "learning_rate": 2.9404462331548847e-05, | |
| "loss": 0.2803, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 2.048607594936709, | |
| "grad_norm": 0.516746700536684, | |
| "learning_rate": 2.93543322968538e-05, | |
| "loss": 0.2893, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 2.0526582278481014, | |
| "grad_norm": 0.4831625416909708, | |
| "learning_rate": 2.9304126923652428e-05, | |
| "loss": 0.2727, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 2.056708860759494, | |
| "grad_norm": 0.495872060604196, | |
| "learning_rate": 2.9253846616291896e-05, | |
| "loss": 0.2598, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 2.060759493670886, | |
| "grad_norm": 0.49543171634415145, | |
| "learning_rate": 2.9203491779722896e-05, | |
| "loss": 0.2692, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 2.0648101265822785, | |
| "grad_norm": 0.5334201360289501, | |
| "learning_rate": 2.9153062819496357e-05, | |
| "loss": 0.2765, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 2.068860759493671, | |
| "grad_norm": 0.4580726546548506, | |
| "learning_rate": 2.9102560141760178e-05, | |
| "loss": 0.3007, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 2.0729113924050635, | |
| "grad_norm": 0.4907436560405742, | |
| "learning_rate": 2.9051984153256004e-05, | |
| "loss": 0.2896, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 2.0769620253164556, | |
| "grad_norm": 0.4891987363387425, | |
| "learning_rate": 2.900133526131588e-05, | |
| "loss": 0.2675, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 2.081012658227848, | |
| "grad_norm": 0.4352949162515945, | |
| "learning_rate": 2.8950613873859025e-05, | |
| "loss": 0.2835, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 2.0850632911392406, | |
| "grad_norm": 0.47206776155641983, | |
| "learning_rate": 2.8899820399388515e-05, | |
| "loss": 0.278, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 2.089113924050633, | |
| "grad_norm": 0.447534846472364, | |
| "learning_rate": 2.8848955246988012e-05, | |
| "loss": 0.2593, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 2.093164556962025, | |
| "grad_norm": 0.4635585789510846, | |
| "learning_rate": 2.879801882631847e-05, | |
| "loss": 0.2864, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 2.0972151898734177, | |
| "grad_norm": 0.43967947059215085, | |
| "learning_rate": 2.8747011547614808e-05, | |
| "loss": 0.2755, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 2.1012658227848102, | |
| "grad_norm": 0.4796508649232373, | |
| "learning_rate": 2.8695933821682635e-05, | |
| "loss": 0.2776, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 2.1053164556962027, | |
| "grad_norm": 0.4353981468378608, | |
| "learning_rate": 2.864478605989494e-05, | |
| "loss": 0.2804, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 2.109367088607595, | |
| "grad_norm": 0.4789607000400417, | |
| "learning_rate": 2.8593568674188765e-05, | |
| "loss": 0.2768, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 2.1134177215189873, | |
| "grad_norm": 0.4205319131862905, | |
| "learning_rate": 2.8542282077061892e-05, | |
| "loss": 0.2662, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 2.11746835443038, | |
| "grad_norm": 0.5079200179667785, | |
| "learning_rate": 2.8490926681569523e-05, | |
| "loss": 0.2799, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 2.1215189873417724, | |
| "grad_norm": 0.4239292194872771, | |
| "learning_rate": 2.8439502901320956e-05, | |
| "loss": 0.2668, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 2.1255696202531644, | |
| "grad_norm": 0.474292982568178, | |
| "learning_rate": 2.8388011150476237e-05, | |
| "loss": 0.2687, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 2.129620253164557, | |
| "grad_norm": 0.42285142080403637, | |
| "learning_rate": 2.8336451843742866e-05, | |
| "loss": 0.2857, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 2.1336708860759495, | |
| "grad_norm": 0.4361514628524347, | |
| "learning_rate": 2.8284825396372387e-05, | |
| "loss": 0.2662, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 2.137721518987342, | |
| "grad_norm": 0.40444027007368194, | |
| "learning_rate": 2.8233132224157132e-05, | |
| "loss": 0.267, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 2.141772151898734, | |
| "grad_norm": 0.448971202886092, | |
| "learning_rate": 2.8181372743426805e-05, | |
| "loss": 0.2811, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 2.1458227848101266, | |
| "grad_norm": 0.4324429195721114, | |
| "learning_rate": 2.8129547371045128e-05, | |
| "loss": 0.2942, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 2.149873417721519, | |
| "grad_norm": 0.43164374881848705, | |
| "learning_rate": 2.8077656524406534e-05, | |
| "loss": 0.2742, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 2.1539240506329116, | |
| "grad_norm": 0.4438764784103316, | |
| "learning_rate": 2.802570062143278e-05, | |
| "loss": 0.2861, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 2.1579746835443037, | |
| "grad_norm": 0.40688452884485815, | |
| "learning_rate": 2.7973680080569555e-05, | |
| "loss": 0.2709, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 2.162025316455696, | |
| "grad_norm": 0.44569675704774303, | |
| "learning_rate": 2.792159532078314e-05, | |
| "loss": 0.2768, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 2.1660759493670887, | |
| "grad_norm": 0.3861782204161773, | |
| "learning_rate": 2.7869446761557033e-05, | |
| "loss": 0.2787, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 2.170126582278481, | |
| "grad_norm": 0.4614079771907007, | |
| "learning_rate": 2.781723482288857e-05, | |
| "loss": 0.2745, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 2.1741772151898733, | |
| "grad_norm": 0.40003271299013593, | |
| "learning_rate": 2.7764959925285517e-05, | |
| "loss": 0.2725, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 2.178227848101266, | |
| "grad_norm": 0.4488778262477719, | |
| "learning_rate": 2.771262248976272e-05, | |
| "loss": 0.2739, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 2.1822784810126583, | |
| "grad_norm": 0.3982221741590615, | |
| "learning_rate": 2.7660222937838677e-05, | |
| "loss": 0.2605, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 2.186329113924051, | |
| "grad_norm": 0.43445724618303316, | |
| "learning_rate": 2.7607761691532186e-05, | |
| "loss": 0.2655, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 2.190379746835443, | |
| "grad_norm": 0.4270201394130976, | |
| "learning_rate": 2.7555239173358916e-05, | |
| "loss": 0.2655, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 2.1944303797468354, | |
| "grad_norm": 0.44471928583652215, | |
| "learning_rate": 2.7502655806328e-05, | |
| "loss": 0.2786, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 2.198481012658228, | |
| "grad_norm": 0.4249257805754625, | |
| "learning_rate": 2.7450012013938648e-05, | |
| "loss": 0.2592, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 2.2025316455696204, | |
| "grad_norm": 0.43147509211332147, | |
| "learning_rate": 2.739730822017673e-05, | |
| "loss": 0.2782, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 2.2065822784810125, | |
| "grad_norm": 0.4344075266075954, | |
| "learning_rate": 2.7344544849511355e-05, | |
| "loss": 0.2826, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 2.210632911392405, | |
| "grad_norm": 0.40429897542755927, | |
| "learning_rate": 2.7291722326891456e-05, | |
| "loss": 0.2879, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 2.2146835443037975, | |
| "grad_norm": 0.4398660392082459, | |
| "learning_rate": 2.723884107774236e-05, | |
| "loss": 0.2651, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 2.21873417721519, | |
| "grad_norm": 0.42978347317414284, | |
| "learning_rate": 2.718590152796239e-05, | |
| "loss": 0.2778, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 2.222784810126582, | |
| "grad_norm": 0.41666283964125467, | |
| "learning_rate": 2.71329041039194e-05, | |
| "loss": 0.2913, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 2.2268354430379746, | |
| "grad_norm": 0.4148622352965865, | |
| "learning_rate": 2.7079849232447357e-05, | |
| "loss": 0.275, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 2.230886075949367, | |
| "grad_norm": 0.434317473424833, | |
| "learning_rate": 2.7026737340842895e-05, | |
| "loss": 0.2839, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 2.2349367088607597, | |
| "grad_norm": 0.3911093220341875, | |
| "learning_rate": 2.697356885686189e-05, | |
| "loss": 0.306, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 2.2389873417721518, | |
| "grad_norm": 0.4207321630245316, | |
| "learning_rate": 2.6920344208716014e-05, | |
| "loss": 0.2755, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 2.2430379746835443, | |
| "grad_norm": 0.41168108013654636, | |
| "learning_rate": 2.6867063825069252e-05, | |
| "loss": 0.2905, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 2.247088607594937, | |
| "grad_norm": 0.4343241085247585, | |
| "learning_rate": 2.6813728135034494e-05, | |
| "loss": 0.279, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 2.2511392405063293, | |
| "grad_norm": 0.40338271789968805, | |
| "learning_rate": 2.6760337568170056e-05, | |
| "loss": 0.2728, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 2.2551898734177214, | |
| "grad_norm": 0.3975827052872322, | |
| "learning_rate": 2.6706892554476226e-05, | |
| "loss": 0.2649, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 2.259240506329114, | |
| "grad_norm": 0.4105831250425762, | |
| "learning_rate": 2.6653393524391795e-05, | |
| "loss": 0.2783, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 2.2632911392405064, | |
| "grad_norm": 0.4061507831480528, | |
| "learning_rate": 2.6599840908790592e-05, | |
| "loss": 0.2704, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 2.267341772151899, | |
| "grad_norm": 0.4063355465961293, | |
| "learning_rate": 2.6546235138978028e-05, | |
| "loss": 0.2804, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 2.271392405063291, | |
| "grad_norm": 0.3843731689702759, | |
| "learning_rate": 2.6492576646687597e-05, | |
| "loss": 0.2809, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 2.2754430379746835, | |
| "grad_norm": 0.3977963423453418, | |
| "learning_rate": 2.6438865864077425e-05, | |
| "loss": 0.2757, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 2.279493670886076, | |
| "grad_norm": 0.3918142735094154, | |
| "learning_rate": 2.6385103223726766e-05, | |
| "loss": 0.2928, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 2.2835443037974685, | |
| "grad_norm": 0.3962155221596916, | |
| "learning_rate": 2.6331289158632537e-05, | |
| "loss": 0.2837, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 2.2875949367088606, | |
| "grad_norm": 0.4198199336226785, | |
| "learning_rate": 2.6277424102205817e-05, | |
| "loss": 0.2729, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 2.291645569620253, | |
| "grad_norm": 0.37623339777093245, | |
| "learning_rate": 2.6223508488268374e-05, | |
| "loss": 0.2711, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 2.2956962025316456, | |
| "grad_norm": 0.4381870071350417, | |
| "learning_rate": 2.6169542751049148e-05, | |
| "loss": 0.2839, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 2.299746835443038, | |
| "grad_norm": 0.4026126196127328, | |
| "learning_rate": 2.6115527325180754e-05, | |
| "loss": 0.2681, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 2.3037974683544302, | |
| "grad_norm": 0.4406291843410084, | |
| "learning_rate": 2.606146264569603e-05, | |
| "loss": 0.2808, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 2.3078481012658227, | |
| "grad_norm": 0.42866376564842745, | |
| "learning_rate": 2.6007349148024447e-05, | |
| "loss": 0.3008, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 2.3118987341772153, | |
| "grad_norm": 0.38986558838555857, | |
| "learning_rate": 2.5953187267988694e-05, | |
| "loss": 0.2769, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 2.3159493670886078, | |
| "grad_norm": 0.41367913952458146, | |
| "learning_rate": 2.5898977441801097e-05, | |
| "loss": 0.2732, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "grad_norm": 0.4084019728968326, | |
| "learning_rate": 2.584472010606015e-05, | |
| "loss": 0.2694, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 2.3240506329113924, | |
| "grad_norm": 0.39601891601335404, | |
| "learning_rate": 2.5790415697746976e-05, | |
| "loss": 0.2715, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 2.328101265822785, | |
| "grad_norm": 0.3983730413847545, | |
| "learning_rate": 2.5736064654221808e-05, | |
| "loss": 0.2842, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 2.3321518987341774, | |
| "grad_norm": 0.40981757086008463, | |
| "learning_rate": 2.568166741322048e-05, | |
| "loss": 0.2949, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 2.3362025316455695, | |
| "grad_norm": 0.41513635069567334, | |
| "learning_rate": 2.56272244128509e-05, | |
| "loss": 0.2823, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 2.340253164556962, | |
| "grad_norm": 0.4071400289965067, | |
| "learning_rate": 2.55727360915895e-05, | |
| "loss": 0.2846, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 2.3443037974683545, | |
| "grad_norm": 0.4020843757812165, | |
| "learning_rate": 2.5518202888277734e-05, | |
| "loss": 0.2795, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 2.348354430379747, | |
| "grad_norm": 0.4084385651124559, | |
| "learning_rate": 2.5463625242118523e-05, | |
| "loss": 0.2791, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 2.352405063291139, | |
| "grad_norm": 0.4063170384560954, | |
| "learning_rate": 2.5409003592672723e-05, | |
| "loss": 0.3018, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 2.3564556962025316, | |
| "grad_norm": 0.40595527735198056, | |
| "learning_rate": 2.535433837985559e-05, | |
| "loss": 0.2975, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 2.360506329113924, | |
| "grad_norm": 0.4068420895100194, | |
| "learning_rate": 2.529963004393324e-05, | |
| "loss": 0.2824, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 2.3645569620253166, | |
| "grad_norm": 0.3853087531642031, | |
| "learning_rate": 2.524487902551908e-05, | |
| "loss": 0.2804, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 2.3686075949367087, | |
| "grad_norm": 0.3862868276131623, | |
| "learning_rate": 2.519008576557029e-05, | |
| "loss": 0.2768, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 2.372658227848101, | |
| "grad_norm": 0.3962106060774979, | |
| "learning_rate": 2.5135250705384254e-05, | |
| "loss": 0.2948, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 2.3767088607594937, | |
| "grad_norm": 0.3917501597643043, | |
| "learning_rate": 2.5080374286595007e-05, | |
| "loss": 0.2718, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 2.3807594936708862, | |
| "grad_norm": 0.4018043890813915, | |
| "learning_rate": 2.5025456951169677e-05, | |
| "loss": 0.2763, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 2.3848101265822783, | |
| "grad_norm": 0.44110410789354415, | |
| "learning_rate": 2.4970499141404942e-05, | |
| "loss": 0.2783, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 2.388860759493671, | |
| "grad_norm": 0.4145846990560126, | |
| "learning_rate": 2.491550129992345e-05, | |
| "loss": 0.2815, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 2.3929113924050633, | |
| "grad_norm": 0.43812613400215145, | |
| "learning_rate": 2.486046386967024e-05, | |
| "loss": 0.2669, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 2.396962025316456, | |
| "grad_norm": 0.41430714374310484, | |
| "learning_rate": 2.4805387293909214e-05, | |
| "loss": 0.27, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 2.401012658227848, | |
| "grad_norm": 0.42689922384406487, | |
| "learning_rate": 2.4750272016219552e-05, | |
| "loss": 0.2796, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 2.4050632911392404, | |
| "grad_norm": 0.47287978915914153, | |
| "learning_rate": 2.4695118480492114e-05, | |
| "loss": 0.2844, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 2.409113924050633, | |
| "grad_norm": 0.39844538116392225, | |
| "learning_rate": 2.4639927130925898e-05, | |
| "loss": 0.3153, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 2.4131645569620255, | |
| "grad_norm": 0.44483754302711725, | |
| "learning_rate": 2.458469841202444e-05, | |
| "loss": 0.2961, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 2.4172151898734175, | |
| "grad_norm": 0.40001449905800607, | |
| "learning_rate": 2.452943276859226e-05, | |
| "loss": 0.2728, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 2.42126582278481, | |
| "grad_norm": 0.46357947665813315, | |
| "learning_rate": 2.447413064573125e-05, | |
| "loss": 0.2864, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 2.4253164556962026, | |
| "grad_norm": 0.40902071556265907, | |
| "learning_rate": 2.4418792488837095e-05, | |
| "loss": 0.2975, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 2.429367088607595, | |
| "grad_norm": 0.42490240484739783, | |
| "learning_rate": 2.4363418743595713e-05, | |
| "loss": 0.2998, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 2.433417721518987, | |
| "grad_norm": 0.4226332265672028, | |
| "learning_rate": 2.430800985597963e-05, | |
| "loss": 0.2829, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 2.4374683544303797, | |
| "grad_norm": 0.45133413038379155, | |
| "learning_rate": 2.4252566272244415e-05, | |
| "loss": 0.2854, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 2.441518987341772, | |
| "grad_norm": 0.3951015144180344, | |
| "learning_rate": 2.4197088438925063e-05, | |
| "loss": 0.3031, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 2.4455696202531647, | |
| "grad_norm": 0.4017401045660559, | |
| "learning_rate": 2.4141576802832417e-05, | |
| "loss": 0.2812, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 2.449620253164557, | |
| "grad_norm": 0.42618206127904945, | |
| "learning_rate": 2.408603181104957e-05, | |
| "loss": 0.2972, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 2.4536708860759493, | |
| "grad_norm": 0.3892992134970232, | |
| "learning_rate": 2.4030453910928245e-05, | |
| "loss": 0.2982, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 2.457721518987342, | |
| "grad_norm": 0.46274140174745043, | |
| "learning_rate": 2.397484355008521e-05, | |
| "loss": 0.2912, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 2.4617721518987343, | |
| "grad_norm": 0.38598921624590915, | |
| "learning_rate": 2.3919201176398662e-05, | |
| "loss": 0.28, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 2.4658227848101264, | |
| "grad_norm": 0.4103318324244433, | |
| "learning_rate": 2.3863527238004633e-05, | |
| "loss": 0.2936, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 2.469873417721519, | |
| "grad_norm": 0.4162004139712585, | |
| "learning_rate": 2.380782218329337e-05, | |
| "loss": 0.2586, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 2.4739240506329114, | |
| "grad_norm": 0.4167120438057965, | |
| "learning_rate": 2.3752086460905725e-05, | |
| "loss": 0.2795, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 2.477974683544304, | |
| "grad_norm": 0.40361362159179043, | |
| "learning_rate": 2.3696320519729544e-05, | |
| "loss": 0.2995, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 2.482025316455696, | |
| "grad_norm": 0.4118890214590572, | |
| "learning_rate": 2.3640524808896045e-05, | |
| "loss": 0.2904, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 2.4860759493670885, | |
| "grad_norm": 0.4144631673094275, | |
| "learning_rate": 2.3584699777776222e-05, | |
| "loss": 0.2825, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 2.490126582278481, | |
| "grad_norm": 0.4638498124463217, | |
| "learning_rate": 2.3528845875977195e-05, | |
| "loss": 0.2918, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 2.4941772151898736, | |
| "grad_norm": 0.3997371056362086, | |
| "learning_rate": 2.3472963553338614e-05, | |
| "loss": 0.2729, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 2.4982278481012656, | |
| "grad_norm": 0.47453298338556965, | |
| "learning_rate": 2.341705325992901e-05, | |
| "loss": 0.2929, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 2.502278481012658, | |
| "grad_norm": 0.38923737716869256, | |
| "learning_rate": 2.336111544604222e-05, | |
| "loss": 0.2788, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 2.5063291139240507, | |
| "grad_norm": 0.4587734861427746, | |
| "learning_rate": 2.33051505621937e-05, | |
| "loss": 0.2728, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 2.510379746835443, | |
| "grad_norm": 0.40861894469379634, | |
| "learning_rate": 2.324915905911693e-05, | |
| "loss": 0.2684, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 2.5144303797468357, | |
| "grad_norm": 0.4476299326617825, | |
| "learning_rate": 2.319314138775977e-05, | |
| "loss": 0.2847, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 2.5184810126582278, | |
| "grad_norm": 0.43434532124331626, | |
| "learning_rate": 2.3137097999280856e-05, | |
| "loss": 0.2773, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 2.5225316455696203, | |
| "grad_norm": 0.41039608281124634, | |
| "learning_rate": 2.308102934504593e-05, | |
| "loss": 0.2789, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 2.526582278481013, | |
| "grad_norm": 0.402319200013175, | |
| "learning_rate": 2.3024935876624222e-05, | |
| "loss": 0.2865, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 2.530632911392405, | |
| "grad_norm": 0.3879383062460831, | |
| "learning_rate": 2.2968818045784813e-05, | |
| "loss": 0.2767, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 2.5346835443037974, | |
| "grad_norm": 0.42328792463880227, | |
| "learning_rate": 2.2912676304493006e-05, | |
| "loss": 0.2984, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 2.53873417721519, | |
| "grad_norm": 0.40294053633781485, | |
| "learning_rate": 2.2856511104906668e-05, | |
| "loss": 0.2767, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 2.5427848101265824, | |
| "grad_norm": 0.44530068731917544, | |
| "learning_rate": 2.2800322899372586e-05, | |
| "loss": 0.2903, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 2.546835443037975, | |
| "grad_norm": 0.38326878980211054, | |
| "learning_rate": 2.2744112140422844e-05, | |
| "loss": 0.2844, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 2.550886075949367, | |
| "grad_norm": 0.4066151883154577, | |
| "learning_rate": 2.2687879280771177e-05, | |
| "loss": 0.2749, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 2.5549367088607595, | |
| "grad_norm": 0.38338624299936197, | |
| "learning_rate": 2.26316247733093e-05, | |
| "loss": 0.2802, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 2.558987341772152, | |
| "grad_norm": 0.40052197974224457, | |
| "learning_rate": 2.257534907110328e-05, | |
| "loss": 0.2761, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 2.563037974683544, | |
| "grad_norm": 0.3868991397452875, | |
| "learning_rate": 2.2519052627389882e-05, | |
| "loss": 0.2679, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 2.5670886075949366, | |
| "grad_norm": 0.41442787181187635, | |
| "learning_rate": 2.246273589557294e-05, | |
| "loss": 0.2786, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 2.571139240506329, | |
| "grad_norm": 0.392637797774676, | |
| "learning_rate": 2.240639932921966e-05, | |
| "loss": 0.2988, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 2.5751898734177217, | |
| "grad_norm": 0.3793664769959631, | |
| "learning_rate": 2.2350043382056995e-05, | |
| "loss": 0.2809, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 2.579240506329114, | |
| "grad_norm": 0.3894588294010504, | |
| "learning_rate": 2.2293668507968015e-05, | |
| "loss": 0.2572, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 2.5832911392405062, | |
| "grad_norm": 0.39916040109465367, | |
| "learning_rate": 2.2237275160988186e-05, | |
| "loss": 0.2735, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 2.5873417721518988, | |
| "grad_norm": 0.37138000654219194, | |
| "learning_rate": 2.2180863795301787e-05, | |
| "loss": 0.2755, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 2.5913924050632913, | |
| "grad_norm": 0.4310158435345107, | |
| "learning_rate": 2.212443486523819e-05, | |
| "loss": 0.2706, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 2.5954430379746833, | |
| "grad_norm": 0.38475782773851575, | |
| "learning_rate": 2.2067988825268243e-05, | |
| "loss": 0.2902, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 2.599493670886076, | |
| "grad_norm": 0.39215878171418994, | |
| "learning_rate": 2.2011526130000596e-05, | |
| "loss": 0.2794, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 2.6035443037974684, | |
| "grad_norm": 0.41005505355094884, | |
| "learning_rate": 2.1955047234178038e-05, | |
| "loss": 0.3018, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 2.607594936708861, | |
| "grad_norm": 0.3941953647251406, | |
| "learning_rate": 2.1898552592673825e-05, | |
| "loss": 0.2645, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 2.6116455696202534, | |
| "grad_norm": 0.38857431500571293, | |
| "learning_rate": 2.184204266048803e-05, | |
| "loss": 0.2871, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 2.6156962025316455, | |
| "grad_norm": 0.40219787966246956, | |
| "learning_rate": 2.1785517892743887e-05, | |
| "loss": 0.277, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 2.619746835443038, | |
| "grad_norm": 0.3895987835520362, | |
| "learning_rate": 2.17289787446841e-05, | |
| "loss": 0.2892, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 2.6237974683544305, | |
| "grad_norm": 0.3723526514938595, | |
| "learning_rate": 2.1672425671667198e-05, | |
| "loss": 0.2767, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 2.6278481012658226, | |
| "grad_norm": 0.3974183760005829, | |
| "learning_rate": 2.161585912916385e-05, | |
| "loss": 0.2866, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 2.631898734177215, | |
| "grad_norm": 0.3831490839076185, | |
| "learning_rate": 2.1559279572753214e-05, | |
| "loss": 0.2792, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 2.6359493670886076, | |
| "grad_norm": 0.4161678280406475, | |
| "learning_rate": 2.1502687458119268e-05, | |
| "loss": 0.2765, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "grad_norm": 0.38505534951354214, | |
| "learning_rate": 2.1446083241047116e-05, | |
| "loss": 0.2608, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 2.6440506329113926, | |
| "grad_norm": 0.38919161854555384, | |
| "learning_rate": 2.1389467377419333e-05, | |
| "loss": 0.2649, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 2.6481012658227847, | |
| "grad_norm": 0.42551472791220846, | |
| "learning_rate": 2.133284032321232e-05, | |
| "loss": 0.2847, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 2.6521518987341772, | |
| "grad_norm": 0.4493637319018401, | |
| "learning_rate": 2.1276202534492566e-05, | |
| "loss": 0.2769, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 2.6562025316455697, | |
| "grad_norm": 0.4129104895868485, | |
| "learning_rate": 2.121955446741306e-05, | |
| "loss": 0.2724, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 2.660253164556962, | |
| "grad_norm": 0.40501399280962863, | |
| "learning_rate": 2.1162896578209517e-05, | |
| "loss": 0.288, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 2.6643037974683543, | |
| "grad_norm": 0.38387580492089346, | |
| "learning_rate": 2.1106229323196813e-05, | |
| "loss": 0.2783, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 2.668354430379747, | |
| "grad_norm": 0.421050220704885, | |
| "learning_rate": 2.1049553158765214e-05, | |
| "loss": 0.2747, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 2.6724050632911394, | |
| "grad_norm": 0.3935307811182046, | |
| "learning_rate": 2.0992868541376764e-05, | |
| "loss": 0.2839, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 2.676455696202532, | |
| "grad_norm": 0.3994248683435304, | |
| "learning_rate": 2.093617592756158e-05, | |
| "loss": 0.2901, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 2.680506329113924, | |
| "grad_norm": 0.3879840049172871, | |
| "learning_rate": 2.0879475773914167e-05, | |
| "loss": 0.2823, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 2.6845569620253165, | |
| "grad_norm": 0.3974592604705227, | |
| "learning_rate": 2.082276853708978e-05, | |
| "loss": 0.2678, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 2.688607594936709, | |
| "grad_norm": 0.37657223130076417, | |
| "learning_rate": 2.076605467380071e-05, | |
| "loss": 0.2847, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 2.692658227848101, | |
| "grad_norm": 2.9293731698471692, | |
| "learning_rate": 2.0709334640812613e-05, | |
| "loss": 0.2844, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 2.6967088607594936, | |
| "grad_norm": 0.47184564972594567, | |
| "learning_rate": 2.0652608894940824e-05, | |
| "loss": 0.2898, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 2.700759493670886, | |
| "grad_norm": 0.40562123530519484, | |
| "learning_rate": 2.0595877893046722e-05, | |
| "loss": 0.2784, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 2.7048101265822786, | |
| "grad_norm": 0.4089940174101844, | |
| "learning_rate": 2.0539142092033985e-05, | |
| "loss": 0.2874, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 2.708860759493671, | |
| "grad_norm": 0.40615526052550305, | |
| "learning_rate": 2.048240194884496e-05, | |
| "loss": 0.2705, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 2.712911392405063, | |
| "grad_norm": 0.422078862714908, | |
| "learning_rate": 2.042565792045695e-05, | |
| "loss": 0.2826, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 2.7169620253164557, | |
| "grad_norm": 0.41766632298099254, | |
| "learning_rate": 2.036891046387857e-05, | |
| "loss": 0.2988, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 2.721012658227848, | |
| "grad_norm": 0.41059839433785983, | |
| "learning_rate": 2.0312160036146036e-05, | |
| "loss": 0.2762, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 2.7250632911392403, | |
| "grad_norm": 0.4260039787619452, | |
| "learning_rate": 2.025540709431948e-05, | |
| "loss": 0.2837, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 2.729113924050633, | |
| "grad_norm": 0.40015145441369426, | |
| "learning_rate": 2.0198652095479298e-05, | |
| "loss": 0.2848, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 2.7331645569620253, | |
| "grad_norm": 0.4022405804821003, | |
| "learning_rate": 2.014189549672245e-05, | |
| "loss": 0.2806, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 2.737215189873418, | |
| "grad_norm": 0.4034152944951102, | |
| "learning_rate": 2.0085137755158776e-05, | |
| "loss": 0.2871, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 2.7412658227848103, | |
| "grad_norm": 0.4052117657519094, | |
| "learning_rate": 2.0028379327907327e-05, | |
| "loss": 0.2721, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 2.7453164556962024, | |
| "grad_norm": 0.38686698410747433, | |
| "learning_rate": 1.9971620672092676e-05, | |
| "loss": 0.2921, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 2.749367088607595, | |
| "grad_norm": 0.4073552163549865, | |
| "learning_rate": 1.991486224484123e-05, | |
| "loss": 0.2774, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 2.7534177215189874, | |
| "grad_norm": 0.40554192125804167, | |
| "learning_rate": 1.985810450327756e-05, | |
| "loss": 0.2965, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 2.7574683544303795, | |
| "grad_norm": 0.39528770175166766, | |
| "learning_rate": 1.9801347904520706e-05, | |
| "loss": 0.2592, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 2.761518987341772, | |
| "grad_norm": 0.40591364526839707, | |
| "learning_rate": 1.974459290568053e-05, | |
| "loss": 0.282, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 2.7655696202531646, | |
| "grad_norm": 0.3873380794916649, | |
| "learning_rate": 1.968783996385397e-05, | |
| "loss": 0.2956, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 2.769620253164557, | |
| "grad_norm": 0.38998840618842867, | |
| "learning_rate": 1.963108953612143e-05, | |
| "loss": 0.2759, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 2.7736708860759496, | |
| "grad_norm": 0.3892743812431699, | |
| "learning_rate": 1.9574342079543056e-05, | |
| "loss": 0.2795, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 2.7777215189873417, | |
| "grad_norm": 0.39374787503012065, | |
| "learning_rate": 1.9517598051155046e-05, | |
| "loss": 0.2593, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 2.781772151898734, | |
| "grad_norm": 0.400191457265138, | |
| "learning_rate": 1.9460857907966025e-05, | |
| "loss": 0.305, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 2.7858227848101267, | |
| "grad_norm": 0.4016887107925414, | |
| "learning_rate": 1.9404122106953285e-05, | |
| "loss": 0.2937, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 2.7898734177215188, | |
| "grad_norm": 0.3894657499273909, | |
| "learning_rate": 1.9347391105059176e-05, | |
| "loss": 0.2996, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 2.7939240506329113, | |
| "grad_norm": 0.38447511860708417, | |
| "learning_rate": 1.92906653591874e-05, | |
| "loss": 0.2995, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 2.797974683544304, | |
| "grad_norm": 0.3757686004032544, | |
| "learning_rate": 1.9233945326199295e-05, | |
| "loss": 0.2929, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 2.8020253164556963, | |
| "grad_norm": 0.39580971699523954, | |
| "learning_rate": 1.917723146291022e-05, | |
| "loss": 0.298, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 2.806075949367089, | |
| "grad_norm": 0.3970232196307396, | |
| "learning_rate": 1.912052422608584e-05, | |
| "loss": 0.2905, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 2.810126582278481, | |
| "grad_norm": 0.386656078497716, | |
| "learning_rate": 1.9063824072438428e-05, | |
| "loss": 0.275, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 2.8141772151898734, | |
| "grad_norm": 0.37774544325016146, | |
| "learning_rate": 1.9007131458623246e-05, | |
| "loss": 0.278, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 2.818227848101266, | |
| "grad_norm": 0.38603814451996676, | |
| "learning_rate": 1.895044684123479e-05, | |
| "loss": 0.2867, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 2.822278481012658, | |
| "grad_norm": 0.3968609585349975, | |
| "learning_rate": 1.8893770676803194e-05, | |
| "loss": 0.302, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 2.8263291139240505, | |
| "grad_norm": 0.3954445007228064, | |
| "learning_rate": 1.8837103421790486e-05, | |
| "loss": 0.2754, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 2.830379746835443, | |
| "grad_norm": 0.38081920404110664, | |
| "learning_rate": 1.8780445532586952e-05, | |
| "loss": 0.2884, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 2.8344303797468355, | |
| "grad_norm": 0.3858584404747633, | |
| "learning_rate": 1.872379746550743e-05, | |
| "loss": 0.2654, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 2.838481012658228, | |
| "grad_norm": 0.3704639260666378, | |
| "learning_rate": 1.866715967678769e-05, | |
| "loss": 0.2852, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 2.84253164556962, | |
| "grad_norm": 0.38921933086655336, | |
| "learning_rate": 1.861053262258067e-05, | |
| "loss": 0.293, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 2.8465822784810126, | |
| "grad_norm": 0.4270909038432567, | |
| "learning_rate": 1.8553916758952897e-05, | |
| "loss": 0.2713, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 2.850632911392405, | |
| "grad_norm": 0.37250649068322894, | |
| "learning_rate": 1.8497312541880735e-05, | |
| "loss": 0.2816, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 2.8546835443037972, | |
| "grad_norm": 0.393740302005169, | |
| "learning_rate": 1.8440720427246786e-05, | |
| "loss": 0.2835, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 2.8587341772151897, | |
| "grad_norm": 0.4029789900794336, | |
| "learning_rate": 1.8384140870836157e-05, | |
| "loss": 0.2684, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 2.8627848101265823, | |
| "grad_norm": 0.37516533685767506, | |
| "learning_rate": 1.8327574328332806e-05, | |
| "loss": 0.2692, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 2.8668354430379748, | |
| "grad_norm": 0.37860778912657733, | |
| "learning_rate": 1.8271021255315906e-05, | |
| "loss": 0.2801, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 2.8708860759493673, | |
| "grad_norm": 0.39035614553581255, | |
| "learning_rate": 1.8214482107256117e-05, | |
| "loss": 0.2781, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 2.8749367088607594, | |
| "grad_norm": 0.4036138935014821, | |
| "learning_rate": 1.8157957339511968e-05, | |
| "loss": 0.2897, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 2.878987341772152, | |
| "grad_norm": 0.37088265394242964, | |
| "learning_rate": 1.8101447407326182e-05, | |
| "loss": 0.2825, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 2.8830379746835444, | |
| "grad_norm": 0.3773492253576865, | |
| "learning_rate": 1.8044952765821966e-05, | |
| "loss": 0.2933, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 2.8870886075949365, | |
| "grad_norm": 0.38082138763442513, | |
| "learning_rate": 1.7988473869999407e-05, | |
| "loss": 0.2832, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 2.891139240506329, | |
| "grad_norm": 0.3714812482429228, | |
| "learning_rate": 1.7932011174731764e-05, | |
| "loss": 0.2878, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 2.8951898734177215, | |
| "grad_norm": 0.3691780633215788, | |
| "learning_rate": 1.7875565134761817e-05, | |
| "loss": 0.274, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 2.899240506329114, | |
| "grad_norm": 0.39326847549024857, | |
| "learning_rate": 1.7819136204698226e-05, | |
| "loss": 0.2854, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 2.9032911392405065, | |
| "grad_norm": 0.38636281166057007, | |
| "learning_rate": 1.776272483901182e-05, | |
| "loss": 0.2903, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 2.9073417721518986, | |
| "grad_norm": 0.37145568560121217, | |
| "learning_rate": 1.7706331492031995e-05, | |
| "loss": 0.2789, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 2.911392405063291, | |
| "grad_norm": 0.35609624961833913, | |
| "learning_rate": 1.764995661794301e-05, | |
| "loss": 0.2885, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 2.9154430379746836, | |
| "grad_norm": 0.3839931163218711, | |
| "learning_rate": 1.759360067078035e-05, | |
| "loss": 0.2762, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 2.9194936708860757, | |
| "grad_norm": 0.38413383321256717, | |
| "learning_rate": 1.7537264104427064e-05, | |
| "loss": 0.2826, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 2.923544303797468, | |
| "grad_norm": 0.3683790196953599, | |
| "learning_rate": 1.748094737261012e-05, | |
| "loss": 0.2848, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 2.9275949367088607, | |
| "grad_norm": 0.3795333717422509, | |
| "learning_rate": 1.7424650928896726e-05, | |
| "loss": 0.2731, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 2.9316455696202532, | |
| "grad_norm": 0.38027522884057113, | |
| "learning_rate": 1.7368375226690712e-05, | |
| "loss": 0.2681, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 2.9356962025316458, | |
| "grad_norm": 0.379915889072295, | |
| "learning_rate": 1.731212071922883e-05, | |
| "loss": 0.2825, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 2.939746835443038, | |
| "grad_norm": 0.38284617087613293, | |
| "learning_rate": 1.7255887859577156e-05, | |
| "loss": 0.2602, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 2.9437974683544303, | |
| "grad_norm": 0.3849443605937237, | |
| "learning_rate": 1.7199677100627427e-05, | |
| "loss": 0.2735, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 2.947848101265823, | |
| "grad_norm": 0.38118196122394704, | |
| "learning_rate": 1.7143488895093343e-05, | |
| "loss": 0.2812, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 2.951898734177215, | |
| "grad_norm": 0.3911637241302771, | |
| "learning_rate": 1.7087323695506994e-05, | |
| "loss": 0.2905, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 2.9559493670886074, | |
| "grad_norm": 0.3734866838442479, | |
| "learning_rate": 1.7031181954215194e-05, | |
| "loss": 0.2751, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "grad_norm": 0.38065735015857904, | |
| "learning_rate": 1.6975064123375788e-05, | |
| "loss": 0.2675, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 2.9640506329113925, | |
| "grad_norm": 0.41387259745290444, | |
| "learning_rate": 1.6918970654954084e-05, | |
| "loss": 0.2926, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 2.968101265822785, | |
| "grad_norm": 0.38004420843882886, | |
| "learning_rate": 1.686290200071915e-05, | |
| "loss": 0.2669, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 2.972151898734177, | |
| "grad_norm": 0.373740722614481, | |
| "learning_rate": 1.6806858612240234e-05, | |
| "loss": 0.2875, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 2.9762025316455696, | |
| "grad_norm": 0.4005778730185085, | |
| "learning_rate": 1.6750840940883078e-05, | |
| "loss": 0.2736, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 2.980253164556962, | |
| "grad_norm": 0.3684684178058191, | |
| "learning_rate": 1.6694849437806305e-05, | |
| "loss": 0.2849, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 2.984303797468354, | |
| "grad_norm": 0.3653152320574722, | |
| "learning_rate": 1.663888455395778e-05, | |
| "loss": 0.2745, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 2.9883544303797467, | |
| "grad_norm": 0.38147755155978824, | |
| "learning_rate": 1.6582946740070995e-05, | |
| "loss": 0.282, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 2.992405063291139, | |
| "grad_norm": 0.3943466596169522, | |
| "learning_rate": 1.6527036446661396e-05, | |
| "loss": 0.2928, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 2.9964556962025317, | |
| "grad_norm": 0.4062818156700276, | |
| "learning_rate": 1.6471154124022818e-05, | |
| "loss": 0.2811, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 3.002025316455696, | |
| "grad_norm": 0.41702483695113596, | |
| "learning_rate": 1.6415300222223788e-05, | |
| "loss": 0.2144, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 3.0060759493670886, | |
| "grad_norm": 0.5208230254665859, | |
| "learning_rate": 1.6359475191103958e-05, | |
| "loss": 0.1801, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 3.010126582278481, | |
| "grad_norm": 0.3581313985880267, | |
| "learning_rate": 1.6303679480270466e-05, | |
| "loss": 0.165, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 3.0141772151898736, | |
| "grad_norm": 0.4395398217335517, | |
| "learning_rate": 1.624791353909428e-05, | |
| "loss": 0.1508, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 3.0182278481012657, | |
| "grad_norm": 0.6624088223982547, | |
| "learning_rate": 1.619217781670663e-05, | |
| "loss": 0.1735, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 3.022278481012658, | |
| "grad_norm": 0.531586629136385, | |
| "learning_rate": 1.6136472761995373e-05, | |
| "loss": 0.1501, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 3.0263291139240507, | |
| "grad_norm": 0.3829936707883009, | |
| "learning_rate": 1.608079882360134e-05, | |
| "loss": 0.1696, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 3.030379746835443, | |
| "grad_norm": 0.45912826387171035, | |
| "learning_rate": 1.60251564499148e-05, | |
| "loss": 0.1518, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 3.0344303797468353, | |
| "grad_norm": 0.4671641838971554, | |
| "learning_rate": 1.596954608907176e-05, | |
| "loss": 0.165, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 3.038481012658228, | |
| "grad_norm": 0.376370827741618, | |
| "learning_rate": 1.591396818895043e-05, | |
| "loss": 0.1434, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 3.0425316455696203, | |
| "grad_norm": 0.36084305316384363, | |
| "learning_rate": 1.585842319716759e-05, | |
| "loss": 0.1574, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 3.046582278481013, | |
| "grad_norm": 0.39203365833931164, | |
| "learning_rate": 1.5802911561074944e-05, | |
| "loss": 0.1577, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 3.050632911392405, | |
| "grad_norm": 0.3895488379905902, | |
| "learning_rate": 1.5747433727755595e-05, | |
| "loss": 0.1466, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 3.0546835443037974, | |
| "grad_norm": 0.39846688322965884, | |
| "learning_rate": 1.5691990144020376e-05, | |
| "loss": 0.1354, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 3.05873417721519, | |
| "grad_norm": 0.37721865741203525, | |
| "learning_rate": 1.5636581256404297e-05, | |
| "loss": 0.1545, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 3.0627848101265824, | |
| "grad_norm": 0.33324899064536356, | |
| "learning_rate": 1.558120751116291e-05, | |
| "loss": 0.1451, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 3.0668354430379745, | |
| "grad_norm": 0.3492066408451327, | |
| "learning_rate": 1.552586935426876e-05, | |
| "loss": 0.1542, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 3.070886075949367, | |
| "grad_norm": 0.3566816034374583, | |
| "learning_rate": 1.547056723140774e-05, | |
| "loss": 0.1659, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 3.0749367088607595, | |
| "grad_norm": 0.36508175328894676, | |
| "learning_rate": 1.5415301587975565e-05, | |
| "loss": 0.1527, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 3.078987341772152, | |
| "grad_norm": 0.35450374059136003, | |
| "learning_rate": 1.536007286907411e-05, | |
| "loss": 0.1479, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 3.083037974683544, | |
| "grad_norm": 0.3665451683738164, | |
| "learning_rate": 1.5304881519507896e-05, | |
| "loss": 0.1504, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 3.0870886075949366, | |
| "grad_norm": 0.3631537460742381, | |
| "learning_rate": 1.5249727983780453e-05, | |
| "loss": 0.1466, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 3.091139240506329, | |
| "grad_norm": 0.3701272364903994, | |
| "learning_rate": 1.5194612706090786e-05, | |
| "loss": 0.1451, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 3.0951898734177217, | |
| "grad_norm": 0.3613756280656335, | |
| "learning_rate": 1.5139536130329771e-05, | |
| "loss": 0.1529, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 3.0992405063291137, | |
| "grad_norm": 0.32742420712071185, | |
| "learning_rate": 1.508449870007656e-05, | |
| "loss": 0.1464, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 3.1032911392405063, | |
| "grad_norm": 0.3657674991022707, | |
| "learning_rate": 1.5029500858595056e-05, | |
| "loss": 0.1498, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 3.1073417721518988, | |
| "grad_norm": 0.34449910061927125, | |
| "learning_rate": 1.4974543048830328e-05, | |
| "loss": 0.1489, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 3.1113924050632913, | |
| "grad_norm": 0.3396554564168939, | |
| "learning_rate": 1.4919625713405e-05, | |
| "loss": 0.1571, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 3.1154430379746834, | |
| "grad_norm": 0.34144718574050364, | |
| "learning_rate": 1.4864749294615756e-05, | |
| "loss": 0.176, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 3.119493670886076, | |
| "grad_norm": 0.34567960736705444, | |
| "learning_rate": 1.4809914234429716e-05, | |
| "loss": 0.1533, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 3.1235443037974684, | |
| "grad_norm": 0.3385740125866981, | |
| "learning_rate": 1.4755120974480923e-05, | |
| "loss": 0.1499, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 3.127594936708861, | |
| "grad_norm": 0.36147132953308375, | |
| "learning_rate": 1.4700369956066771e-05, | |
| "loss": 0.1586, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 3.131645569620253, | |
| "grad_norm": 0.342823907727766, | |
| "learning_rate": 1.4645661620144413e-05, | |
| "loss": 0.1743, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 3.1356962025316455, | |
| "grad_norm": 0.32999857819382683, | |
| "learning_rate": 1.4590996407327284e-05, | |
| "loss": 0.1547, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 3.139746835443038, | |
| "grad_norm": 0.34168623507442575, | |
| "learning_rate": 1.4536374757881487e-05, | |
| "loss": 0.1505, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 3.1437974683544305, | |
| "grad_norm": 0.35162269802183044, | |
| "learning_rate": 1.4481797111722271e-05, | |
| "loss": 0.1701, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 3.1478481012658226, | |
| "grad_norm": 0.3128185817111032, | |
| "learning_rate": 1.4427263908410507e-05, | |
| "loss": 0.1643, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 3.151898734177215, | |
| "grad_norm": 0.3291473167272917, | |
| "learning_rate": 1.4372775587149108e-05, | |
| "loss": 0.1528, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 3.1559493670886076, | |
| "grad_norm": 0.35125276570926045, | |
| "learning_rate": 1.4318332586779522e-05, | |
| "loss": 0.1662, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 3.16, | |
| "grad_norm": 0.33032720776636004, | |
| "learning_rate": 1.4263935345778202e-05, | |
| "loss": 0.1704, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 3.164050632911392, | |
| "grad_norm": 0.3459081544103809, | |
| "learning_rate": 1.420958430225303e-05, | |
| "loss": 0.1525, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 3.1681012658227847, | |
| "grad_norm": 0.3288639062250872, | |
| "learning_rate": 1.415527989393985e-05, | |
| "loss": 0.1635, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 3.1721518987341772, | |
| "grad_norm": 0.3317516004324739, | |
| "learning_rate": 1.410102255819891e-05, | |
| "loss": 0.1703, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 3.1762025316455698, | |
| "grad_norm": 0.3273875864944427, | |
| "learning_rate": 1.404681273201131e-05, | |
| "loss": 0.1497, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 3.180253164556962, | |
| "grad_norm": 0.3491289268720269, | |
| "learning_rate": 1.399265085197556e-05, | |
| "loss": 0.1638, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 3.1843037974683543, | |
| "grad_norm": 0.33641481812870866, | |
| "learning_rate": 1.393853735430398e-05, | |
| "loss": 0.1479, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 3.188354430379747, | |
| "grad_norm": 0.3228939815536499, | |
| "learning_rate": 1.3884472674819246e-05, | |
| "loss": 0.1751, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 3.1924050632911394, | |
| "grad_norm": 0.3459761724298092, | |
| "learning_rate": 1.3830457248950864e-05, | |
| "loss": 0.1657, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 3.1964556962025314, | |
| "grad_norm": 0.3378081730403833, | |
| "learning_rate": 1.377649151173163e-05, | |
| "loss": 0.1559, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 3.200506329113924, | |
| "grad_norm": 0.336956879468339, | |
| "learning_rate": 1.3722575897794181e-05, | |
| "loss": 0.1512, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 3.2045569620253165, | |
| "grad_norm": 0.3319089717093352, | |
| "learning_rate": 1.3668710841367472e-05, | |
| "loss": 0.1475, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 3.208607594936709, | |
| "grad_norm": 0.3345569474740725, | |
| "learning_rate": 1.361489677627324e-05, | |
| "loss": 0.1682, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 3.212658227848101, | |
| "grad_norm": 0.33226703565172655, | |
| "learning_rate": 1.3561134135922585e-05, | |
| "loss": 0.1436, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 3.2167088607594936, | |
| "grad_norm": 0.33204477479837075, | |
| "learning_rate": 1.350742335331241e-05, | |
| "loss": 0.1236, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 3.220759493670886, | |
| "grad_norm": 0.3330547868654958, | |
| "learning_rate": 1.345376486102198e-05, | |
| "loss": 0.1597, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 3.2248101265822786, | |
| "grad_norm": 0.3359415582835304, | |
| "learning_rate": 1.3400159091209414e-05, | |
| "loss": 0.1449, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 3.2288607594936707, | |
| "grad_norm": 0.34016798388268593, | |
| "learning_rate": 1.3346606475608216e-05, | |
| "loss": 0.16, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 3.232911392405063, | |
| "grad_norm": 0.3348829326436335, | |
| "learning_rate": 1.3293107445523781e-05, | |
| "loss": 0.1546, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 3.2369620253164557, | |
| "grad_norm": 0.3269851180389334, | |
| "learning_rate": 1.3239662431829949e-05, | |
| "loss": 0.1562, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 3.2410126582278482, | |
| "grad_norm": 0.3295694107777284, | |
| "learning_rate": 1.3186271864965509e-05, | |
| "loss": 0.1475, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 3.2450632911392403, | |
| "grad_norm": 0.330153436574161, | |
| "learning_rate": 1.3132936174930756e-05, | |
| "loss": 0.164, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 3.249113924050633, | |
| "grad_norm": 0.3397556052969577, | |
| "learning_rate": 1.3079655791283995e-05, | |
| "loss": 0.156, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 3.2531645569620253, | |
| "grad_norm": 0.34211593836858256, | |
| "learning_rate": 1.3026431143138108e-05, | |
| "loss": 0.1659, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 3.257215189873418, | |
| "grad_norm": 0.3361348619631758, | |
| "learning_rate": 1.2973262659157114e-05, | |
| "loss": 0.1471, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 3.26126582278481, | |
| "grad_norm": 0.34025704966440734, | |
| "learning_rate": 1.2920150767552651e-05, | |
| "loss": 0.1451, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 3.2653164556962024, | |
| "grad_norm": 0.3314681357863452, | |
| "learning_rate": 1.2867095896080607e-05, | |
| "loss": 0.1638, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 3.269367088607595, | |
| "grad_norm": 0.3376688597135208, | |
| "learning_rate": 1.2814098472037612e-05, | |
| "loss": 0.1659, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 3.2734177215189875, | |
| "grad_norm": 0.33279953737677526, | |
| "learning_rate": 1.276115892225764e-05, | |
| "loss": 0.159, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 3.27746835443038, | |
| "grad_norm": 0.33258251834062164, | |
| "learning_rate": 1.2708277673108555e-05, | |
| "loss": 0.1586, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 3.281518987341772, | |
| "grad_norm": 0.3374241901443898, | |
| "learning_rate": 1.2655455150488649e-05, | |
| "loss": 0.1451, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 3.2855696202531646, | |
| "grad_norm": 0.3422319264813296, | |
| "learning_rate": 1.2602691779823272e-05, | |
| "loss": 0.1502, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 3.289620253164557, | |
| "grad_norm": 0.33697301273370056, | |
| "learning_rate": 1.2549987986061355e-05, | |
| "loss": 0.1723, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 3.293670886075949, | |
| "grad_norm": 0.34709230649898043, | |
| "learning_rate": 1.2497344193672005e-05, | |
| "loss": 0.1525, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 3.2977215189873417, | |
| "grad_norm": 0.3363221221090753, | |
| "learning_rate": 1.2444760826641092e-05, | |
| "loss": 0.1571, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 3.301772151898734, | |
| "grad_norm": 0.33253467564376354, | |
| "learning_rate": 1.2392238308467817e-05, | |
| "loss": 0.135, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 3.3058227848101267, | |
| "grad_norm": 0.3338429085744522, | |
| "learning_rate": 1.2339777062161326e-05, | |
| "loss": 0.1457, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 3.309873417721519, | |
| "grad_norm": 0.3221354782426449, | |
| "learning_rate": 1.2287377510237293e-05, | |
| "loss": 0.1439, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 3.3139240506329113, | |
| "grad_norm": 0.321031992060786, | |
| "learning_rate": 1.2235040074714488e-05, | |
| "loss": 0.137, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 3.317974683544304, | |
| "grad_norm": 0.33659120538072446, | |
| "learning_rate": 1.2182765177111434e-05, | |
| "loss": 0.1552, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 3.3220253164556963, | |
| "grad_norm": 0.3325370776614276, | |
| "learning_rate": 1.213055323844297e-05, | |
| "loss": 0.1696, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 3.3260759493670884, | |
| "grad_norm": 0.318054327157221, | |
| "learning_rate": 1.2078404679216864e-05, | |
| "loss": 0.1453, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 3.330126582278481, | |
| "grad_norm": 0.3303501279466663, | |
| "learning_rate": 1.2026319919430458e-05, | |
| "loss": 0.16, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 3.3341772151898734, | |
| "grad_norm": 0.31659947960267604, | |
| "learning_rate": 1.1974299378567227e-05, | |
| "loss": 0.1499, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 3.338227848101266, | |
| "grad_norm": 0.3357945158114857, | |
| "learning_rate": 1.1922343475593462e-05, | |
| "loss": 0.1627, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 3.3422784810126585, | |
| "grad_norm": 0.3308894408670392, | |
| "learning_rate": 1.187045262895488e-05, | |
| "loss": 0.1382, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 3.3463291139240505, | |
| "grad_norm": 0.32553244646722096, | |
| "learning_rate": 1.1818627256573203e-05, | |
| "loss": 0.1387, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 3.350379746835443, | |
| "grad_norm": 0.3426506925985158, | |
| "learning_rate": 1.1766867775842864e-05, | |
| "loss": 0.1561, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 3.3544303797468356, | |
| "grad_norm": 0.33875721495857963, | |
| "learning_rate": 1.1715174603627615e-05, | |
| "loss": 0.1753, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 3.3584810126582276, | |
| "grad_norm": 0.320096444917792, | |
| "learning_rate": 1.1663548156257147e-05, | |
| "loss": 0.153, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 3.36253164556962, | |
| "grad_norm": 0.33222589494369476, | |
| "learning_rate": 1.161198884952377e-05, | |
| "loss": 0.1527, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 3.3665822784810127, | |
| "grad_norm": 0.331452361636876, | |
| "learning_rate": 1.1560497098679056e-05, | |
| "loss": 0.1827, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 3.370632911392405, | |
| "grad_norm": 0.33889888847752886, | |
| "learning_rate": 1.1509073318430479e-05, | |
| "loss": 0.1541, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 3.3746835443037977, | |
| "grad_norm": 0.3222295648401658, | |
| "learning_rate": 1.1457717922938116e-05, | |
| "loss": 0.1587, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 3.3787341772151898, | |
| "grad_norm": 0.3283827918110762, | |
| "learning_rate": 1.1406431325811233e-05, | |
| "loss": 0.1586, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 3.3827848101265823, | |
| "grad_norm": 0.3388736823556234, | |
| "learning_rate": 1.135521394010506e-05, | |
| "loss": 0.1672, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 3.386835443037975, | |
| "grad_norm": 0.33088376978421774, | |
| "learning_rate": 1.1304066178317367e-05, | |
| "loss": 0.1545, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 3.390886075949367, | |
| "grad_norm": 0.33031506063457244, | |
| "learning_rate": 1.1252988452385199e-05, | |
| "loss": 0.1515, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 3.3949367088607594, | |
| "grad_norm": 0.3417501633893918, | |
| "learning_rate": 1.1201981173681536e-05, | |
| "loss": 0.1508, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 3.398987341772152, | |
| "grad_norm": 0.33611963169746134, | |
| "learning_rate": 1.1151044753011991e-05, | |
| "loss": 0.1567, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 3.4030379746835444, | |
| "grad_norm": 0.31536759569321376, | |
| "learning_rate": 1.1100179600611491e-05, | |
| "loss": 0.1399, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 3.407088607594937, | |
| "grad_norm": 0.3385571975399883, | |
| "learning_rate": 1.1049386126140985e-05, | |
| "loss": 0.1647, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 3.411139240506329, | |
| "grad_norm": 0.35084062134264155, | |
| "learning_rate": 1.0998664738684128e-05, | |
| "loss": 0.1645, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 3.4151898734177215, | |
| "grad_norm": 0.32118200208703873, | |
| "learning_rate": 1.0948015846744e-05, | |
| "loss": 0.1447, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 3.419240506329114, | |
| "grad_norm": 0.3396076851798396, | |
| "learning_rate": 1.0897439858239832e-05, | |
| "loss": 0.1507, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 3.423291139240506, | |
| "grad_norm": 0.3437163897158861, | |
| "learning_rate": 1.0846937180503652e-05, | |
| "loss": 0.1512, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 3.4273417721518986, | |
| "grad_norm": 0.334879775170315, | |
| "learning_rate": 1.0796508220277117e-05, | |
| "loss": 0.1418, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 3.431392405063291, | |
| "grad_norm": 0.3434324463333136, | |
| "learning_rate": 1.0746153383708107e-05, | |
| "loss": 0.1733, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 3.4354430379746836, | |
| "grad_norm": 0.34752907368080965, | |
| "learning_rate": 1.0695873076347579e-05, | |
| "loss": 0.1565, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 3.439493670886076, | |
| "grad_norm": 0.33751208937620725, | |
| "learning_rate": 1.0645667703146205e-05, | |
| "loss": 0.1504, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 3.4435443037974682, | |
| "grad_norm": 0.3262943167641179, | |
| "learning_rate": 1.0595537668451161e-05, | |
| "loss": 0.1639, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 3.4475949367088607, | |
| "grad_norm": 0.33191939685435606, | |
| "learning_rate": 1.0545483376002854e-05, | |
| "loss": 0.1536, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 3.4516455696202533, | |
| "grad_norm": 0.33321545501274996, | |
| "learning_rate": 1.0495505228931676e-05, | |
| "loss": 0.1569, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 3.4556962025316453, | |
| "grad_norm": 0.32499327190906185, | |
| "learning_rate": 1.044560362975474e-05, | |
| "loss": 0.1534, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 3.459746835443038, | |
| "grad_norm": 0.33607073008638333, | |
| "learning_rate": 1.0395778980372695e-05, | |
| "loss": 0.1383, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 3.4637974683544304, | |
| "grad_norm": 0.33439427204269273, | |
| "learning_rate": 1.0346031682066381e-05, | |
| "loss": 0.1481, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 3.467848101265823, | |
| "grad_norm": 0.3292194991473772, | |
| "learning_rate": 1.0296362135493724e-05, | |
| "loss": 0.1481, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 3.4718987341772154, | |
| "grad_norm": 0.330039328460945, | |
| "learning_rate": 1.0246770740686422e-05, | |
| "loss": 0.1558, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 3.4759493670886075, | |
| "grad_norm": 0.3547037671563254, | |
| "learning_rate": 1.0197257897046743e-05, | |
| "loss": 0.1567, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "grad_norm": 0.3428076328044179, | |
| "learning_rate": 1.014782400334433e-05, | |
| "loss": 0.1614, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 3.4840506329113925, | |
| "grad_norm": 0.3339264421847442, | |
| "learning_rate": 1.009846945771296e-05, | |
| "loss": 0.1646, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 3.4881012658227846, | |
| "grad_norm": 0.325535128895608, | |
| "learning_rate": 1.0049194657647363e-05, | |
| "loss": 0.1508, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 3.492151898734177, | |
| "grad_norm": 0.3355441093607341, | |
| "learning_rate": 1.0000000000000006e-05, | |
| "loss": 0.1746, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 3.4962025316455696, | |
| "grad_norm": 0.3240077482768786, | |
| "learning_rate": 9.950885880977891e-06, | |
| "loss": 0.1561, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 3.500253164556962, | |
| "grad_norm": 0.32403119758287646, | |
| "learning_rate": 9.901852696139382e-06, | |
| "loss": 0.163, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 3.5043037974683546, | |
| "grad_norm": 0.3278880482428015, | |
| "learning_rate": 9.852900840391027e-06, | |
| "loss": 0.1634, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 3.5083544303797467, | |
| "grad_norm": 0.3516761266449727, | |
| "learning_rate": 9.804030707984313e-06, | |
| "loss": 0.146, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 3.512405063291139, | |
| "grad_norm": 0.33236098379618795, | |
| "learning_rate": 9.755242692512599e-06, | |
| "loss": 0.1469, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 3.5164556962025317, | |
| "grad_norm": 0.32010312342187974, | |
| "learning_rate": 9.70653718690782e-06, | |
| "loss": 0.165, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 3.520506329113924, | |
| "grad_norm": 0.3253472028441089, | |
| "learning_rate": 9.657914583437454e-06, | |
| "loss": 0.1528, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 3.5245569620253163, | |
| "grad_norm": 0.3318605262115, | |
| "learning_rate": 9.609375273701246e-06, | |
| "loss": 0.1478, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 3.528607594936709, | |
| "grad_norm": 0.3205790089521689, | |
| "learning_rate": 9.560919648628133e-06, | |
| "loss": 0.1522, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 3.5326582278481014, | |
| "grad_norm": 0.3211776311642773, | |
| "learning_rate": 9.512548098473047e-06, | |
| "loss": 0.139, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 3.536708860759494, | |
| "grad_norm": 0.3333626958028667, | |
| "learning_rate": 9.464261012813825e-06, | |
| "loss": 0.1674, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 3.540759493670886, | |
| "grad_norm": 0.3139921919849614, | |
| "learning_rate": 9.416058780547987e-06, | |
| "loss": 0.1437, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 3.5448101265822785, | |
| "grad_norm": 0.33375320784571105, | |
| "learning_rate": 9.367941789889714e-06, | |
| "loss": 0.1514, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 3.548860759493671, | |
| "grad_norm": 0.31954960123964576, | |
| "learning_rate": 9.319910428366607e-06, | |
| "loss": 0.1487, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 3.552911392405063, | |
| "grad_norm": 0.3168513083366419, | |
| "learning_rate": 9.271965082816667e-06, | |
| "loss": 0.1472, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 3.5569620253164556, | |
| "grad_norm": 0.3231969531272366, | |
| "learning_rate": 9.224106139385111e-06, | |
| "loss": 0.1645, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 3.561012658227848, | |
| "grad_norm": 0.3201601685187134, | |
| "learning_rate": 9.176333983521291e-06, | |
| "loss": 0.1516, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 3.5650632911392406, | |
| "grad_norm": 0.3107517839234179, | |
| "learning_rate": 9.12864899997558e-06, | |
| "loss": 0.1615, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 3.569113924050633, | |
| "grad_norm": 0.3240755904321847, | |
| "learning_rate": 9.08105157279628e-06, | |
| "loss": 0.1463, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 3.573164556962025, | |
| "grad_norm": 0.3295288960355797, | |
| "learning_rate": 9.03354208532653e-06, | |
| "loss": 0.1416, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 3.5772151898734177, | |
| "grad_norm": 0.31673586555832733, | |
| "learning_rate": 8.986120920201205e-06, | |
| "loss": 0.15, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 3.58126582278481, | |
| "grad_norm": 0.33069209784879433, | |
| "learning_rate": 8.938788459343852e-06, | |
| "loss": 0.163, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 3.5853164556962023, | |
| "grad_norm": 0.3219206775185867, | |
| "learning_rate": 8.8915450839636e-06, | |
| "loss": 0.161, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 3.589367088607595, | |
| "grad_norm": 0.3367685139414499, | |
| "learning_rate": 8.844391174552116e-06, | |
| "loss": 0.138, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 3.5934177215189873, | |
| "grad_norm": 0.3338478628572284, | |
| "learning_rate": 8.797327110880479e-06, | |
| "loss": 0.1559, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 3.59746835443038, | |
| "grad_norm": 0.3081018718195932, | |
| "learning_rate": 8.750353271996206e-06, | |
| "loss": 0.1606, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 3.6015189873417723, | |
| "grad_norm": 0.30363955042671853, | |
| "learning_rate": 8.703470036220132e-06, | |
| "loss": 0.1471, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 3.6055696202531644, | |
| "grad_norm": 0.32851319387053624, | |
| "learning_rate": 8.656677781143394e-06, | |
| "loss": 0.1394, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 3.609620253164557, | |
| "grad_norm": 0.3253973951059937, | |
| "learning_rate": 8.609976883624377e-06, | |
| "loss": 0.1539, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 3.6136708860759494, | |
| "grad_norm": 0.32816819757843785, | |
| "learning_rate": 8.563367719785698e-06, | |
| "loss": 0.144, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 3.6177215189873415, | |
| "grad_norm": 0.33142570255432513, | |
| "learning_rate": 8.516850665011138e-06, | |
| "loss": 0.1467, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 3.621772151898734, | |
| "grad_norm": 0.32069601682572074, | |
| "learning_rate": 8.47042609394269e-06, | |
| "loss": 0.1375, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 3.6258227848101265, | |
| "grad_norm": 0.3248769631677263, | |
| "learning_rate": 8.424094380477432e-06, | |
| "loss": 0.1624, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 3.629873417721519, | |
| "grad_norm": 0.3216932026652119, | |
| "learning_rate": 8.37785589776465e-06, | |
| "loss": 0.1452, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 3.6339240506329116, | |
| "grad_norm": 0.312089221923854, | |
| "learning_rate": 8.331711018202694e-06, | |
| "loss": 0.1555, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 3.6379746835443036, | |
| "grad_norm": 0.3277758855510551, | |
| "learning_rate": 8.285660113436104e-06, | |
| "loss": 0.1558, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 3.642025316455696, | |
| "grad_norm": 0.3204895099558436, | |
| "learning_rate": 8.239703554352527e-06, | |
| "loss": 0.1444, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 3.6460759493670887, | |
| "grad_norm": 0.32196846909904664, | |
| "learning_rate": 8.193841711079775e-06, | |
| "loss": 0.1437, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 3.6501265822784807, | |
| "grad_norm": 0.30685216053761266, | |
| "learning_rate": 8.148074952982828e-06, | |
| "loss": 0.1532, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 3.6541772151898733, | |
| "grad_norm": 0.32052275874136643, | |
| "learning_rate": 8.102403648660859e-06, | |
| "loss": 0.1532, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 3.6582278481012658, | |
| "grad_norm": 0.32966000638634874, | |
| "learning_rate": 8.056828165944282e-06, | |
| "loss": 0.1593, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 3.6622784810126583, | |
| "grad_norm": 0.31254563272090186, | |
| "learning_rate": 8.011348871891762e-06, | |
| "loss": 0.1563, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 3.666329113924051, | |
| "grad_norm": 0.338348294882547, | |
| "learning_rate": 7.965966132787287e-06, | |
| "loss": 0.1644, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 3.670379746835443, | |
| "grad_norm": 0.3253443105130793, | |
| "learning_rate": 7.920680314137189e-06, | |
| "loss": 0.1595, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 3.6744303797468354, | |
| "grad_norm": 0.3181426528494649, | |
| "learning_rate": 7.875491780667246e-06, | |
| "loss": 0.1533, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 3.678481012658228, | |
| "grad_norm": 0.31763838970381586, | |
| "learning_rate": 7.830400896319667e-06, | |
| "loss": 0.1561, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 3.68253164556962, | |
| "grad_norm": 0.32784107378183885, | |
| "learning_rate": 7.785408024250259e-06, | |
| "loss": 0.1433, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 3.6865822784810125, | |
| "grad_norm": 0.3204888814889805, | |
| "learning_rate": 7.74051352682542e-06, | |
| "loss": 0.1495, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 3.690632911392405, | |
| "grad_norm": 0.3301524658013571, | |
| "learning_rate": 7.695717765619257e-06, | |
| "loss": 0.1506, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 3.6946835443037975, | |
| "grad_norm": 0.31575519419103204, | |
| "learning_rate": 7.651021101410673e-06, | |
| "loss": 0.1537, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 3.69873417721519, | |
| "grad_norm": 0.32384239668693826, | |
| "learning_rate": 7.606423894180464e-06, | |
| "loss": 0.1537, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 3.702784810126582, | |
| "grad_norm": 0.32161019476628494, | |
| "learning_rate": 7.56192650310839e-06, | |
| "loss": 0.1542, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 3.7068354430379746, | |
| "grad_norm": 0.32459087598351566, | |
| "learning_rate": 7.517529286570349e-06, | |
| "loss": 0.1437, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 3.710886075949367, | |
| "grad_norm": 0.3238804086153072, | |
| "learning_rate": 7.473232602135387e-06, | |
| "loss": 0.1459, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 3.714936708860759, | |
| "grad_norm": 0.3203992819021694, | |
| "learning_rate": 7.429036806562935e-06, | |
| "loss": 0.1604, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 3.7189873417721517, | |
| "grad_norm": 0.3178472180850627, | |
| "learning_rate": 7.3849422557998455e-06, | |
| "loss": 0.1578, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 3.7230379746835442, | |
| "grad_norm": 0.3162742737926381, | |
| "learning_rate": 7.340949304977567e-06, | |
| "loss": 0.1662, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 3.7270886075949368, | |
| "grad_norm": 0.3144971514621422, | |
| "learning_rate": 7.297058308409282e-06, | |
| "loss": 0.1514, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 3.7311392405063293, | |
| "grad_norm": 0.32776354350884246, | |
| "learning_rate": 7.25326961958704e-06, | |
| "loss": 0.1591, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 3.7351898734177214, | |
| "grad_norm": 0.32047903777566944, | |
| "learning_rate": 7.209583591178921e-06, | |
| "loss": 0.1527, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 3.739240506329114, | |
| "grad_norm": 0.32638772307789227, | |
| "learning_rate": 7.1660005750261925e-06, | |
| "loss": 0.1493, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 3.7432911392405064, | |
| "grad_norm": 0.3275383714288657, | |
| "learning_rate": 7.1225209221404765e-06, | |
| "loss": 0.1798, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 3.747341772151899, | |
| "grad_norm": 0.3112463269111321, | |
| "learning_rate": 7.079144982700909e-06, | |
| "loss": 0.1616, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 3.7513924050632914, | |
| "grad_norm": 0.31714327343502824, | |
| "learning_rate": 7.0358731060513695e-06, | |
| "loss": 0.1665, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 3.7554430379746835, | |
| "grad_norm": 0.3141587620518263, | |
| "learning_rate": 6.99270564069757e-06, | |
| "loss": 0.1515, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 3.759493670886076, | |
| "grad_norm": 0.3128344332285383, | |
| "learning_rate": 6.949642934304375e-06, | |
| "loss": 0.1541, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 3.7635443037974685, | |
| "grad_norm": 0.3231478675072037, | |
| "learning_rate": 6.906685333692871e-06, | |
| "loss": 0.1541, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 3.7675949367088606, | |
| "grad_norm": 0.32020053655977526, | |
| "learning_rate": 6.86383318483769e-06, | |
| "loss": 0.1519, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 3.771645569620253, | |
| "grad_norm": 0.31593866499636036, | |
| "learning_rate": 6.821086832864139e-06, | |
| "loss": 0.1452, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 3.7756962025316456, | |
| "grad_norm": 0.3141193399401488, | |
| "learning_rate": 6.77844662204546e-06, | |
| "loss": 0.1423, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 3.779746835443038, | |
| "grad_norm": 0.3181111353998613, | |
| "learning_rate": 6.7359128958000455e-06, | |
| "loss": 0.1609, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 3.7837974683544306, | |
| "grad_norm": 0.3167461981948986, | |
| "learning_rate": 6.693485996688695e-06, | |
| "loss": 0.1588, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 3.7878481012658227, | |
| "grad_norm": 0.3175358983107682, | |
| "learning_rate": 6.651166266411801e-06, | |
| "loss": 0.1384, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 3.7918987341772152, | |
| "grad_norm": 0.31372041253256017, | |
| "learning_rate": 6.6089540458066725e-06, | |
| "loss": 0.139, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 3.7959493670886078, | |
| "grad_norm": 0.31848945027985176, | |
| "learning_rate": 6.566849674844711e-06, | |
| "loss": 0.1376, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 3.8, | |
| "grad_norm": 0.31283354485917075, | |
| "learning_rate": 6.524853492628747e-06, | |
| "loss": 0.1465, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 3.8040506329113923, | |
| "grad_norm": 0.3284666732895116, | |
| "learning_rate": 6.4829658373902536e-06, | |
| "loss": 0.1391, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 3.808101265822785, | |
| "grad_norm": 0.321425371066553, | |
| "learning_rate": 6.441187046486648e-06, | |
| "loss": 0.1644, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 3.8121518987341774, | |
| "grad_norm": 0.3304505917292473, | |
| "learning_rate": 6.399517456398567e-06, | |
| "loss": 0.1572, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 3.81620253164557, | |
| "grad_norm": 0.31069336593544844, | |
| "learning_rate": 6.357957402727164e-06, | |
| "loss": 0.1404, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 3.820253164556962, | |
| "grad_norm": 0.31267142102348927, | |
| "learning_rate": 6.316507220191395e-06, | |
| "loss": 0.1525, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 3.8243037974683545, | |
| "grad_norm": 0.32689286569992754, | |
| "learning_rate": 6.275167242625331e-06, | |
| "loss": 0.1556, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 3.828354430379747, | |
| "grad_norm": 0.32070661270642914, | |
| "learning_rate": 6.233937802975471e-06, | |
| "loss": 0.145, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 3.832405063291139, | |
| "grad_norm": 0.31703398092540014, | |
| "learning_rate": 6.192819233298046e-06, | |
| "loss": 0.1608, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 3.8364556962025316, | |
| "grad_norm": 0.31260159488328415, | |
| "learning_rate": 6.151811864756383e-06, | |
| "loss": 0.1466, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 3.840506329113924, | |
| "grad_norm": 0.31539490471805615, | |
| "learning_rate": 6.1109160276181655e-06, | |
| "loss": 0.1547, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 3.8445569620253166, | |
| "grad_norm": 0.31866834781677983, | |
| "learning_rate": 6.070132051252868e-06, | |
| "loss": 0.1813, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 3.848607594936709, | |
| "grad_norm": 0.3044800991395673, | |
| "learning_rate": 6.0294602641290034e-06, | |
| "loss": 0.1463, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 3.852658227848101, | |
| "grad_norm": 0.3184302692361387, | |
| "learning_rate": 5.988900993811575e-06, | |
| "loss": 0.1592, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 3.8567088607594937, | |
| "grad_norm": 0.31928593291005253, | |
| "learning_rate": 5.948454566959363e-06, | |
| "loss": 0.1632, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 3.8607594936708862, | |
| "grad_norm": 0.3213322427858275, | |
| "learning_rate": 5.908121309322328e-06, | |
| "loss": 0.1592, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 3.8648101265822783, | |
| "grad_norm": 0.3176953671619879, | |
| "learning_rate": 5.867901545738976e-06, | |
| "loss": 0.1492, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 3.868860759493671, | |
| "grad_norm": 0.31488920246171764, | |
| "learning_rate": 5.827795600133774e-06, | |
| "loss": 0.1446, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 3.8729113924050633, | |
| "grad_norm": 0.31424209352110233, | |
| "learning_rate": 5.787803795514466e-06, | |
| "loss": 0.1505, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 3.876962025316456, | |
| "grad_norm": 0.33108918791350317, | |
| "learning_rate": 5.747926453969576e-06, | |
| "loss": 0.1359, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 3.8810126582278484, | |
| "grad_norm": 0.31382631553110146, | |
| "learning_rate": 5.708163896665708e-06, | |
| "loss": 0.1392, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 3.8850632911392404, | |
| "grad_norm": 0.3172499210075426, | |
| "learning_rate": 5.668516443845047e-06, | |
| "loss": 0.1282, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 3.889113924050633, | |
| "grad_norm": 0.4368791980179305, | |
| "learning_rate": 5.6289844148227225e-06, | |
| "loss": 0.1714, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 3.8931645569620255, | |
| "grad_norm": 0.33007870936232886, | |
| "learning_rate": 5.5895681279842615e-06, | |
| "loss": 0.153, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 3.8972151898734175, | |
| "grad_norm": 0.31298268643301963, | |
| "learning_rate": 5.550267900783019e-06, | |
| "loss": 0.1677, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 3.90126582278481, | |
| "grad_norm": 0.31329103759132, | |
| "learning_rate": 5.511084049737623e-06, | |
| "loss": 0.1407, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 3.9053164556962026, | |
| "grad_norm": 0.3192456742993008, | |
| "learning_rate": 5.4720168904294215e-06, | |
| "loss": 0.1369, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 3.909367088607595, | |
| "grad_norm": 0.30567346404327145, | |
| "learning_rate": 5.433066737499948e-06, | |
| "loss": 0.1637, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 3.9134177215189876, | |
| "grad_norm": 0.31701501065130744, | |
| "learning_rate": 5.394233904648376e-06, | |
| "loss": 0.1435, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 3.9174683544303797, | |
| "grad_norm": 0.3229064150851624, | |
| "learning_rate": 5.355518704628997e-06, | |
| "loss": 0.1681, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 3.921518987341772, | |
| "grad_norm": 0.3168013589789412, | |
| "learning_rate": 5.316921449248731e-06, | |
| "loss": 0.1466, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 3.9255696202531647, | |
| "grad_norm": 0.31613237368175484, | |
| "learning_rate": 5.278442449364538e-06, | |
| "loss": 0.1642, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 3.9296202531645568, | |
| "grad_norm": 0.3100148205301194, | |
| "learning_rate": 5.240082014881016e-06, | |
| "loss": 0.1581, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 3.9336708860759493, | |
| "grad_norm": 0.3230934568058146, | |
| "learning_rate": 5.201840454747822e-06, | |
| "loss": 0.1563, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 3.937721518987342, | |
| "grad_norm": 0.31467976260399083, | |
| "learning_rate": 5.163718076957223e-06, | |
| "loss": 0.1588, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 3.9417721518987343, | |
| "grad_norm": 0.31520797468882866, | |
| "learning_rate": 5.125715188541609e-06, | |
| "loss": 0.1536, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 3.945822784810127, | |
| "grad_norm": 0.3119702134055112, | |
| "learning_rate": 5.087832095571021e-06, | |
| "loss": 0.1369, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 3.949873417721519, | |
| "grad_norm": 0.3124377485837646, | |
| "learning_rate": 5.0500691031506766e-06, | |
| "loss": 0.163, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 3.9539240506329114, | |
| "grad_norm": 0.3155885332944191, | |
| "learning_rate": 5.01242651541854e-06, | |
| "loss": 0.1619, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 3.957974683544304, | |
| "grad_norm": 0.30883246170672224, | |
| "learning_rate": 4.974904635542815e-06, | |
| "loss": 0.144, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 3.962025316455696, | |
| "grad_norm": 0.3146703999924596, | |
| "learning_rate": 4.937503765719582e-06, | |
| "loss": 0.1422, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 3.9660759493670885, | |
| "grad_norm": 0.33193658340558335, | |
| "learning_rate": 4.900224207170299e-06, | |
| "loss": 0.1577, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 3.970126582278481, | |
| "grad_norm": 0.31992902281256563, | |
| "learning_rate": 4.8630662601394065e-06, | |
| "loss": 0.1553, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 3.9741772151898735, | |
| "grad_norm": 0.31208880613017953, | |
| "learning_rate": 4.8260302238918995e-06, | |
| "loss": 0.1635, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 3.978227848101266, | |
| "grad_norm": 0.3064798583350837, | |
| "learning_rate": 4.789116396710924e-06, | |
| "loss": 0.1558, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 3.982278481012658, | |
| "grad_norm": 0.3112614227984009, | |
| "learning_rate": 4.752325075895368e-06, | |
| "loss": 0.1439, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 3.9863291139240506, | |
| "grad_norm": 0.3096568728715481, | |
| "learning_rate": 4.715656557757473e-06, | |
| "loss": 0.1423, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 3.990379746835443, | |
| "grad_norm": 0.32387575104652144, | |
| "learning_rate": 4.679111137620442e-06, | |
| "loss": 0.1534, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 3.9944303797468352, | |
| "grad_norm": 0.3127665221321124, | |
| "learning_rate": 4.6426891098160585e-06, | |
| "loss": 0.1412, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 3.9984810126582278, | |
| "grad_norm": 0.3202222984314029, | |
| "learning_rate": 4.6063907676823474e-06, | |
| "loss": 0.1508, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 4.004050632911392, | |
| "grad_norm": 0.3926121997639457, | |
| "learning_rate": 4.570216403561141e-06, | |
| "loss": 0.0976, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 4.008101265822785, | |
| "grad_norm": 0.37909297675979675, | |
| "learning_rate": 4.534166308795815e-06, | |
| "loss": 0.0912, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 4.012151898734177, | |
| "grad_norm": 0.3182557104140122, | |
| "learning_rate": 4.498240773728859e-06, | |
| "loss": 0.0981, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 4.01620253164557, | |
| "grad_norm": 0.26486359485542693, | |
| "learning_rate": 4.462440087699609e-06, | |
| "loss": 0.0852, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 4.020253164556962, | |
| "grad_norm": 0.23556833426366727, | |
| "learning_rate": 4.426764539041861e-06, | |
| "loss": 0.0965, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 4.024303797468354, | |
| "grad_norm": 0.28710320472581974, | |
| "learning_rate": 4.391214415081582e-06, | |
| "loss": 0.1004, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 4.028354430379747, | |
| "grad_norm": 0.3236154303718752, | |
| "learning_rate": 4.355790002134579e-06, | |
| "loss": 0.0906, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 4.032405063291139, | |
| "grad_norm": 1.4139435948670624, | |
| "learning_rate": 4.320491585504207e-06, | |
| "loss": 0.0775, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 4.036455696202531, | |
| "grad_norm": 0.3771864718142506, | |
| "learning_rate": 4.2853194494790615e-06, | |
| "loss": 0.0898, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 4.040506329113924, | |
| "grad_norm": 0.3534639766508332, | |
| "learning_rate": 4.250273877330691e-06, | |
| "loss": 0.0957, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 4.044556962025316, | |
| "grad_norm": 0.3123023407261607, | |
| "learning_rate": 4.215355151311313e-06, | |
| "loss": 0.0891, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 4.048607594936709, | |
| "grad_norm": 0.28691265632673113, | |
| "learning_rate": 4.180563552651542e-06, | |
| "loss": 0.1022, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 4.052658227848101, | |
| "grad_norm": 0.2580827683848673, | |
| "learning_rate": 4.145899361558147e-06, | |
| "loss": 0.0907, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 4.056708860759493, | |
| "grad_norm": 0.27862534676773915, | |
| "learning_rate": 4.111362857211738e-06, | |
| "loss": 0.0945, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 4.060759493670886, | |
| "grad_norm": 0.3149722944418918, | |
| "learning_rate": 4.076954317764592e-06, | |
| "loss": 0.1014, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 4.0648101265822785, | |
| "grad_norm": 0.3049779436681834, | |
| "learning_rate": 4.042674020338335e-06, | |
| "loss": 0.0949, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 4.0688607594936705, | |
| "grad_norm": 0.28827132705363523, | |
| "learning_rate": 4.0085222410217835e-06, | |
| "loss": 0.0898, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 4.0729113924050635, | |
| "grad_norm": 0.2690793326775318, | |
| "learning_rate": 3.974499254868674e-06, | |
| "loss": 0.0981, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 4.076962025316456, | |
| "grad_norm": 0.23827820688182857, | |
| "learning_rate": 3.940605335895451e-06, | |
| "loss": 0.0872, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 4.0810126582278485, | |
| "grad_norm": 0.2506531687328481, | |
| "learning_rate": 3.90684075707908e-06, | |
| "loss": 0.1004, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 4.085063291139241, | |
| "grad_norm": 0.2705327199048512, | |
| "learning_rate": 3.8732057903548505e-06, | |
| "loss": 0.1011, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 4.089113924050633, | |
| "grad_norm": 0.27655059132281024, | |
| "learning_rate": 3.8397007066141375e-06, | |
| "loss": 0.0936, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 4.093164556962026, | |
| "grad_norm": 0.26333406888431193, | |
| "learning_rate": 3.806325775702304e-06, | |
| "loss": 0.0804, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 4.097215189873418, | |
| "grad_norm": 0.26818861090663104, | |
| "learning_rate": 3.773081266416434e-06, | |
| "loss": 0.0918, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 4.10126582278481, | |
| "grad_norm": 0.2706837852779368, | |
| "learning_rate": 3.739967446503245e-06, | |
| "loss": 0.0947, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 4.105316455696203, | |
| "grad_norm": 0.2545855188261536, | |
| "learning_rate": 3.706984582656894e-06, | |
| "loss": 0.0953, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 4.109367088607595, | |
| "grad_norm": 0.2508179323640254, | |
| "learning_rate": 3.6741329405168237e-06, | |
| "loss": 0.0875, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 4.113417721518988, | |
| "grad_norm": 0.2449526304921578, | |
| "learning_rate": 3.641412784665648e-06, | |
| "loss": 0.0982, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 4.11746835443038, | |
| "grad_norm": 0.233999844659075, | |
| "learning_rate": 3.608824378627005e-06, | |
| "loss": 0.0914, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 4.121518987341772, | |
| "grad_norm": 0.2623536952193745, | |
| "learning_rate": 3.5763679848634337e-06, | |
| "loss": 0.1045, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 4.125569620253165, | |
| "grad_norm": 0.2528562417230574, | |
| "learning_rate": 3.544043864774269e-06, | |
| "loss": 0.0982, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 4.129620253164557, | |
| "grad_norm": 0.2490462964483932, | |
| "learning_rate": 3.5118522786935282e-06, | |
| "loss": 0.0981, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 4.133670886075949, | |
| "grad_norm": 0.23875068889368903, | |
| "learning_rate": 3.479793485887819e-06, | |
| "loss": 0.0957, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 4.137721518987342, | |
| "grad_norm": 0.24055936913684228, | |
| "learning_rate": 3.4478677445542653e-06, | |
| "loss": 0.0933, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 4.141772151898734, | |
| "grad_norm": 0.23815149948042125, | |
| "learning_rate": 3.4160753118183767e-06, | |
| "loss": 0.0774, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 4.145822784810127, | |
| "grad_norm": 0.24524698487562038, | |
| "learning_rate": 3.3844164437320527e-06, | |
| "loss": 0.0909, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 4.149873417721519, | |
| "grad_norm": 0.2444982655358449, | |
| "learning_rate": 3.3528913952714558e-06, | |
| "loss": 0.0819, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 4.153924050632911, | |
| "grad_norm": 0.25500753622090344, | |
| "learning_rate": 3.321500420335e-06, | |
| "loss": 0.1046, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 4.157974683544304, | |
| "grad_norm": 0.2618212143328462, | |
| "learning_rate": 3.290243771741275e-06, | |
| "loss": 0.0903, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 4.162025316455696, | |
| "grad_norm": 0.2458037396856661, | |
| "learning_rate": 3.2591217012270325e-06, | |
| "loss": 0.093, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 4.166075949367088, | |
| "grad_norm": 0.24045577570669296, | |
| "learning_rate": 3.228134459445149e-06, | |
| "loss": 0.0916, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 4.170126582278481, | |
| "grad_norm": 0.24630914107101254, | |
| "learning_rate": 3.1972822959626205e-06, | |
| "loss": 0.0962, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 4.174177215189873, | |
| "grad_norm": 0.23154935904439297, | |
| "learning_rate": 3.166565459258513e-06, | |
| "loss": 0.0873, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 4.178227848101266, | |
| "grad_norm": 0.23572274703578164, | |
| "learning_rate": 3.1359841967220193e-06, | |
| "loss": 0.0872, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 4.182278481012658, | |
| "grad_norm": 0.23645936955050662, | |
| "learning_rate": 3.105538754650419e-06, | |
| "loss": 0.0924, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 4.18632911392405, | |
| "grad_norm": 0.24495488304948712, | |
| "learning_rate": 3.07522937824712e-06, | |
| "loss": 0.1053, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 4.190379746835443, | |
| "grad_norm": 0.24397026424860574, | |
| "learning_rate": 3.0450563116196697e-06, | |
| "loss": 0.0916, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 4.194430379746835, | |
| "grad_norm": 0.24618389894754203, | |
| "learning_rate": 3.0150197977778008e-06, | |
| "loss": 0.0923, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 4.1984810126582275, | |
| "grad_norm": 0.24518674467791307, | |
| "learning_rate": 2.985120078631465e-06, | |
| "loss": 0.1032, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 4.2025316455696204, | |
| "grad_norm": 0.2489009641252913, | |
| "learning_rate": 2.9553573949888893e-06, | |
| "loss": 0.1125, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 4.2065822784810125, | |
| "grad_norm": 0.2402523279331498, | |
| "learning_rate": 2.9257319865546384e-06, | |
| "loss": 0.0825, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 4.2106329113924055, | |
| "grad_norm": 0.23448870037701422, | |
| "learning_rate": 2.896244091927678e-06, | |
| "loss": 0.0896, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 4.2146835443037975, | |
| "grad_norm": 0.2509842665568552, | |
| "learning_rate": 2.8668939485994584e-06, | |
| "loss": 0.0999, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 4.21873417721519, | |
| "grad_norm": 0.24583205543106165, | |
| "learning_rate": 2.837681792951994e-06, | |
| "loss": 0.0905, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 4.222784810126583, | |
| "grad_norm": 0.2503118877931947, | |
| "learning_rate": 2.808607860255981e-06, | |
| "loss": 0.1109, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 4.226835443037975, | |
| "grad_norm": 0.2523486374166944, | |
| "learning_rate": 2.7796723846688634e-06, | |
| "loss": 0.0834, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 4.230886075949367, | |
| "grad_norm": 0.25839494333535545, | |
| "learning_rate": 2.7508755992329937e-06, | |
| "loss": 0.0982, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 4.23493670886076, | |
| "grad_norm": 0.2460562028350464, | |
| "learning_rate": 2.722217735873718e-06, | |
| "loss": 0.1054, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 4.238987341772152, | |
| "grad_norm": 0.2390205179368803, | |
| "learning_rate": 2.6936990253975315e-06, | |
| "loss": 0.0875, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 4.243037974683545, | |
| "grad_norm": 0.24053982232460516, | |
| "learning_rate": 2.665319697490205e-06, | |
| "loss": 0.0962, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 4.247088607594937, | |
| "grad_norm": 0.24951029091680613, | |
| "learning_rate": 2.637079980714945e-06, | |
| "loss": 0.0986, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 4.251139240506329, | |
| "grad_norm": 0.23863360939151623, | |
| "learning_rate": 2.6089801025105453e-06, | |
| "loss": 0.0872, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 4.255189873417722, | |
| "grad_norm": 0.24270919633471966, | |
| "learning_rate": 2.581020289189571e-06, | |
| "loss": 0.0841, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 4.259240506329114, | |
| "grad_norm": 0.24498674633607812, | |
| "learning_rate": 2.553200765936501e-06, | |
| "loss": 0.0911, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 4.263291139240506, | |
| "grad_norm": 0.23676134056713014, | |
| "learning_rate": 2.525521756805962e-06, | |
| "loss": 0.087, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 4.267341772151899, | |
| "grad_norm": 0.2350906300248434, | |
| "learning_rate": 2.497983484720885e-06, | |
| "loss": 0.0885, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 4.271392405063291, | |
| "grad_norm": 0.24751333118027302, | |
| "learning_rate": 2.470586171470728e-06, | |
| "loss": 0.0976, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 4.275443037974684, | |
| "grad_norm": 0.24318917553805863, | |
| "learning_rate": 2.4433300377096836e-06, | |
| "loss": 0.0879, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 4.279493670886076, | |
| "grad_norm": 0.24269390414228154, | |
| "learning_rate": 2.4162153029549073e-06, | |
| "loss": 0.1025, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 4.283544303797468, | |
| "grad_norm": 0.24164860351532744, | |
| "learning_rate": 2.3892421855847458e-06, | |
| "loss": 0.1029, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 4.287594936708861, | |
| "grad_norm": 0.2453938374534618, | |
| "learning_rate": 2.362410902836978e-06, | |
| "loss": 0.0862, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 4.291645569620253, | |
| "grad_norm": 0.24735470046859806, | |
| "learning_rate": 2.3357216708070653e-06, | |
| "loss": 0.1043, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 4.295696202531645, | |
| "grad_norm": 0.24149144482682816, | |
| "learning_rate": 2.309174704446411e-06, | |
| "loss": 0.0929, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 4.299746835443038, | |
| "grad_norm": 0.234514885149423, | |
| "learning_rate": 2.2827702175606437e-06, | |
| "loss": 0.0936, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 4.30379746835443, | |
| "grad_norm": 0.2513573739515705, | |
| "learning_rate": 2.256508422807855e-06, | |
| "loss": 0.0805, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 4.307848101265823, | |
| "grad_norm": 0.2382438541434963, | |
| "learning_rate": 2.230389531696946e-06, | |
| "loss": 0.0827, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 4.311898734177215, | |
| "grad_norm": 0.23883629821632754, | |
| "learning_rate": 2.204413754585857e-06, | |
| "loss": 0.0998, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 4.315949367088607, | |
| "grad_norm": 0.23038164805155517, | |
| "learning_rate": 2.1785813006799406e-06, | |
| "loss": 0.077, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 4.32, | |
| "grad_norm": 0.23994549150399913, | |
| "learning_rate": 2.1528923780302224e-06, | |
| "loss": 0.0906, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 4.324050632911392, | |
| "grad_norm": 0.23891438226193046, | |
| "learning_rate": 2.127347193531757e-06, | |
| "loss": 0.0918, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 4.328101265822784, | |
| "grad_norm": 0.24598341905040336, | |
| "learning_rate": 2.101945952921942e-06, | |
| "loss": 0.1004, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 4.332151898734177, | |
| "grad_norm": 0.23063758486326735, | |
| "learning_rate": 2.0766888607788906e-06, | |
| "loss": 0.0963, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 4.3362025316455695, | |
| "grad_norm": 0.2399081849762779, | |
| "learning_rate": 2.0515761205197337e-06, | |
| "loss": 0.0885, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 4.340253164556962, | |
| "grad_norm": 0.24753789052571615, | |
| "learning_rate": 2.0266079343990453e-06, | |
| "loss": 0.095, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 4.3443037974683545, | |
| "grad_norm": 0.22900902915673513, | |
| "learning_rate": 2.0017845035071494e-06, | |
| "loss": 0.0849, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 4.348354430379747, | |
| "grad_norm": 0.24036605736396544, | |
| "learning_rate": 1.9771060277685537e-06, | |
| "loss": 0.09, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 4.3524050632911395, | |
| "grad_norm": 0.2341128510875137, | |
| "learning_rate": 1.95257270594031e-06, | |
| "loss": 0.0894, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 4.356455696202532, | |
| "grad_norm": 0.23528518705635199, | |
| "learning_rate": 1.9281847356104188e-06, | |
| "loss": 0.0949, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 4.360506329113924, | |
| "grad_norm": 0.24923236956610167, | |
| "learning_rate": 1.9039423131962365e-06, | |
| "loss": 0.1047, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 4.364556962025317, | |
| "grad_norm": 0.2485414028168334, | |
| "learning_rate": 1.8798456339429027e-06, | |
| "loss": 0.1023, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 4.368607594936709, | |
| "grad_norm": 0.2479133624001341, | |
| "learning_rate": 1.8558948919217612e-06, | |
| "loss": 0.09, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 4.372658227848102, | |
| "grad_norm": 0.23718143536609718, | |
| "learning_rate": 1.8320902800287954e-06, | |
| "loss": 0.0845, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 4.376708860759494, | |
| "grad_norm": 0.2461060389302043, | |
| "learning_rate": 1.8084319899830726e-06, | |
| "loss": 0.0971, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 4.380759493670886, | |
| "grad_norm": 0.2449348416600689, | |
| "learning_rate": 1.7849202123252097e-06, | |
| "loss": 0.0875, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 4.384810126582279, | |
| "grad_norm": 0.23631571377662802, | |
| "learning_rate": 1.7615551364158401e-06, | |
| "loss": 0.1079, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 4.388860759493671, | |
| "grad_norm": 0.23988175887163707, | |
| "learning_rate": 1.738336950434061e-06, | |
| "loss": 0.1056, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 4.392911392405063, | |
| "grad_norm": 0.2526315445282431, | |
| "learning_rate": 1.715265841375957e-06, | |
| "loss": 0.1136, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 4.396962025316456, | |
| "grad_norm": 0.23746575943538656, | |
| "learning_rate": 1.6923419950530684e-06, | |
| "loss": 0.0807, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 4.401012658227848, | |
| "grad_norm": 0.243475986068808, | |
| "learning_rate": 1.6695655960909008e-06, | |
| "loss": 0.0756, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 4.405063291139241, | |
| "grad_norm": 0.23114415799129845, | |
| "learning_rate": 1.646936827927441e-06, | |
| "loss": 0.0757, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 4.409113924050633, | |
| "grad_norm": 0.24304268161618928, | |
| "learning_rate": 1.6244558728116766e-06, | |
| "loss": 0.0847, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 4.413164556962025, | |
| "grad_norm": 0.23661339812190532, | |
| "learning_rate": 1.6021229118021265e-06, | |
| "loss": 0.0935, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 4.417215189873418, | |
| "grad_norm": 0.24725777551951889, | |
| "learning_rate": 1.5799381247653967e-06, | |
| "loss": 0.1059, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 4.42126582278481, | |
| "grad_norm": 0.24249230223350732, | |
| "learning_rate": 1.5579016903747013e-06, | |
| "loss": 0.0779, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 4.425316455696202, | |
| "grad_norm": 0.24340670288404195, | |
| "learning_rate": 1.5360137861084656e-06, | |
| "loss": 0.105, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 4.429367088607595, | |
| "grad_norm": 0.2360006458177422, | |
| "learning_rate": 1.5142745882488475e-06, | |
| "loss": 0.0977, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 4.433417721518987, | |
| "grad_norm": 0.24719289582547463, | |
| "learning_rate": 1.4926842718803691e-06, | |
| "loss": 0.0895, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 4.43746835443038, | |
| "grad_norm": 0.2509000814554369, | |
| "learning_rate": 1.4712430108884657e-06, | |
| "loss": 0.1097, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 4.441518987341772, | |
| "grad_norm": 0.2406876512567959, | |
| "learning_rate": 1.4499509779581078e-06, | |
| "loss": 0.0898, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 4.445569620253164, | |
| "grad_norm": 0.23060857309254582, | |
| "learning_rate": 1.4288083445723988e-06, | |
| "loss": 0.0982, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 4.449620253164557, | |
| "grad_norm": 0.24576025680043662, | |
| "learning_rate": 1.4078152810112045e-06, | |
| "loss": 0.1044, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 4.453670886075949, | |
| "grad_norm": 0.2429286451631526, | |
| "learning_rate": 1.3869719563497697e-06, | |
| "loss": 0.0983, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 4.457721518987341, | |
| "grad_norm": 0.24672809562448694, | |
| "learning_rate": 1.3662785384573663e-06, | |
| "loss": 0.0908, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 4.461772151898734, | |
| "grad_norm": 0.2306309081910957, | |
| "learning_rate": 1.3457351939959383e-06, | |
| "loss": 0.082, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 4.465822784810126, | |
| "grad_norm": 0.2381492433677817, | |
| "learning_rate": 1.3253420884187551e-06, | |
| "loss": 0.0963, | |
| "step": 1101 | |
| }, | |
| { | |
| "epoch": 4.469873417721519, | |
| "grad_norm": 0.23637571800609902, | |
| "learning_rate": 1.3050993859690953e-06, | |
| "loss": 0.096, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 4.473924050632911, | |
| "grad_norm": 0.23725033237894486, | |
| "learning_rate": 1.2850072496788869e-06, | |
| "loss": 0.0981, | |
| "step": 1103 | |
| }, | |
| { | |
| "epoch": 4.4779746835443035, | |
| "grad_norm": 0.24286902073925826, | |
| "learning_rate": 1.2650658413674434e-06, | |
| "loss": 0.1023, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 4.4820253164556965, | |
| "grad_norm": 0.23527353686764307, | |
| "learning_rate": 1.2452753216401226e-06, | |
| "loss": 0.096, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 4.4860759493670885, | |
| "grad_norm": 0.24974072210415374, | |
| "learning_rate": 1.2256358498870503e-06, | |
| "loss": 0.1048, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 4.490126582278481, | |
| "grad_norm": 0.24081716084979632, | |
| "learning_rate": 1.2061475842818337e-06, | |
| "loss": 0.0794, | |
| "step": 1107 | |
| }, | |
| { | |
| "epoch": 4.494177215189874, | |
| "grad_norm": 0.2389770005666609, | |
| "learning_rate": 1.1868106817802816e-06, | |
| "loss": 0.0911, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 4.498227848101266, | |
| "grad_norm": 0.24929851803454167, | |
| "learning_rate": 1.1676252981191482e-06, | |
| "loss": 0.0883, | |
| "step": 1109 | |
| }, | |
| { | |
| "epoch": 4.502278481012659, | |
| "grad_norm": 0.23709835351081548, | |
| "learning_rate": 1.1485915878148823e-06, | |
| "loss": 0.0907, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 4.506329113924051, | |
| "grad_norm": 0.2395260774510407, | |
| "learning_rate": 1.1297097041623584e-06, | |
| "loss": 0.1066, | |
| "step": 1111 | |
| }, | |
| { | |
| "epoch": 4.510379746835443, | |
| "grad_norm": 0.2344646426471566, | |
| "learning_rate": 1.1109797992336847e-06, | |
| "loss": 0.0884, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 4.514430379746836, | |
| "grad_norm": 0.2514847213181512, | |
| "learning_rate": 1.092402023876933e-06, | |
| "loss": 0.102, | |
| "step": 1113 | |
| }, | |
| { | |
| "epoch": 4.518481012658228, | |
| "grad_norm": 0.23576725743127733, | |
| "learning_rate": 1.0739765277149527e-06, | |
| "loss": 0.092, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 4.52253164556962, | |
| "grad_norm": 0.2457410348709192, | |
| "learning_rate": 1.0557034591441596e-06, | |
| "loss": 0.1073, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 4.526582278481013, | |
| "grad_norm": 0.24623484550436014, | |
| "learning_rate": 1.0375829653333324e-06, | |
| "loss": 0.086, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 4.530632911392405, | |
| "grad_norm": 0.2354860525868563, | |
| "learning_rate": 1.0196151922224385e-06, | |
| "loss": 0.0805, | |
| "step": 1117 | |
| }, | |
| { | |
| "epoch": 4.534683544303798, | |
| "grad_norm": 0.24363497131103673, | |
| "learning_rate": 1.0018002845214526e-06, | |
| "loss": 0.0939, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 4.53873417721519, | |
| "grad_norm": 0.2386347309996161, | |
| "learning_rate": 9.841383857091947e-07, | |
| "loss": 0.0909, | |
| "step": 1119 | |
| }, | |
| { | |
| "epoch": 4.542784810126582, | |
| "grad_norm": 0.23632878675900199, | |
| "learning_rate": 9.666296380321616e-07, | |
| "loss": 0.1029, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 4.546835443037975, | |
| "grad_norm": 0.24531153302153935, | |
| "learning_rate": 9.492741825034124e-07, | |
| "loss": 0.0876, | |
| "step": 1121 | |
| }, | |
| { | |
| "epoch": 4.550886075949367, | |
| "grad_norm": 0.2402508111195689, | |
| "learning_rate": 9.320721589013892e-07, | |
| "loss": 0.1007, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 4.55493670886076, | |
| "grad_norm": 0.23377146949886696, | |
| "learning_rate": 9.150237057688339e-07, | |
| "loss": 0.0862, | |
| "step": 1123 | |
| }, | |
| { | |
| "epoch": 4.558987341772152, | |
| "grad_norm": 0.24109016517397894, | |
| "learning_rate": 8.981289604116328e-07, | |
| "loss": 0.1034, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 4.563037974683544, | |
| "grad_norm": 0.24542835722160722, | |
| "learning_rate": 8.813880588977542e-07, | |
| "loss": 0.0936, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 4.567088607594937, | |
| "grad_norm": 0.2489790567655161, | |
| "learning_rate": 8.648011360561126e-07, | |
| "loss": 0.0809, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 4.571139240506329, | |
| "grad_norm": 0.2356683732580991, | |
| "learning_rate": 8.483683254755037e-07, | |
| "loss": 0.0983, | |
| "step": 1127 | |
| }, | |
| { | |
| "epoch": 4.575189873417721, | |
| "grad_norm": 0.2403048034498668, | |
| "learning_rate": 8.320897595035227e-07, | |
| "loss": 0.089, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 4.579240506329114, | |
| "grad_norm": 0.23808270065144865, | |
| "learning_rate": 8.159655692455093e-07, | |
| "loss": 0.097, | |
| "step": 1129 | |
| }, | |
| { | |
| "epoch": 4.583291139240506, | |
| "grad_norm": 0.25250159702819763, | |
| "learning_rate": 7.999958845634648e-07, | |
| "loss": 0.0865, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 4.587341772151898, | |
| "grad_norm": 0.24247673582560555, | |
| "learning_rate": 7.841808340750478e-07, | |
| "loss": 0.0812, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 4.591392405063291, | |
| "grad_norm": 0.23873280320709347, | |
| "learning_rate": 7.685205451524869e-07, | |
| "loss": 0.0966, | |
| "step": 1132 | |
| }, | |
| { | |
| "epoch": 4.595443037974683, | |
| "grad_norm": 0.2355711319735643, | |
| "learning_rate": 7.530151439216027e-07, | |
| "loss": 0.0834, | |
| "step": 1133 | |
| }, | |
| { | |
| "epoch": 4.599493670886076, | |
| "grad_norm": 0.23986467257914115, | |
| "learning_rate": 7.376647552607675e-07, | |
| "loss": 0.0926, | |
| "step": 1134 | |
| }, | |
| { | |
| "epoch": 4.603544303797468, | |
| "grad_norm": 0.23213116972910225, | |
| "learning_rate": 7.224695027998963e-07, | |
| "loss": 0.0777, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 4.6075949367088604, | |
| "grad_norm": 0.23516427807930454, | |
| "learning_rate": 7.07429508919466e-07, | |
| "loss": 0.0819, | |
| "step": 1136 | |
| }, | |
| { | |
| "epoch": 4.611645569620253, | |
| "grad_norm": 0.23088893586153994, | |
| "learning_rate": 6.925448947495206e-07, | |
| "loss": 0.087, | |
| "step": 1137 | |
| }, | |
| { | |
| "epoch": 4.6156962025316455, | |
| "grad_norm": 0.23717208713222387, | |
| "learning_rate": 6.778157801686936e-07, | |
| "loss": 0.0764, | |
| "step": 1138 | |
| }, | |
| { | |
| "epoch": 4.619746835443038, | |
| "grad_norm": 0.24629286276165896, | |
| "learning_rate": 6.632422838032515e-07, | |
| "loss": 0.0937, | |
| "step": 1139 | |
| }, | |
| { | |
| "epoch": 4.6237974683544305, | |
| "grad_norm": 0.23788116471896478, | |
| "learning_rate": 6.488245230261281e-07, | |
| "loss": 0.1047, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 4.627848101265823, | |
| "grad_norm": 0.2363084094475843, | |
| "learning_rate": 6.345626139559868e-07, | |
| "loss": 0.0915, | |
| "step": 1141 | |
| }, | |
| { | |
| "epoch": 4.6318987341772155, | |
| "grad_norm": 0.24533543316075768, | |
| "learning_rate": 6.204566714562866e-07, | |
| "loss": 0.1005, | |
| "step": 1142 | |
| }, | |
| { | |
| "epoch": 4.635949367088608, | |
| "grad_norm": 0.24963139307440327, | |
| "learning_rate": 6.06506809134344e-07, | |
| "loss": 0.1157, | |
| "step": 1143 | |
| }, | |
| { | |
| "epoch": 4.64, | |
| "grad_norm": 0.231915939184196, | |
| "learning_rate": 5.927131393404373e-07, | |
| "loss": 0.0977, | |
| "step": 1144 | |
| }, | |
| { | |
| "epoch": 4.644050632911393, | |
| "grad_norm": 0.23474494020459968, | |
| "learning_rate": 5.790757731668817e-07, | |
| "loss": 0.0812, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 4.648101265822785, | |
| "grad_norm": 0.23536417494713993, | |
| "learning_rate": 5.655948204471507e-07, | |
| "loss": 0.0981, | |
| "step": 1146 | |
| }, | |
| { | |
| "epoch": 4.652151898734177, | |
| "grad_norm": 0.23469862954370474, | |
| "learning_rate": 5.522703897549875e-07, | |
| "loss": 0.0897, | |
| "step": 1147 | |
| }, | |
| { | |
| "epoch": 4.65620253164557, | |
| "grad_norm": 0.2429676804619465, | |
| "learning_rate": 5.391025884035239e-07, | |
| "loss": 0.0985, | |
| "step": 1148 | |
| }, | |
| { | |
| "epoch": 4.660253164556962, | |
| "grad_norm": 0.2520980845714133, | |
| "learning_rate": 5.260915224444207e-07, | |
| "loss": 0.1005, | |
| "step": 1149 | |
| }, | |
| { | |
| "epoch": 4.664303797468355, | |
| "grad_norm": 0.24695910195994164, | |
| "learning_rate": 5.132372966670129e-07, | |
| "loss": 0.0868, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 4.668354430379747, | |
| "grad_norm": 0.24247486525302908, | |
| "learning_rate": 5.005400145974704e-07, | |
| "loss": 0.0946, | |
| "step": 1151 | |
| }, | |
| { | |
| "epoch": 4.672405063291139, | |
| "grad_norm": 0.24522169736681962, | |
| "learning_rate": 4.879997784979562e-07, | |
| "loss": 0.0998, | |
| "step": 1152 | |
| }, | |
| { | |
| "epoch": 4.676455696202532, | |
| "grad_norm": 0.24909905734278656, | |
| "learning_rate": 4.7561668936580984e-07, | |
| "loss": 0.0916, | |
| "step": 1153 | |
| }, | |
| { | |
| "epoch": 4.680506329113924, | |
| "grad_norm": 0.2283555856515814, | |
| "learning_rate": 4.6339084693272306e-07, | |
| "loss": 0.0936, | |
| "step": 1154 | |
| }, | |
| { | |
| "epoch": 4.684556962025317, | |
| "grad_norm": 0.2394617819503974, | |
| "learning_rate": 4.5132234966395847e-07, | |
| "loss": 0.1051, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 4.688607594936709, | |
| "grad_norm": 0.23958315640300226, | |
| "learning_rate": 4.3941129475752795e-07, | |
| "loss": 0.0881, | |
| "step": 1156 | |
| }, | |
| { | |
| "epoch": 4.692658227848101, | |
| "grad_norm": 0.23159134835906633, | |
| "learning_rate": 4.27657778143431e-07, | |
| "loss": 0.0942, | |
| "step": 1157 | |
| }, | |
| { | |
| "epoch": 4.696708860759494, | |
| "grad_norm": 0.233086408667481, | |
| "learning_rate": 4.1606189448287757e-07, | |
| "loss": 0.0903, | |
| "step": 1158 | |
| }, | |
| { | |
| "epoch": 4.700759493670886, | |
| "grad_norm": 0.23134844591274784, | |
| "learning_rate": 4.046237371675177e-07, | |
| "loss": 0.0969, | |
| "step": 1159 | |
| }, | |
| { | |
| "epoch": 4.704810126582278, | |
| "grad_norm": 0.23892345891711947, | |
| "learning_rate": 3.9334339831869963e-07, | |
| "loss": 0.0806, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 4.708860759493671, | |
| "grad_norm": 0.24425268882727924, | |
| "learning_rate": 3.8222096878671955e-07, | |
| "loss": 0.098, | |
| "step": 1161 | |
| }, | |
| { | |
| "epoch": 4.712911392405063, | |
| "grad_norm": 0.23516578280397074, | |
| "learning_rate": 3.7125653815009545e-07, | |
| "loss": 0.0944, | |
| "step": 1162 | |
| }, | |
| { | |
| "epoch": 4.716962025316455, | |
| "grad_norm": 0.2400701806810615, | |
| "learning_rate": 3.6045019471484974e-07, | |
| "loss": 0.0969, | |
| "step": 1163 | |
| }, | |
| { | |
| "epoch": 4.721012658227848, | |
| "grad_norm": 0.2490906017836151, | |
| "learning_rate": 3.498020255137813e-07, | |
| "loss": 0.1063, | |
| "step": 1164 | |
| }, | |
| { | |
| "epoch": 4.72506329113924, | |
| "grad_norm": 0.24624768621456938, | |
| "learning_rate": 3.393121163057811e-07, | |
| "loss": 0.0958, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 4.729113924050633, | |
| "grad_norm": 0.23730844346421232, | |
| "learning_rate": 3.289805515751399e-07, | |
| "loss": 0.0926, | |
| "step": 1166 | |
| }, | |
| { | |
| "epoch": 4.733164556962025, | |
| "grad_norm": 0.2447494296520216, | |
| "learning_rate": 3.188074145308573e-07, | |
| "loss": 0.096, | |
| "step": 1167 | |
| }, | |
| { | |
| "epoch": 4.737215189873417, | |
| "grad_norm": 0.22674748144847953, | |
| "learning_rate": 3.087927871059804e-07, | |
| "loss": 0.0851, | |
| "step": 1168 | |
| }, | |
| { | |
| "epoch": 4.74126582278481, | |
| "grad_norm": 0.23847833202571408, | |
| "learning_rate": 2.989367499569418e-07, | |
| "loss": 0.0945, | |
| "step": 1169 | |
| }, | |
| { | |
| "epoch": 4.745316455696202, | |
| "grad_norm": 0.23747025827648227, | |
| "learning_rate": 2.8923938246290917e-07, | |
| "loss": 0.0834, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 4.749367088607595, | |
| "grad_norm": 0.24639585342977083, | |
| "learning_rate": 2.7970076272514804e-07, | |
| "loss": 0.0865, | |
| "step": 1171 | |
| }, | |
| { | |
| "epoch": 4.7534177215189874, | |
| "grad_norm": 0.24586120247683355, | |
| "learning_rate": 2.703209675663887e-07, | |
| "loss": 0.0934, | |
| "step": 1172 | |
| }, | |
| { | |
| "epoch": 4.7574683544303795, | |
| "grad_norm": 0.23951108071149524, | |
| "learning_rate": 2.6110007253021374e-07, | |
| "loss": 0.0958, | |
| "step": 1173 | |
| }, | |
| { | |
| "epoch": 4.7615189873417725, | |
| "grad_norm": 0.2382858682720842, | |
| "learning_rate": 2.520381518804471e-07, | |
| "loss": 0.0868, | |
| "step": 1174 | |
| }, | |
| { | |
| "epoch": 4.7655696202531646, | |
| "grad_norm": 0.23857792812585896, | |
| "learning_rate": 2.4313527860054585e-07, | |
| "loss": 0.0862, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 4.769620253164557, | |
| "grad_norm": 0.2409937593775192, | |
| "learning_rate": 2.343915243930317e-07, | |
| "loss": 0.1017, | |
| "step": 1176 | |
| }, | |
| { | |
| "epoch": 4.77367088607595, | |
| "grad_norm": 0.24261859910330835, | |
| "learning_rate": 2.2580695967889367e-07, | |
| "loss": 0.1084, | |
| "step": 1177 | |
| }, | |
| { | |
| "epoch": 4.777721518987342, | |
| "grad_norm": 0.2394370486763488, | |
| "learning_rate": 2.1738165359704189e-07, | |
| "loss": 0.1088, | |
| "step": 1178 | |
| }, | |
| { | |
| "epoch": 4.781772151898734, | |
| "grad_norm": 0.24005227289891273, | |
| "learning_rate": 2.0911567400373257e-07, | |
| "loss": 0.0846, | |
| "step": 1179 | |
| }, | |
| { | |
| "epoch": 4.785822784810127, | |
| "grad_norm": 0.24647140410187718, | |
| "learning_rate": 2.0100908747202607e-07, | |
| "loss": 0.0929, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 4.789873417721519, | |
| "grad_norm": 0.2371708834097907, | |
| "learning_rate": 1.9306195929125638e-07, | |
| "loss": 0.0775, | |
| "step": 1181 | |
| }, | |
| { | |
| "epoch": 4.793924050632912, | |
| "grad_norm": 0.2282177884604912, | |
| "learning_rate": 1.8527435346650247e-07, | |
| "loss": 0.0844, | |
| "step": 1182 | |
| }, | |
| { | |
| "epoch": 4.797974683544304, | |
| "grad_norm": 0.23904138057202207, | |
| "learning_rate": 1.7764633271807108e-07, | |
| "loss": 0.0893, | |
| "step": 1183 | |
| }, | |
| { | |
| "epoch": 4.802025316455696, | |
| "grad_norm": 0.2310262058949595, | |
| "learning_rate": 1.7017795848099262e-07, | |
| "loss": 0.0819, | |
| "step": 1184 | |
| }, | |
| { | |
| "epoch": 4.806075949367089, | |
| "grad_norm": 0.24130721306501762, | |
| "learning_rate": 1.6286929090452596e-07, | |
| "loss": 0.0935, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 4.810126582278481, | |
| "grad_norm": 0.2373989628109118, | |
| "learning_rate": 1.557203888516745e-07, | |
| "loss": 0.098, | |
| "step": 1186 | |
| }, | |
| { | |
| "epoch": 4.814177215189874, | |
| "grad_norm": 0.2373539828823543, | |
| "learning_rate": 1.487313098987131e-07, | |
| "loss": 0.0968, | |
| "step": 1187 | |
| }, | |
| { | |
| "epoch": 4.818227848101266, | |
| "grad_norm": 0.235732914536266, | |
| "learning_rate": 1.4190211033472402e-07, | |
| "loss": 0.0864, | |
| "step": 1188 | |
| }, | |
| { | |
| "epoch": 4.822278481012658, | |
| "grad_norm": 0.24726603028069832, | |
| "learning_rate": 1.3523284516113955e-07, | |
| "loss": 0.1015, | |
| "step": 1189 | |
| }, | |
| { | |
| "epoch": 4.826329113924051, | |
| "grad_norm": 0.2387853842863171, | |
| "learning_rate": 1.2872356809130682e-07, | |
| "loss": 0.0963, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 4.830379746835443, | |
| "grad_norm": 0.24401095131115463, | |
| "learning_rate": 1.2237433155004807e-07, | |
| "loss": 0.084, | |
| "step": 1191 | |
| }, | |
| { | |
| "epoch": 4.834430379746835, | |
| "grad_norm": 0.24505271602909545, | |
| "learning_rate": 1.1618518667323886e-07, | |
| "loss": 0.1009, | |
| "step": 1192 | |
| }, | |
| { | |
| "epoch": 4.838481012658228, | |
| "grad_norm": 0.24268169491993416, | |
| "learning_rate": 1.1015618330740385e-07, | |
| "loss": 0.0919, | |
| "step": 1193 | |
| }, | |
| { | |
| "epoch": 4.84253164556962, | |
| "grad_norm": 0.2467564044148994, | |
| "learning_rate": 1.042873700093061e-07, | |
| "loss": 0.0978, | |
| "step": 1194 | |
| }, | |
| { | |
| "epoch": 4.846582278481012, | |
| "grad_norm": 0.2489190604347198, | |
| "learning_rate": 9.857879404556291e-08, | |
| "loss": 0.097, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 4.850632911392405, | |
| "grad_norm": 0.23492753904787372, | |
| "learning_rate": 9.303050139225722e-08, | |
| "loss": 0.1016, | |
| "step": 1196 | |
| }, | |
| { | |
| "epoch": 4.854683544303797, | |
| "grad_norm": 0.2432005253900947, | |
| "learning_rate": 8.76425367345779e-08, | |
| "loss": 0.0887, | |
| "step": 1197 | |
| }, | |
| { | |
| "epoch": 4.85873417721519, | |
| "grad_norm": 0.2317578197230689, | |
| "learning_rate": 8.241494346644897e-08, | |
| "loss": 0.0953, | |
| "step": 1198 | |
| }, | |
| { | |
| "epoch": 4.862784810126582, | |
| "grad_norm": 0.23455059114895876, | |
| "learning_rate": 7.734776369019204e-08, | |
| "loss": 0.0804, | |
| "step": 1199 | |
| }, | |
| { | |
| "epoch": 4.866835443037974, | |
| "grad_norm": 0.2390985135068817, | |
| "learning_rate": 7.244103821617332e-08, | |
| "loss": 0.0978, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 4.870886075949367, | |
| "grad_norm": 0.23919941828106348, | |
| "learning_rate": 6.769480656248606e-08, | |
| "loss": 0.0911, | |
| "step": 1201 | |
| }, | |
| { | |
| "epoch": 4.874936708860759, | |
| "grad_norm": 0.23391112062589797, | |
| "learning_rate": 6.310910695462635e-08, | |
| "loss": 0.1127, | |
| "step": 1202 | |
| }, | |
| { | |
| "epoch": 4.878987341772152, | |
| "grad_norm": 0.22919313979560194, | |
| "learning_rate": 5.8683976325191185e-08, | |
| "loss": 0.0856, | |
| "step": 1203 | |
| }, | |
| { | |
| "epoch": 4.883037974683544, | |
| "grad_norm": 0.24875766470740943, | |
| "learning_rate": 5.4419450313571984e-08, | |
| "loss": 0.1025, | |
| "step": 1204 | |
| }, | |
| { | |
| "epoch": 4.8870886075949365, | |
| "grad_norm": 0.22208355238151883, | |
| "learning_rate": 5.031556326567488e-08, | |
| "loss": 0.0714, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 4.891139240506329, | |
| "grad_norm": 0.22974034774144683, | |
| "learning_rate": 4.637234823364312e-08, | |
| "loss": 0.0841, | |
| "step": 1206 | |
| }, | |
| { | |
| "epoch": 4.8951898734177215, | |
| "grad_norm": 0.2329595437174183, | |
| "learning_rate": 4.258983697558838e-08, | |
| "loss": 0.0878, | |
| "step": 1207 | |
| }, | |
| { | |
| "epoch": 4.899240506329114, | |
| "grad_norm": 0.23694945983596744, | |
| "learning_rate": 3.896805995533548e-08, | |
| "loss": 0.0956, | |
| "step": 1208 | |
| }, | |
| { | |
| "epoch": 4.9032911392405065, | |
| "grad_norm": 0.24015048862765845, | |
| "learning_rate": 3.550704634218028e-08, | |
| "loss": 0.0971, | |
| "step": 1209 | |
| }, | |
| { | |
| "epoch": 4.907341772151899, | |
| "grad_norm": 0.2322074929859014, | |
| "learning_rate": 3.2206824010647676e-08, | |
| "loss": 0.097, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 4.911392405063291, | |
| "grad_norm": 0.24492300364321398, | |
| "learning_rate": 2.9067419540278476e-08, | |
| "loss": 0.0889, | |
| "step": 1211 | |
| }, | |
| { | |
| "epoch": 4.915443037974684, | |
| "grad_norm": 0.23804301418162865, | |
| "learning_rate": 2.6088858215400638e-08, | |
| "loss": 0.0921, | |
| "step": 1212 | |
| }, | |
| { | |
| "epoch": 4.919493670886076, | |
| "grad_norm": 0.24146883135570507, | |
| "learning_rate": 2.3271164024940564e-08, | |
| "loss": 0.0938, | |
| "step": 1213 | |
| }, | |
| { | |
| "epoch": 4.923544303797469, | |
| "grad_norm": 0.233449658511918, | |
| "learning_rate": 2.061435966221881e-08, | |
| "loss": 0.0997, | |
| "step": 1214 | |
| }, | |
| { | |
| "epoch": 4.927594936708861, | |
| "grad_norm": 0.23853227463956378, | |
| "learning_rate": 1.811846652477245e-08, | |
| "loss": 0.0953, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 4.931645569620253, | |
| "grad_norm": 0.24462936291638604, | |
| "learning_rate": 1.5783504714184106e-08, | |
| "loss": 0.095, | |
| "step": 1216 | |
| }, | |
| { | |
| "epoch": 4.935696202531646, | |
| "grad_norm": 0.2404871367602787, | |
| "learning_rate": 1.360949303591097e-08, | |
| "loss": 0.0879, | |
| "step": 1217 | |
| }, | |
| { | |
| "epoch": 4.939746835443038, | |
| "grad_norm": 0.2405192596087071, | |
| "learning_rate": 1.1596448999144916e-08, | |
| "loss": 0.0932, | |
| "step": 1218 | |
| }, | |
| { | |
| "epoch": 4.943797468354431, | |
| "grad_norm": 0.2396638305399567, | |
| "learning_rate": 9.744388816668172e-09, | |
| "loss": 0.0965, | |
| "step": 1219 | |
| }, | |
| { | |
| "epoch": 4.947848101265823, | |
| "grad_norm": 0.23894960468793555, | |
| "learning_rate": 8.05332740472009e-09, | |
| "loss": 0.0912, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 4.951898734177215, | |
| "grad_norm": 0.2302405955285471, | |
| "learning_rate": 6.523278382872811e-09, | |
| "loss": 0.0752, | |
| "step": 1221 | |
| }, | |
| { | |
| "epoch": 4.955949367088608, | |
| "grad_norm": 0.23676700452246272, | |
| "learning_rate": 5.15425407393133e-09, | |
| "loss": 0.0982, | |
| "step": 1222 | |
| }, | |
| { | |
| "epoch": 4.96, | |
| "grad_norm": 0.22699830015791672, | |
| "learning_rate": 3.94626550383137e-09, | |
| "loss": 0.0854, | |
| "step": 1223 | |
| }, | |
| { | |
| "epoch": 4.964050632911392, | |
| "grad_norm": 0.25054501175180294, | |
| "learning_rate": 2.899322401546112e-09, | |
| "loss": 0.0943, | |
| "step": 1224 | |
| }, | |
| { | |
| "epoch": 4.968101265822785, | |
| "grad_norm": 0.24335299488879078, | |
| "learning_rate": 2.013433199010706e-09, | |
| "loss": 0.0992, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 4.972151898734177, | |
| "grad_norm": 0.24124604726442955, | |
| "learning_rate": 1.2886050310556563e-09, | |
| "loss": 0.0821, | |
| "step": 1226 | |
| }, | |
| { | |
| "epoch": 4.976202531645569, | |
| "grad_norm": 0.24464883938017368, | |
| "learning_rate": 7.248437353468695e-10, | |
| "loss": 0.1069, | |
| "step": 1227 | |
| }, | |
| { | |
| "epoch": 4.980253164556962, | |
| "grad_norm": 0.2392521111889513, | |
| "learning_rate": 3.221538523412449e-10, | |
| "loss": 0.0896, | |
| "step": 1228 | |
| }, | |
| { | |
| "epoch": 4.984303797468354, | |
| "grad_norm": 0.23607901347230847, | |
| "learning_rate": 8.053862524670663e-11, | |
| "loss": 0.1039, | |
| "step": 1229 | |
| }, | |
| { | |
| "epoch": 4.988354430379747, | |
| "grad_norm": 0.24572560157276674, | |
| "learning_rate": 0.0, | |
| "loss": 0.0905, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 4.988354430379747, | |
| "step": 1230, | |
| "total_flos": 2.0151031491020718e+18, | |
| "train_loss": 0.3231248075399942, | |
| "train_runtime": 59615.8773, | |
| "train_samples_per_second": 2.65, | |
| "train_steps_per_second": 0.021 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 1230, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 5, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 2.0151031491020718e+18, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
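
The file above appears to be a Hugging Face Transformers `trainer_state.json`: `log_history` holds one record per logged step (here `logging_steps` is 1), and the final entry carries aggregate stats (`train_loss`, `train_runtime`, `total_flos`) instead of a per-step loss. A minimal sketch for inspecting it, assuming the JSON is saved as `trainer_state.json` in the working directory and that matplotlib is installed; the filename and plotting choices are illustrative, not part of the original run:

```python
# Minimal sketch: load a Transformers trainer_state.json and plot the
# logged loss and learning-rate schedule. Assumes the JSON above was
# saved as "trainer_state.json" (hypothetical path) next to this script.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only entries that carry a per-step loss; the final summary entry
# stores aggregates (train_loss, train_runtime, ...) rather than "loss".
records = [r for r in state["log_history"] if "loss" in r]

steps = [r["step"] for r in records]
losses = [r["loss"] for r in records]
lrs = [r["learning_rate"] for r in records]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True, figsize=(8, 6))
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("step")
fig.tight_layout()
plt.show()

# The last log_history entry is the run summary.
summary = state["log_history"][-1]
print("mean train_loss:", summary.get("train_loss"))
print("runtime (s):", summary.get("train_runtime"))
```

Plotted this way, the learning-rate curve makes the schedule visible at a glance: a short warmup early in training, then a smooth decay that reaches exactly 0.0 at the final step (1230), consistent with `max_steps` and the per-step loss dropping from roughly 0.97 at step 1 to about 0.09 by epoch 5.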