{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9872340425531916,
  "eval_steps": 500,
  "global_step": 234,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01276595744680851,
      "grad_norm": 7.015718466989074,
      "learning_rate": 4.1666666666666667e-07,
      "loss": 1.117,
      "step": 1
    },
    {
      "epoch": 0.02553191489361702,
      "grad_norm": 6.94762834330754,
      "learning_rate": 8.333333333333333e-07,
      "loss": 1.1134,
      "step": 2
    },
    {
      "epoch": 0.03829787234042553,
      "grad_norm": 6.801049953756127,
      "learning_rate": 1.25e-06,
      "loss": 1.1129,
      "step": 3
    },
    {
      "epoch": 0.05106382978723404,
      "grad_norm": 6.772438165340198,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 1.0998,
      "step": 4
    },
    {
      "epoch": 0.06382978723404255,
      "grad_norm": 6.296447609181628,
      "learning_rate": 2.0833333333333334e-06,
      "loss": 1.079,
      "step": 5
    },
    {
      "epoch": 0.07659574468085106,
      "grad_norm": 5.215909882705263,
      "learning_rate": 2.5e-06,
      "loss": 1.0326,
      "step": 6
    },
    {
      "epoch": 0.08936170212765958,
      "grad_norm": 5.0240627402893425,
      "learning_rate": 2.916666666666667e-06,
      "loss": 1.0168,
      "step": 7
    },
    {
      "epoch": 0.10212765957446808,
      "grad_norm": 3.125198769881761,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.0226,
      "step": 8
    },
    {
      "epoch": 0.1148936170212766,
      "grad_norm": 2.8348625516600063,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.9845,
      "step": 9
    },
    {
      "epoch": 0.1276595744680851,
      "grad_norm": 2.5539753733700277,
      "learning_rate": 4.166666666666667e-06,
      "loss": 0.9905,
      "step": 10
    },
    {
      "epoch": 0.14042553191489363,
      "grad_norm": 4.74032301961336,
      "learning_rate": 4.583333333333333e-06,
      "loss": 0.9639,
      "step": 11
    },
    {
      "epoch": 0.15319148936170213,
      "grad_norm": 4.86790394934032,
      "learning_rate": 5e-06,
      "loss": 0.9165,
      "step": 12
    },
    {
      "epoch": 0.16595744680851063,
      "grad_norm": 5.066912804753986,
      "learning_rate": 5.416666666666667e-06,
      "loss": 1.0023,
      "step": 13
    },
    {
      "epoch": 0.17872340425531916,
      "grad_norm": 4.08224163704665,
      "learning_rate": 5.833333333333334e-06,
      "loss": 0.9128,
      "step": 14
    },
    {
      "epoch": 0.19148936170212766,
      "grad_norm": 4.091250067697973,
      "learning_rate": 6.25e-06,
      "loss": 0.8716,
      "step": 15
    },
    {
      "epoch": 0.20425531914893616,
      "grad_norm": 3.4732433394882944,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.8829,
      "step": 16
    },
    {
      "epoch": 0.2170212765957447,
      "grad_norm": 2.5197296768780895,
      "learning_rate": 7.083333333333335e-06,
      "loss": 0.8787,
      "step": 17
    },
    {
      "epoch": 0.2297872340425532,
      "grad_norm": 2.125408786161704,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.8183,
      "step": 18
    },
    {
      "epoch": 0.2425531914893617,
      "grad_norm": 2.2748157897060506,
      "learning_rate": 7.916666666666667e-06,
      "loss": 0.8937,
      "step": 19
    },
    {
      "epoch": 0.2553191489361702,
      "grad_norm": 2.333974324631764,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.8386,
      "step": 20
    },
    {
      "epoch": 0.2680851063829787,
      "grad_norm": 1.8128182674638877,
      "learning_rate": 8.750000000000001e-06,
      "loss": 0.8552,
      "step": 21
    },
    {
      "epoch": 0.28085106382978725,
      "grad_norm": 1.7125731325584599,
      "learning_rate": 9.166666666666666e-06,
      "loss": 0.8767,
      "step": 22
    },
    {
      "epoch": 0.2936170212765957,
      "grad_norm": 1.683343575929496,
      "learning_rate": 9.583333333333335e-06,
      "loss": 0.8357,
      "step": 23
    },
    {
      "epoch": 0.30638297872340425,
      "grad_norm": 1.373668760486191,
      "learning_rate": 1e-05,
      "loss": 0.7915,
      "step": 24
    },
    {
      "epoch": 0.3191489361702128,
      "grad_norm": 1.0055571221242297,
      "learning_rate": 9.999440509051367e-06,
      "loss": 0.7785,
      "step": 25
    },
    {
      "epoch": 0.33191489361702126,
      "grad_norm": 0.9027056985862393,
      "learning_rate": 9.997762161417517e-06,
      "loss": 0.8068,
      "step": 26
    },
    {
      "epoch": 0.3446808510638298,
      "grad_norm": 1.2667053539496995,
      "learning_rate": 9.994965332706574e-06,
      "loss": 0.8507,
      "step": 27
    },
    {
      "epoch": 0.3574468085106383,
      "grad_norm": 0.9917915369326918,
      "learning_rate": 9.991050648838676e-06,
      "loss": 0.8097,
      "step": 28
    },
    {
      "epoch": 0.3702127659574468,
      "grad_norm": 0.8527855285775848,
      "learning_rate": 9.986018985905901e-06,
      "loss": 0.795,
      "step": 29
    },
    {
      "epoch": 0.3829787234042553,
      "grad_norm": 0.838806489476804,
      "learning_rate": 9.979871469976197e-06,
      "loss": 0.7381,
      "step": 30
    },
    {
      "epoch": 0.39574468085106385,
      "grad_norm": 1.0269042283041614,
      "learning_rate": 9.972609476841368e-06,
      "loss": 0.8167,
      "step": 31
    },
    {
      "epoch": 0.4085106382978723,
      "grad_norm": 0.9178642201492891,
      "learning_rate": 9.964234631709188e-06,
      "loss": 0.7563,
      "step": 32
    },
    {
      "epoch": 0.42127659574468085,
      "grad_norm": 0.7926111862163232,
      "learning_rate": 9.954748808839675e-06,
      "loss": 0.7787,
      "step": 33
    },
    {
      "epoch": 0.4340425531914894,
      "grad_norm": 0.75573330787621,
      "learning_rate": 9.944154131125643e-06,
      "loss": 0.78,
      "step": 34
    },
    {
      "epoch": 0.44680851063829785,
      "grad_norm": 0.815145181705661,
      "learning_rate": 9.932452969617607e-06,
      "loss": 0.7871,
      "step": 35
    },
    {
      "epoch": 0.4595744680851064,
      "grad_norm": 0.9752539663188574,
      "learning_rate": 9.91964794299315e-06,
      "loss": 0.7976,
      "step": 36
    },
    {
      "epoch": 0.4723404255319149,
      "grad_norm": 0.6959577877909565,
      "learning_rate": 9.905741916970863e-06,
      "loss": 0.7809,
      "step": 37
    },
    {
      "epoch": 0.4851063829787234,
      "grad_norm": 0.8883024852628244,
      "learning_rate": 9.890738003669029e-06,
      "loss": 0.7876,
      "step": 38
    },
    {
      "epoch": 0.4978723404255319,
      "grad_norm": 0.773663747644755,
      "learning_rate": 9.874639560909118e-06,
      "loss": 0.7954,
      "step": 39
    },
    {
      "epoch": 0.5106382978723404,
      "grad_norm": 0.6398699968223347,
      "learning_rate": 9.857450191464337e-06,
      "loss": 0.766,
      "step": 40
    },
    {
      "epoch": 0.5234042553191489,
      "grad_norm": 0.746186258074948,
      "learning_rate": 9.839173742253334e-06,
      "loss": 0.8023,
      "step": 41
    },
    {
      "epoch": 0.5361702127659574,
      "grad_norm": 0.621705451607282,
      "learning_rate": 9.819814303479268e-06,
      "loss": 0.7787,
      "step": 42
    },
    {
      "epoch": 0.548936170212766,
      "grad_norm": 0.6165255600989523,
      "learning_rate": 9.799376207714446e-06,
      "loss": 0.7676,
      "step": 43
    },
    {
      "epoch": 0.5617021276595745,
      "grad_norm": 0.6336316514181005,
      "learning_rate": 9.777864028930705e-06,
      "loss": 0.7733,
      "step": 44
    },
    {
      "epoch": 0.574468085106383,
      "grad_norm": 0.5891614649703093,
      "learning_rate": 9.755282581475769e-06,
      "loss": 0.7726,
      "step": 45
    },
    {
      "epoch": 0.5872340425531914,
      "grad_norm": 0.5953555831423718,
      "learning_rate": 9.731636918995821e-06,
      "loss": 0.7886,
      "step": 46
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.6255442079564976,
      "learning_rate": 9.706932333304518e-06,
      "loss": 0.7776,
      "step": 47
    },
    {
      "epoch": 0.6127659574468085,
      "grad_norm": 0.5944241623820739,
      "learning_rate": 9.681174353198687e-06,
      "loss": 0.8059,
      "step": 48
    },
    {
      "epoch": 0.625531914893617,
      "grad_norm": 0.5568557096343427,
      "learning_rate": 9.654368743221022e-06,
      "loss": 0.7354,
      "step": 49
    },
    {
      "epoch": 0.6382978723404256,
      "grad_norm": 0.6038601495314921,
      "learning_rate": 9.626521502369984e-06,
      "loss": 0.7841,
      "step": 50
    },
    {
      "epoch": 0.6510638297872341,
      "grad_norm": 0.5270561177136355,
      "learning_rate": 9.597638862757255e-06,
      "loss": 0.7536,
      "step": 51
    },
    {
      "epoch": 0.6638297872340425,
      "grad_norm": 0.5844453802384513,
      "learning_rate": 9.567727288213005e-06,
      "loss": 0.7836,
      "step": 52
    },
    {
      "epoch": 0.676595744680851,
      "grad_norm": 0.6102351937764352,
      "learning_rate": 9.536793472839325e-06,
      "loss": 0.7027,
      "step": 53
    },
    {
      "epoch": 0.6893617021276596,
      "grad_norm": 0.6035603241490802,
      "learning_rate": 9.504844339512096e-06,
      "loss": 0.7938,
      "step": 54
    },
    {
      "epoch": 0.7021276595744681,
      "grad_norm": 0.6261987250094778,
      "learning_rate": 9.471887038331686e-06,
      "loss": 0.8058,
      "step": 55
    },
    {
      "epoch": 0.7148936170212766,
      "grad_norm": 0.5496448961935777,
      "learning_rate": 9.437928945022772e-06,
      "loss": 0.7234,
      "step": 56
    },
    {
      "epoch": 0.7276595744680852,
      "grad_norm": 0.6005800193481035,
      "learning_rate": 9.40297765928369e-06,
      "loss": 0.7323,
      "step": 57
    },
    {
      "epoch": 0.7404255319148936,
      "grad_norm": 0.9150485257772174,
      "learning_rate": 9.36704100308565e-06,
      "loss": 0.7737,
      "step": 58
    },
    {
      "epoch": 0.7531914893617021,
      "grad_norm": 0.5971646523259739,
      "learning_rate": 9.330127018922195e-06,
      "loss": 0.713,
      "step": 59
    },
    {
      "epoch": 0.7659574468085106,
      "grad_norm": 0.6498404792169774,
      "learning_rate": 9.292243968009332e-06,
      "loss": 0.7276,
      "step": 60
    },
    {
      "epoch": 0.7787234042553192,
      "grad_norm": 0.8683199966188363,
      "learning_rate": 9.253400328436699e-06,
      "loss": 0.7949,
      "step": 61
    },
    {
      "epoch": 0.7914893617021277,
      "grad_norm": 0.6642758991694608,
      "learning_rate": 9.213604793270196e-06,
      "loss": 0.8019,
      "step": 62
    },
    {
      "epoch": 0.8042553191489362,
      "grad_norm": 0.6410474363247081,
      "learning_rate": 9.172866268606514e-06,
      "loss": 0.7602,
      "step": 63
    },
    {
      "epoch": 0.8170212765957446,
      "grad_norm": 0.5904684410337478,
      "learning_rate": 9.131193871579975e-06,
      "loss": 0.7189,
      "step": 64
    },
    {
      "epoch": 0.8297872340425532,
      "grad_norm": 0.6709679491441708,
      "learning_rate": 9.088596928322158e-06,
      "loss": 0.8073,
      "step": 65
    },
    {
      "epoch": 0.8425531914893617,
      "grad_norm": 0.556019821773472,
      "learning_rate": 9.045084971874738e-06,
      "loss": 0.7782,
      "step": 66
    },
    {
      "epoch": 0.8553191489361702,
      "grad_norm": 0.5882607467493168,
      "learning_rate": 9.000667740056033e-06,
      "loss": 0.7552,
      "step": 67
    },
    {
      "epoch": 0.8680851063829788,
      "grad_norm": 0.6128020803055951,
      "learning_rate": 8.955355173281709e-06,
      "loss": 0.7859,
      "step": 68
    },
    {
      "epoch": 0.8808510638297873,
      "grad_norm": 0.5441564996823284,
      "learning_rate": 8.90915741234015e-06,
      "loss": 0.8118,
      "step": 69
    },
    {
      "epoch": 0.8936170212765957,
      "grad_norm": 0.5071072554693752,
      "learning_rate": 8.862084796122998e-06,
      "loss": 0.7541,
      "step": 70
    },
    {
      "epoch": 0.9063829787234042,
      "grad_norm": 0.526853686583371,
      "learning_rate": 8.814147859311333e-06,
      "loss": 0.7476,
      "step": 71
    },
    {
      "epoch": 0.9191489361702128,
      "grad_norm": 0.565655168554739,
      "learning_rate": 8.765357330018056e-06,
      "loss": 0.7464,
      "step": 72
    },
    {
      "epoch": 0.9319148936170213,
      "grad_norm": 0.5679672017867912,
      "learning_rate": 8.715724127386971e-06,
      "loss": 0.8133,
      "step": 73
    },
    {
      "epoch": 0.9446808510638298,
      "grad_norm": 0.5492621036178154,
      "learning_rate": 8.665259359149132e-06,
      "loss": 0.8062,
      "step": 74
    },
    {
      "epoch": 0.9574468085106383,
      "grad_norm": 0.5463633975363426,
      "learning_rate": 8.613974319136959e-06,
      "loss": 0.79,
      "step": 75
    },
    {
      "epoch": 0.9702127659574468,
      "grad_norm": 0.5714899949507461,
      "learning_rate": 8.561880484756726e-06,
      "loss": 0.7951,
      "step": 76
    },
    {
      "epoch": 0.9829787234042553,
      "grad_norm": 0.5754994749263996,
      "learning_rate": 8.508989514419959e-06,
      "loss": 0.7233,
      "step": 77
    },
    {
      "epoch": 0.9957446808510638,
      "grad_norm": 0.5760269816494827,
      "learning_rate": 8.455313244934324e-06,
      "loss": 0.7733,
      "step": 78
    },
    {
      "epoch": 1.0085106382978724,
      "grad_norm": 1.0945888585918737,
      "learning_rate": 8.400863688854598e-06,
      "loss": 1.1355,
      "step": 79
    },
    {
      "epoch": 1.0212765957446808,
      "grad_norm": 0.5489153017138312,
      "learning_rate": 8.345653031794292e-06,
      "loss": 0.6646,
      "step": 80
    },
    {
      "epoch": 1.0340425531914894,
      "grad_norm": 0.550042864839408,
      "learning_rate": 8.289693629698564e-06,
      "loss": 0.7469,
      "step": 81
    },
    {
      "epoch": 1.0468085106382978,
      "grad_norm": 0.549804299148006,
      "learning_rate": 8.232998006078998e-06,
      "loss": 0.7351,
      "step": 82
    },
    {
      "epoch": 1.0595744680851065,
      "grad_norm": 0.46781275812787854,
      "learning_rate": 8.175578849210894e-06,
      "loss": 0.6464,
      "step": 83
    },
    {
      "epoch": 1.0723404255319149,
      "grad_norm": 0.6076923590979096,
      "learning_rate": 8.117449009293668e-06,
      "loss": 0.7995,
      "step": 84
    },
    {
      "epoch": 1.0851063829787233,
      "grad_norm": 0.5641940949871667,
      "learning_rate": 8.058621495575032e-06,
      "loss": 0.7216,
      "step": 85
    },
    {
      "epoch": 1.097872340425532,
      "grad_norm": 0.5993520107937353,
      "learning_rate": 7.99910947343957e-06,
      "loss": 0.7149,
      "step": 86
    },
    {
      "epoch": 1.1106382978723404,
      "grad_norm": 0.567214363627709,
      "learning_rate": 7.938926261462366e-06,
      "loss": 0.658,
      "step": 87
    },
    {
      "epoch": 1.123404255319149,
      "grad_norm": 0.5349806750291352,
      "learning_rate": 7.87808532842837e-06,
      "loss": 0.649,
      "step": 88
    },
    {
      "epoch": 1.1361702127659574,
      "grad_norm": 0.703875833332789,
      "learning_rate": 7.81660029031811e-06,
      "loss": 0.8051,
      "step": 89
    },
    {
      "epoch": 1.148936170212766,
      "grad_norm": 0.6291964370143157,
      "learning_rate": 7.754484907260513e-06,
      "loss": 0.698,
      "step": 90
    },
    {
      "epoch": 1.1617021276595745,
      "grad_norm": 0.4666655295099628,
      "learning_rate": 7.691753080453413e-06,
      "loss": 0.6655,
      "step": 91
    },
    {
      "epoch": 1.174468085106383,
      "grad_norm": 0.5368319006956255,
      "learning_rate": 7.628418849052523e-06,
      "loss": 0.7435,
      "step": 92
    },
    {
      "epoch": 1.1872340425531915,
      "grad_norm": 0.5153563571772463,
      "learning_rate": 7.564496387029532e-06,
      "loss": 0.7504,
      "step": 93
    },
    {
      "epoch": 1.2,
      "grad_norm": 0.5466125166103902,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.7341,
      "step": 94
    },
    {
      "epoch": 1.2127659574468086,
      "grad_norm": 0.5224826643535506,
      "learning_rate": 7.434944122021837e-06,
      "loss": 0.7498,
      "step": 95
    },
    {
      "epoch": 1.225531914893617,
      "grad_norm": 0.5708976206267316,
      "learning_rate": 7.369343312364994e-06,
      "loss": 0.708,
      "step": 96
    },
    {
      "epoch": 1.2382978723404254,
      "grad_norm": 0.563262494604646,
      "learning_rate": 7.303212252253163e-06,
      "loss": 0.7551,
      "step": 97
    },
    {
      "epoch": 1.251063829787234,
      "grad_norm": 0.5511363211697056,
      "learning_rate": 7.236565741578163e-06,
      "loss": 0.7711,
      "step": 98
    },
    {
      "epoch": 1.2638297872340425,
      "grad_norm": 0.4874825499668437,
      "learning_rate": 7.169418695587791e-06,
      "loss": 0.6459,
      "step": 99
    },
    {
      "epoch": 1.2765957446808511,
      "grad_norm": 0.465488823301378,
      "learning_rate": 7.101786141547829e-06,
      "loss": 0.6609,
      "step": 100
    },
    {
      "epoch": 1.2893617021276595,
      "grad_norm": 0.5900785967913017,
      "learning_rate": 7.033683215379002e-06,
      "loss": 0.7567,
      "step": 101
    },
    {
      "epoch": 1.302127659574468,
      "grad_norm": 0.5150565984498293,
      "learning_rate": 6.965125158269619e-06,
      "loss": 0.6895,
      "step": 102
    },
    {
      "epoch": 1.3148936170212766,
      "grad_norm": 0.5415463201111466,
      "learning_rate": 6.896127313264643e-06,
      "loss": 0.6849,
      "step": 103
    },
    {
      "epoch": 1.327659574468085,
      "grad_norm": 0.5828549026140929,
      "learning_rate": 6.8267051218319766e-06,
      "loss": 0.7744,
      "step": 104
    },
    {
      "epoch": 1.3404255319148937,
      "grad_norm": 0.5705104446754513,
      "learning_rate": 6.7568741204067145e-06,
      "loss": 0.7876,
      "step": 105
    },
    {
      "epoch": 1.353191489361702,
      "grad_norm": 0.5521243149798348,
      "learning_rate": 6.686649936914151e-06,
      "loss": 0.624,
      "step": 106
    },
    {
      "epoch": 1.3659574468085105,
      "grad_norm": 0.6413422766450044,
      "learning_rate": 6.616048287272301e-06,
      "loss": 0.7829,
      "step": 107
    },
    {
      "epoch": 1.3787234042553191,
      "grad_norm": 0.46827552326425964,
      "learning_rate": 6.545084971874738e-06,
      "loss": 0.6761,
      "step": 108
    },
    {
      "epoch": 1.3914893617021278,
      "grad_norm": 0.6205358424015903,
      "learning_rate": 6.473775872054522e-06,
      "loss": 0.7192,
      "step": 109
    },
    {
      "epoch": 1.4042553191489362,
      "grad_norm": 0.5005870891529645,
      "learning_rate": 6.402136946530014e-06,
      "loss": 0.7401,
      "step": 110
    },
    {
      "epoch": 1.4170212765957446,
      "grad_norm": 0.5009298310214143,
      "learning_rate": 6.330184227833376e-06,
      "loss": 0.6096,
      "step": 111
    },
    {
      "epoch": 1.4297872340425533,
      "grad_norm": 0.600500474185216,
      "learning_rate": 6.257933818722544e-06,
      "loss": 0.77,
      "step": 112
    },
    {
      "epoch": 1.4425531914893617,
      "grad_norm": 0.4995998386976486,
      "learning_rate": 6.185401888577488e-06,
      "loss": 0.6739,
      "step": 113
    },
    {
      "epoch": 1.4553191489361703,
      "grad_norm": 0.4798072520496255,
      "learning_rate": 6.112604669781572e-06,
      "loss": 0.6769,
      "step": 114
    },
    {
      "epoch": 1.4680851063829787,
      "grad_norm": 0.5176238294523213,
      "learning_rate": 6.039558454088796e-06,
      "loss": 0.8088,
      "step": 115
    },
    {
      "epoch": 1.4808510638297872,
      "grad_norm": 0.4911445924656217,
      "learning_rate": 5.9662795889777666e-06,
      "loss": 0.6255,
      "step": 116
    },
    {
      "epoch": 1.4936170212765958,
      "grad_norm": 0.5344710529397226,
      "learning_rate": 5.892784473993184e-06,
      "loss": 0.8014,
      "step": 117
    },
    {
      "epoch": 1.5063829787234042,
      "grad_norm": 0.525330620190939,
      "learning_rate": 5.819089557075689e-06,
      "loss": 0.6739,
      "step": 118
    },
    {
      "epoch": 1.5191489361702128,
      "grad_norm": 0.5049522069915814,
      "learning_rate": 5.745211330880872e-06,
      "loss": 0.7689,
      "step": 119
    },
    {
      "epoch": 1.5319148936170213,
      "grad_norm": 0.5368024598915486,
      "learning_rate": 5.671166329088278e-06,
      "loss": 0.6955,
      "step": 120
    },
    {
      "epoch": 1.5446808510638297,
      "grad_norm": 0.5586168781796989,
      "learning_rate": 5.596971122701221e-06,
      "loss": 0.6957,
      "step": 121
    },
    {
      "epoch": 1.5574468085106383,
      "grad_norm": 0.4948472638820811,
      "learning_rate": 5.522642316338268e-06,
      "loss": 0.6908,
      "step": 122
    },
    {
      "epoch": 1.570212765957447,
      "grad_norm": 0.6059945133709344,
      "learning_rate": 5.448196544517168e-06,
      "loss": 0.8481,
      "step": 123
    },
    {
      "epoch": 1.5829787234042554,
      "grad_norm": 0.4870810475990154,
      "learning_rate": 5.373650467932122e-06,
      "loss": 0.6862,
      "step": 124
    },
    {
      "epoch": 1.5957446808510638,
      "grad_norm": 0.6318974529790354,
      "learning_rate": 5.299020769725172e-06,
      "loss": 0.6743,
      "step": 125
    },
    {
      "epoch": 1.6085106382978722,
      "grad_norm": 0.665188557566122,
      "learning_rate": 5.224324151752575e-06,
      "loss": 0.7473,
      "step": 126
    },
    {
      "epoch": 1.6212765957446809,
      "grad_norm": 0.4922894454426491,
      "learning_rate": 5.1495773308469935e-06,
      "loss": 0.6891,
      "step": 127
    },
    {
      "epoch": 1.6340425531914895,
      "grad_norm": 0.5207672174829043,
      "learning_rate": 5.074797035076319e-06,
      "loss": 0.622,
      "step": 128
    },
    {
      "epoch": 1.646808510638298,
      "grad_norm": 0.5266233826579009,
      "learning_rate": 5e-06,
      "loss": 0.7025,
      "step": 129
    },
    {
      "epoch": 1.6595744680851063,
      "grad_norm": 0.5449941486655594,
      "learning_rate": 4.9252029649236835e-06,
      "loss": 0.6184,
      "step": 130
    },
    {
      "epoch": 1.6723404255319148,
      "grad_norm": 0.6474022684476997,
      "learning_rate": 4.850422669153009e-06,
      "loss": 0.706,
      "step": 131
    },
    {
      "epoch": 1.6851063829787234,
      "grad_norm": 0.49589919470436855,
      "learning_rate": 4.775675848247427e-06,
      "loss": 0.7047,
      "step": 132
    },
    {
      "epoch": 1.697872340425532,
      "grad_norm": 0.5426968909626301,
      "learning_rate": 4.700979230274829e-06,
      "loss": 0.7253,
      "step": 133
    },
    {
      "epoch": 1.7106382978723405,
      "grad_norm": 0.60876343138784,
      "learning_rate": 4.626349532067879e-06,
      "loss": 0.7898,
      "step": 134
    },
    {
      "epoch": 1.7234042553191489,
      "grad_norm": 0.5027585714953703,
      "learning_rate": 4.551803455482833e-06,
      "loss": 0.691,
      "step": 135
    },
    {
      "epoch": 1.7361702127659573,
      "grad_norm": 0.5526361540526097,
      "learning_rate": 4.477357683661734e-06,
      "loss": 0.7306,
      "step": 136
    },
    {
      "epoch": 1.748936170212766,
      "grad_norm": 0.4564058130594461,
      "learning_rate": 4.4030288772987795e-06,
      "loss": 0.677,
      "step": 137
    },
    {
      "epoch": 1.7617021276595746,
      "grad_norm": 0.4265652793878518,
      "learning_rate": 4.3288336709117246e-06,
      "loss": 0.6141,
      "step": 138
    },
    {
      "epoch": 1.774468085106383,
      "grad_norm": 0.4451324891275968,
      "learning_rate": 4.254788669119127e-06,
      "loss": 0.6652,
      "step": 139
    },
    {
      "epoch": 1.7872340425531914,
      "grad_norm": 0.531036312152885,
      "learning_rate": 4.180910442924312e-06,
      "loss": 0.6635,
      "step": 140
    },
    {
      "epoch": 1.8,
      "grad_norm": 0.522235798644409,
      "learning_rate": 4.107215526006818e-06,
      "loss": 0.7722,
      "step": 141
    },
    {
      "epoch": 1.8127659574468085,
      "grad_norm": 0.446699355693039,
      "learning_rate": 4.033720411022235e-06,
      "loss": 0.6111,
      "step": 142
    },
    {
      "epoch": 1.825531914893617,
      "grad_norm": 0.4882147681991008,
      "learning_rate": 3.960441545911205e-06,
      "loss": 0.7362,
      "step": 143
    },
    {
      "epoch": 1.8382978723404255,
      "grad_norm": 0.5436602159483871,
      "learning_rate": 3.887395330218429e-06,
      "loss": 0.7662,
      "step": 144
    },
    {
      "epoch": 1.851063829787234,
      "grad_norm": 0.4097709684721711,
      "learning_rate": 3.8145981114225135e-06,
      "loss": 0.627,
      "step": 145
    },
    {
      "epoch": 1.8638297872340426,
      "grad_norm": 0.5774202757015185,
      "learning_rate": 3.7420661812774577e-06,
      "loss": 0.8162,
      "step": 146
    },
    {
      "epoch": 1.8765957446808512,
      "grad_norm": 0.5449728825694614,
      "learning_rate": 3.669815772166625e-06,
      "loss": 0.7026,
      "step": 147
    },
    {
      "epoch": 1.8893617021276596,
      "grad_norm": 0.4576714982855058,
      "learning_rate": 3.5978630534699873e-06,
      "loss": 0.7011,
      "step": 148
    },
    {
      "epoch": 1.902127659574468,
      "grad_norm": 0.5195319976448147,
      "learning_rate": 3.526224127945479e-06,
      "loss": 0.7526,
      "step": 149
    },
    {
      "epoch": 1.9148936170212765,
      "grad_norm": 0.4707171874496708,
      "learning_rate": 3.4549150281252635e-06,
      "loss": 0.6694,
      "step": 150
    },
    {
      "epoch": 1.9276595744680851,
      "grad_norm": 0.456671543427713,
      "learning_rate": 3.383951712727701e-06,
      "loss": 0.6953,
      "step": 151
    },
    {
      "epoch": 1.9404255319148938,
      "grad_norm": 0.43257527089711645,
      "learning_rate": 3.3133500630858507e-06,
      "loss": 0.7002,
      "step": 152
    },
    {
      "epoch": 1.9531914893617022,
      "grad_norm": 0.48731268058311134,
      "learning_rate": 3.2431258795932863e-06,
      "loss": 0.6944,
      "step": 153
    },
    {
      "epoch": 1.9659574468085106,
      "grad_norm": 0.4776960174537403,
      "learning_rate": 3.173294878168025e-06,
      "loss": 0.7994,
      "step": 154
    },
    {
      "epoch": 1.978723404255319,
      "grad_norm": 0.45202915072462296,
      "learning_rate": 3.1038726867353587e-06,
      "loss": 0.6511,
      "step": 155
    },
    {
      "epoch": 1.9914893617021276,
      "grad_norm": 0.4585132837327996,
      "learning_rate": 3.0348748417303826e-06,
      "loss": 0.647,
      "step": 156
    },
    {
      "epoch": 2.0042553191489363,
      "grad_norm": 0.849043915814881,
      "learning_rate": 2.966316784621e-06,
      "loss": 0.9898,
      "step": 157
    },
    {
      "epoch": 2.0170212765957447,
      "grad_norm": 0.47049388201053804,
      "learning_rate": 2.8982138584521734e-06,
      "loss": 0.706,
      "step": 158
    },
    {
      "epoch": 2.029787234042553,
      "grad_norm": 0.4233596382881546,
      "learning_rate": 2.83058130441221e-06,
      "loss": 0.6692,
      "step": 159
    },
    {
      "epoch": 2.0425531914893615,
      "grad_norm": 0.47661574060831124,
      "learning_rate": 2.7634342584218364e-06,
      "loss": 0.6987,
      "step": 160
    },
    {
      "epoch": 2.0553191489361704,
      "grad_norm": 0.48301144689234443,
      "learning_rate": 2.6967877477468394e-06,
      "loss": 0.6053,
      "step": 161
    },
    {
      "epoch": 2.068085106382979,
      "grad_norm": 0.4777060565567049,
      "learning_rate": 2.6306566876350072e-06,
      "loss": 0.6766,
      "step": 162
    },
    {
      "epoch": 2.0808510638297872,
      "grad_norm": 0.38995578479059767,
      "learning_rate": 2.5650558779781635e-06,
      "loss": 0.6486,
      "step": 163
    },
    {
      "epoch": 2.0936170212765957,
      "grad_norm": 0.4469045207268816,
      "learning_rate": 2.5000000000000015e-06,
      "loss": 0.6803,
      "step": 164
    },
    {
      "epoch": 2.106382978723404,
      "grad_norm": 0.428314133399734,
      "learning_rate": 2.43550361297047e-06,
      "loss": 0.6573,
      "step": 165
    },
    {
      "epoch": 2.119148936170213,
      "grad_norm": 0.4531099548598637,
      "learning_rate": 2.371581150947476e-06,
      "loss": 0.6132,
      "step": 166
    },
    {
      "epoch": 2.1319148936170214,
      "grad_norm": 0.5460835483467924,
      "learning_rate": 2.3082469195465893e-06,
      "loss": 0.6436,
      "step": 167
    },
    {
      "epoch": 2.1446808510638298,
      "grad_norm": 0.4634335271948671,
      "learning_rate": 2.245515092739488e-06,
      "loss": 0.7057,
      "step": 168
    },
    {
      "epoch": 2.157446808510638,
      "grad_norm": 0.4433923033605567,
      "learning_rate": 2.1833997096818897e-06,
      "loss": 0.6898,
      "step": 169
    },
    {
      "epoch": 2.1702127659574466,
      "grad_norm": 0.4026516337596407,
      "learning_rate": 2.1219146715716332e-06,
      "loss": 0.6066,
      "step": 170
    },
    {
      "epoch": 2.1829787234042555,
      "grad_norm": 0.4792701280174098,
      "learning_rate": 2.061073738537635e-06,
      "loss": 0.7451,
      "step": 171
    },
    {
      "epoch": 2.195744680851064,
      "grad_norm": 0.5497478648874768,
      "learning_rate": 2.0008905265604316e-06,
      "loss": 0.6688,
      "step": 172
    },
    {
      "epoch": 2.2085106382978723,
      "grad_norm": 0.4613200283285023,
      "learning_rate": 1.941378504424968e-06,
      "loss": 0.7059,
      "step": 173
    },
    {
      "epoch": 2.2212765957446807,
      "grad_norm": 0.445448575146405,
      "learning_rate": 1.8825509907063328e-06,
      "loss": 0.6934,
      "step": 174
    },
    {
      "epoch": 2.2340425531914896,
      "grad_norm": 0.3791538615491654,
      "learning_rate": 1.8244211507891064e-06,
      "loss": 0.6081,
      "step": 175
    },
    {
      "epoch": 2.246808510638298,
      "grad_norm": 0.46053396727842905,
      "learning_rate": 1.7670019939210025e-06,
      "loss": 0.7652,
      "step": 176
    },
    {
      "epoch": 2.2595744680851064,
      "grad_norm": 0.39910064726710237,
      "learning_rate": 1.7103063703014372e-06,
      "loss": 0.7186,
      "step": 177
    },
    {
      "epoch": 2.272340425531915,
      "grad_norm": 0.4019715799995289,
      "learning_rate": 1.6543469682057105e-06,
      "loss": 0.6669,
      "step": 178
    },
    {
      "epoch": 2.2851063829787233,
      "grad_norm": 0.430365457540682,
      "learning_rate": 1.5991363111454023e-06,
      "loss": 0.6817,
      "step": 179
    },
    {
      "epoch": 2.297872340425532,
      "grad_norm": 0.4951142354660635,
      "learning_rate": 1.544686755065677e-06,
      "loss": 0.6174,
      "step": 180
    },
    {
      "epoch": 2.3106382978723405,
      "grad_norm": 0.4703634969823223,
      "learning_rate": 1.4910104855800429e-06,
      "loss": 0.6884,
      "step": 181
    },
    {
      "epoch": 2.323404255319149,
      "grad_norm": 0.4383048478891779,
      "learning_rate": 1.438119515243277e-06,
      "loss": 0.6935,
      "step": 182
    },
    {
      "epoch": 2.3361702127659574,
      "grad_norm": 0.40015717483297514,
      "learning_rate": 1.3860256808630429e-06,
      "loss": 0.6116,
      "step": 183
    },
    {
      "epoch": 2.348936170212766,
      "grad_norm": 0.44199347748639095,
      "learning_rate": 1.3347406408508695e-06,
      "loss": 0.6779,
      "step": 184
    },
    {
      "epoch": 2.3617021276595747,
      "grad_norm": 0.4325390527297763,
      "learning_rate": 1.2842758726130283e-06,
      "loss": 0.733,
      "step": 185
    },
    {
      "epoch": 2.374468085106383,
      "grad_norm": 0.41116195093472185,
      "learning_rate": 1.234642669981946e-06,
      "loss": 0.6138,
      "step": 186
    },
    {
      "epoch": 2.3872340425531915,
      "grad_norm": 0.43169027661540094,
      "learning_rate": 1.1858521406886674e-06,
      "loss": 0.7082,
      "step": 187
    },
    {
      "epoch": 2.4,
      "grad_norm": 0.41678457027002164,
      "learning_rate": 1.137915203877003e-06,
      "loss": 0.7132,
      "step": 188
    },
    {
      "epoch": 2.4127659574468083,
      "grad_norm": 0.4464999225599113,
      "learning_rate": 1.0908425876598512e-06,
      "loss": 0.595,
      "step": 189
    },
    {
      "epoch": 2.425531914893617,
      "grad_norm": 0.3737938370502847,
      "learning_rate": 1.044644826718295e-06,
      "loss": 0.5945,
      "step": 190
    },
    {
      "epoch": 2.4382978723404256,
      "grad_norm": 0.4346593864871973,
      "learning_rate": 9.993322599439692e-07,
      "loss": 0.7383,
      "step": 191
    },
    {
      "epoch": 2.451063829787234,
      "grad_norm": 0.3744054578474546,
      "learning_rate": 9.549150281252633e-07,
      "loss": 0.6384,
      "step": 192
    },
    {
      "epoch": 2.4638297872340424,
      "grad_norm": 0.46293489409698124,
      "learning_rate": 9.114030716778433e-07,
      "loss": 0.7709,
      "step": 193
    },
    {
      "epoch": 2.476595744680851,
      "grad_norm": 0.47676213159992253,
      "learning_rate": 8.688061284200266e-07,
      "loss": 0.67,
      "step": 194
    },
    {
      "epoch": 2.4893617021276597,
      "grad_norm": 0.4110883133835421,
      "learning_rate": 8.271337313934869e-07,
      "loss": 0.8132,
      "step": 195
    },
    {
      "epoch": 2.502127659574468,
      "grad_norm": 0.3498732547252578,
      "learning_rate": 7.863952067298042e-07,
      "loss": 0.609,
      "step": 196
    },
    {
      "epoch": 2.5148936170212766,
      "grad_norm": 0.4399096601042779,
      "learning_rate": 7.465996715633028e-07,
      "loss": 0.846,
      "step": 197
    },
    {
      "epoch": 2.527659574468085,
      "grad_norm": 0.40810583196696365,
      "learning_rate": 7.077560319906696e-07,
      "loss": 0.7215,
      "step": 198
    },
    {
      "epoch": 2.5404255319148934,
      "grad_norm": 0.3562003037576008,
      "learning_rate": 6.698729810778065e-07,
      "loss": 0.5401,
      "step": 199
    },
    {
      "epoch": 2.5531914893617023,
      "grad_norm": 0.41794386747962137,
      "learning_rate": 6.329589969143518e-07,
      "loss": 0.7005,
      "step": 200
    },
    {
      "epoch": 2.5659574468085107,
      "grad_norm": 0.395478659932515,
      "learning_rate": 5.9702234071631e-07,
      "loss": 0.6182,
      "step": 201
    },
    {
      "epoch": 2.578723404255319,
      "grad_norm": 0.4022640858856306,
      "learning_rate": 5.620710549772295e-07,
      "loss": 0.7059,
      "step": 202
    },
    {
      "epoch": 2.5914893617021275,
      "grad_norm": 0.37429538477036206,
      "learning_rate": 5.281129616683167e-07,
      "loss": 0.625,
      "step": 203
    },
    {
      "epoch": 2.604255319148936,
      "grad_norm": 0.4065643629196546,
      "learning_rate": 4.951556604879049e-07,
      "loss": 0.7309,
      "step": 204
    },
    {
      "epoch": 2.617021276595745,
      "grad_norm": 0.37314680627544133,
      "learning_rate": 4.632065271606756e-07,
      "loss": 0.6416,
      "step": 205
    },
    {
      "epoch": 2.629787234042553,
      "grad_norm": 0.37232602082470584,
      "learning_rate": 4.322727117869951e-07,
      "loss": 0.6622,
      "step": 206
    },
    {
      "epoch": 2.6425531914893616,
      "grad_norm": 0.39090758391988906,
      "learning_rate": 4.0236113724274716e-07,
      "loss": 0.6591,
      "step": 207
    },
    {
      "epoch": 2.65531914893617,
      "grad_norm": 0.3760763848394524,
      "learning_rate": 3.734784976300165e-07,
      "loss": 0.6527,
      "step": 208
    },
    {
      "epoch": 2.6680851063829785,
      "grad_norm": 0.3546421103370596,
      "learning_rate": 3.4563125677897936e-07,
      "loss": 0.6087,
      "step": 209
    },
    {
      "epoch": 2.6808510638297873,
      "grad_norm": 0.4558722715324015,
      "learning_rate": 3.18825646801314e-07,
      "loss": 0.7705,
      "step": 210
    },
    {
      "epoch": 2.6936170212765957,
      "grad_norm": 0.3838011747274437,
      "learning_rate": 2.930676666954846e-07,
      "loss": 0.6485,
      "step": 211
    },
    {
      "epoch": 2.706382978723404,
      "grad_norm": 0.3744192449633902,
      "learning_rate": 2.6836308100417874e-07,
      "loss": 0.5956,
      "step": 212
    },
    {
      "epoch": 2.719148936170213,
      "grad_norm": 0.38821467045177294,
      "learning_rate": 2.447174185242324e-07,
      "loss": 0.6515,
      "step": 213
    },
    {
      "epoch": 2.731914893617021,
      "grad_norm": 0.4320094550489657,
      "learning_rate": 2.2213597106929608e-07,
      "loss": 0.7521,
      "step": 214
    },
    {
      "epoch": 2.74468085106383,
      "grad_norm": 0.38626349645535607,
      "learning_rate": 2.006237922855553e-07,
      "loss": 0.6371,
      "step": 215
    },
    {
      "epoch": 2.7574468085106383,
      "grad_norm": 0.4022594095078819,
      "learning_rate": 1.801856965207338e-07,
      "loss": 0.7641,
      "step": 216
    },
    {
      "epoch": 2.7702127659574467,
      "grad_norm": 0.3752751624701117,
      "learning_rate": 1.6082625774666793e-07,
      "loss": 0.6856,
      "step": 217
    },
    {
      "epoch": 2.7829787234042556,
      "grad_norm": 0.39713460273316725,
      "learning_rate": 1.4254980853566248e-07,
      "loss": 0.661,
      "step": 218
    },
    {
      "epoch": 2.795744680851064,
      "grad_norm": 0.3951904567722507,
      "learning_rate": 1.253604390908819e-07,
      "loss": 0.7534,
      "step": 219
    },
    {
      "epoch": 2.8085106382978724,
      "grad_norm": 0.3912160449369003,
      "learning_rate": 1.0926199633097156e-07,
      "loss": 0.678,
      "step": 220
    },
    {
      "epoch": 2.821276595744681,
      "grad_norm": 0.37799117554814204,
      "learning_rate": 9.42580830291373e-08,
      "loss": 0.7268,
      "step": 221
    },
    {
      "epoch": 2.8340425531914892,
      "grad_norm": 0.37825741134583873,
      "learning_rate": 8.035205700685167e-08,
      "loss": 0.6707,
      "step": 222
    },
    {
      "epoch": 2.846808510638298,
      "grad_norm": 0.4431058847999343,
      "learning_rate": 6.75470303823933e-08,
      "loss": 0.7161,
      "step": 223
    },
    {
      "epoch": 2.8595744680851065,
      "grad_norm": 0.4156239531283083,
      "learning_rate": 5.584586887435739e-08,
      "loss": 0.6633,
      "step": 224
    },
    {
      "epoch": 2.872340425531915,
      "grad_norm": 0.41505028162528623,
      "learning_rate": 4.52511911603265e-08,
      "loss": 0.6042,
      "step": 225
    },
    {
      "epoch": 2.8851063829787233,
      "grad_norm": 0.38641333321427823,
      "learning_rate": 3.576536829081323e-08,
      "loss": 0.7831,
      "step": 226
    },
    {
      "epoch": 2.8978723404255318,
      "grad_norm": 0.38474595541773154,
      "learning_rate": 2.7390523158633552e-08,
      "loss": 0.6996,
      "step": 227
    },
    {
      "epoch": 2.9106382978723406,
      "grad_norm": 0.40452129313178115,
      "learning_rate": 2.012853002380466e-08,
      "loss": 0.6679,
      "step": 228
    },
    {
      "epoch": 2.923404255319149,
      "grad_norm": 0.37427821508754977,
      "learning_rate": 1.3981014094099354e-08,
      "loss": 0.6368,
      "step": 229
    },
    {
      "epoch": 2.9361702127659575,
      "grad_norm": 0.3995889111864553,
      "learning_rate": 8.949351161324227e-09,
      "loss": 0.7147,
      "step": 230
    },
    {
      "epoch": 2.948936170212766,
      "grad_norm": 0.38436649596851297,
      "learning_rate": 5.034667293427053e-09,
      "loss": 0.6326,
      "step": 231
    },
    {
      "epoch": 2.9617021276595743,
      "grad_norm": 0.3717333771442753,
      "learning_rate": 2.237838582483387e-09,
      "loss": 0.6752,
      "step": 232
    },
    {
      "epoch": 2.974468085106383,
      "grad_norm": 0.37070096497845584,
      "learning_rate": 5.594909486328348e-10,
      "loss": 0.6547,
      "step": 233
    },
    {
      "epoch": 2.9872340425531916,
      "grad_norm": 0.3783628778531463,
      "learning_rate": 0.0,
      "loss": 0.6771,
      "step": 234
    },
    {
      "epoch": 2.9872340425531916,
      "step": 234,
      "total_flos": 226130136268800.0,
      "train_loss": 0.7424698599383363,
      "train_runtime": 3709.8986,
      "train_samples_per_second": 6.065,
      "train_steps_per_second": 0.063
    }
  ],
  "logging_steps": 1,
  "max_steps": 234,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 226130136268800.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}