{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9872340425531916,
  "eval_steps": 500,
  "global_step": 234,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01276595744680851,
      "grad_norm": 6.353932376962155,
      "learning_rate": 4.1666666666666667e-07,
      "loss": 1.1912,
      "step": 1
    },
    {
      "epoch": 0.02553191489361702,
      "grad_norm": 6.22538335218383,
      "learning_rate": 8.333333333333333e-07,
      "loss": 1.1524,
      "step": 2
    },
    {
      "epoch": 0.03829787234042553,
      "grad_norm": 6.023789244570738,
      "learning_rate": 1.25e-06,
      "loss": 1.1156,
      "step": 3
    },
    {
      "epoch": 0.05106382978723404,
      "grad_norm": 6.298814184785475,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 1.2029,
      "step": 4
    },
    {
      "epoch": 0.06382978723404255,
      "grad_norm": 5.954891137312739,
      "learning_rate": 2.0833333333333334e-06,
      "loss": 1.1737,
      "step": 5
    },
    {
      "epoch": 0.07659574468085106,
      "grad_norm": 4.707769789262157,
      "learning_rate": 2.5e-06,
      "loss": 1.1566,
      "step": 6
    },
    {
      "epoch": 0.08936170212765958,
      "grad_norm": 4.06547531235841,
      "learning_rate": 2.916666666666667e-06,
      "loss": 1.0835,
      "step": 7
    },
    {
      "epoch": 0.10212765957446808,
      "grad_norm": 2.6783100585231767,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.0894,
      "step": 8
    },
    {
      "epoch": 0.1148936170212766,
      "grad_norm": 2.531714805749627,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 1.1027,
      "step": 9
    },
    {
      "epoch": 0.1276595744680851,
      "grad_norm": 2.3328797912721426,
      "learning_rate": 4.166666666666667e-06,
      "loss": 1.0661,
      "step": 10
    },
    {
      "epoch": 0.14042553191489363,
      "grad_norm": 4.06134025571158,
      "learning_rate": 4.583333333333333e-06,
      "loss": 1.0376,
      "step": 11
    },
    {
      "epoch": 0.15319148936170213,
      "grad_norm": 4.184622856911514,
      "learning_rate": 5e-06,
      "loss": 1.0636,
      "step": 12
    },
    {
      "epoch": 0.16595744680851063,
      "grad_norm": 4.199138698508921,
      "learning_rate": 5.416666666666667e-06,
      "loss": 1.0713,
      "step": 13
    },
    {
      "epoch": 0.17872340425531916,
      "grad_norm": 3.0771186783481066,
      "learning_rate": 5.833333333333334e-06,
      "loss": 1.021,
      "step": 14
    },
    {
      "epoch": 0.19148936170212766,
      "grad_norm": 2.9827797840666106,
      "learning_rate": 6.25e-06,
      "loss": 0.994,
      "step": 15
    },
    {
      "epoch": 0.20425531914893616,
      "grad_norm": 2.5267499262248165,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.9823,
      "step": 16
    },
    {
      "epoch": 0.2170212765957447,
      "grad_norm": 2.2630049185041625,
      "learning_rate": 7.083333333333335e-06,
      "loss": 0.9355,
      "step": 17
    },
    {
      "epoch": 0.2297872340425532,
      "grad_norm": 2.1526932427229086,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.9551,
      "step": 18
    },
    {
      "epoch": 0.2425531914893617,
      "grad_norm": 1.8518564917059142,
      "learning_rate": 7.916666666666667e-06,
      "loss": 0.9394,
      "step": 19
    },
    {
      "epoch": 0.2553191489361702,
      "grad_norm": 1.6748374760293612,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.9586,
      "step": 20
    },
    {
      "epoch": 0.2680851063829787,
      "grad_norm": 1.4038164790827745,
      "learning_rate": 8.750000000000001e-06,
      "loss": 0.9333,
      "step": 21
    },
    {
      "epoch": 0.28085106382978725,
      "grad_norm": 1.3518569082484015,
      "learning_rate": 9.166666666666666e-06,
      "loss": 0.9325,
      "step": 22
    },
    {
      "epoch": 0.2936170212765957,
      "grad_norm": 1.1090409192234645,
      "learning_rate": 9.583333333333335e-06,
      "loss": 0.8934,
      "step": 23
    },
    {
      "epoch": 0.30638297872340425,
      "grad_norm": 1.1504957844523982,
      "learning_rate": 1e-05,
      "loss": 0.9226,
      "step": 24
    },
    {
      "epoch": 0.3191489361702128,
      "grad_norm": 1.0220624504910976,
      "learning_rate": 9.999440509051367e-06,
      "loss": 0.8972,
      "step": 25
    },
    {
      "epoch": 0.33191489361702126,
      "grad_norm": 1.0571307771486256,
      "learning_rate": 9.997762161417517e-06,
      "loss": 0.8894,
      "step": 26
    },
    {
      "epoch": 0.3446808510638298,
      "grad_norm": 0.9979834526620437,
      "learning_rate": 9.994965332706574e-06,
      "loss": 0.8751,
      "step": 27
    },
    {
      "epoch": 0.3574468085106383,
      "grad_norm": 0.9631038384731712,
      "learning_rate": 9.991050648838676e-06,
      "loss": 0.8794,
      "step": 28
    },
    {
      "epoch": 0.3702127659574468,
      "grad_norm": 0.9639536468707164,
      "learning_rate": 9.986018985905901e-06,
      "loss": 0.8651,
      "step": 29
    },
    {
      "epoch": 0.3829787234042553,
      "grad_norm": 0.9991789301222855,
      "learning_rate": 9.979871469976197e-06,
      "loss": 0.8748,
      "step": 30
    },
    {
      "epoch": 0.39574468085106385,
      "grad_norm": 0.7084177291168907,
      "learning_rate": 9.972609476841368e-06,
      "loss": 0.8668,
      "step": 31
    },
    {
      "epoch": 0.4085106382978723,
      "grad_norm": 0.9941892229146153,
      "learning_rate": 9.964234631709188e-06,
      "loss": 0.9048,
      "step": 32
    },
    {
      "epoch": 0.42127659574468085,
      "grad_norm": 0.8881707523663802,
      "learning_rate": 9.954748808839675e-06,
      "loss": 0.8814,
      "step": 33
    },
    {
      "epoch": 0.4340425531914894,
      "grad_norm": 0.8130437377464066,
      "learning_rate": 9.944154131125643e-06,
      "loss": 0.8748,
      "step": 34
    },
    {
      "epoch": 0.44680851063829785,
      "grad_norm": 0.8570977832402222,
      "learning_rate": 9.932452969617607e-06,
      "loss": 0.8866,
      "step": 35
    },
    {
      "epoch": 0.4595744680851064,
      "grad_norm": 0.7423707329128824,
      "learning_rate": 9.91964794299315e-06,
      "loss": 0.8582,
      "step": 36
    },
    {
      "epoch": 0.4723404255319149,
      "grad_norm": 0.7198235435636622,
      "learning_rate": 9.905741916970863e-06,
      "loss": 0.8598,
      "step": 37
    },
    {
      "epoch": 0.4851063829787234,
      "grad_norm": 0.7749185329627992,
      "learning_rate": 9.890738003669029e-06,
      "loss": 0.8458,
      "step": 38
    },
    {
      "epoch": 0.4978723404255319,
      "grad_norm": 0.6433008471066703,
      "learning_rate": 9.874639560909118e-06,
      "loss": 0.8221,
      "step": 39
    },
    {
      "epoch": 0.5106382978723404,
      "grad_norm": 0.7978894394809969,
      "learning_rate": 9.857450191464337e-06,
      "loss": 0.8824,
      "step": 40
    },
    {
      "epoch": 0.5234042553191489,
      "grad_norm": 0.767845866948557,
      "learning_rate": 9.839173742253334e-06,
      "loss": 0.8744,
      "step": 41
    },
    {
      "epoch": 0.5361702127659574,
      "grad_norm": 0.7396968322828916,
      "learning_rate": 9.819814303479268e-06,
      "loss": 0.8635,
      "step": 42
    },
    {
      "epoch": 0.548936170212766,
      "grad_norm": 0.6779405395661932,
      "learning_rate": 9.799376207714446e-06,
      "loss": 0.8284,
      "step": 43
    },
    {
      "epoch": 0.5617021276595745,
      "grad_norm": 0.7153502005873587,
      "learning_rate": 9.777864028930705e-06,
      "loss": 0.8818,
      "step": 44
    },
    {
      "epoch": 0.574468085106383,
      "grad_norm": 0.6863317883746143,
      "learning_rate": 9.755282581475769e-06,
      "loss": 0.8713,
      "step": 45
    },
    {
      "epoch": 0.5872340425531914,
      "grad_norm": 0.7130901083688288,
      "learning_rate": 9.731636918995821e-06,
      "loss": 0.8134,
      "step": 46
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.6898306425311603,
      "learning_rate": 9.706932333304518e-06,
      "loss": 0.8581,
      "step": 47
    },
    {
      "epoch": 0.6127659574468085,
      "grad_norm": 0.6982172738750606,
      "learning_rate": 9.681174353198687e-06,
      "loss": 0.8413,
      "step": 48
    },
    {
      "epoch": 0.625531914893617,
      "grad_norm": 0.8275251593813877,
      "learning_rate": 9.654368743221022e-06,
      "loss": 0.8602,
      "step": 49
    },
    {
      "epoch": 0.6382978723404256,
      "grad_norm": 0.7343850642149492,
      "learning_rate": 9.626521502369984e-06,
      "loss": 0.826,
      "step": 50
    },
    {
      "epoch": 0.6510638297872341,
      "grad_norm": 0.7654533248805878,
      "learning_rate": 9.597638862757255e-06,
      "loss": 0.8888,
      "step": 51
    },
    {
      "epoch": 0.6638297872340425,
      "grad_norm": 0.6500639945476345,
      "learning_rate": 9.567727288213005e-06,
      "loss": 0.8201,
      "step": 52
    },
    {
      "epoch": 0.676595744680851,
      "grad_norm": 0.87098438448592,
      "learning_rate": 9.536793472839325e-06,
      "loss": 0.8661,
      "step": 53
    },
    {
      "epoch": 0.6893617021276596,
      "grad_norm": 0.7053185152695439,
      "learning_rate": 9.504844339512096e-06,
      "loss": 0.807,
      "step": 54
    },
    {
      "epoch": 0.7021276595744681,
      "grad_norm": 0.672193427899284,
      "learning_rate": 9.471887038331686e-06,
      "loss": 0.8168,
      "step": 55
    },
    {
      "epoch": 0.7148936170212766,
      "grad_norm": 0.6999423156768017,
      "learning_rate": 9.437928945022772e-06,
      "loss": 0.8412,
      "step": 56
    },
    {
      "epoch": 0.7276595744680852,
      "grad_norm": 0.6568075220707117,
      "learning_rate": 9.40297765928369e-06,
      "loss": 0.8468,
      "step": 57
    },
    {
      "epoch": 0.7404255319148936,
      "grad_norm": 0.6590937008148525,
      "learning_rate": 9.36704100308565e-06,
      "loss": 0.8368,
      "step": 58
    },
    {
      "epoch": 0.7531914893617021,
      "grad_norm": 0.6343228191926094,
      "learning_rate": 9.330127018922195e-06,
      "loss": 0.8368,
      "step": 59
    },
    {
      "epoch": 0.7659574468085106,
      "grad_norm": 0.6711624025837197,
      "learning_rate": 9.292243968009332e-06,
      "loss": 0.8024,
      "step": 60
    },
    {
      "epoch": 0.7787234042553192,
      "grad_norm": 0.8360001836286379,
      "learning_rate": 9.253400328436699e-06,
      "loss": 0.8484,
      "step": 61
    },
    {
      "epoch": 0.7914893617021277,
      "grad_norm": 0.6685752235146978,
      "learning_rate": 9.213604793270196e-06,
      "loss": 0.8408,
      "step": 62
    },
    {
      "epoch": 0.8042553191489362,
      "grad_norm": 0.658221678510916,
      "learning_rate": 9.172866268606514e-06,
      "loss": 0.8535,
      "step": 63
    },
    {
      "epoch": 0.8170212765957446,
      "grad_norm": 0.6507254367865235,
      "learning_rate": 9.131193871579975e-06,
      "loss": 0.8747,
      "step": 64
    },
    {
      "epoch": 0.8297872340425532,
      "grad_norm": 0.6460028531302459,
      "learning_rate": 9.088596928322158e-06,
      "loss": 0.8235,
      "step": 65
    },
    {
      "epoch": 0.8425531914893617,
      "grad_norm": 0.6822964607405055,
      "learning_rate": 9.045084971874738e-06,
      "loss": 0.8686,
      "step": 66
    },
    {
      "epoch": 0.8553191489361702,
      "grad_norm": 0.6076116804305184,
      "learning_rate": 9.000667740056033e-06,
      "loss": 0.8193,
      "step": 67
    },
    {
      "epoch": 0.8680851063829788,
      "grad_norm": 0.6590919977095543,
      "learning_rate": 8.955355173281709e-06,
      "loss": 0.8728,
      "step": 68
    },
    {
      "epoch": 0.8808510638297873,
      "grad_norm": 0.80455292594799,
      "learning_rate": 8.90915741234015e-06,
      "loss": 0.8639,
      "step": 69
    },
    {
      "epoch": 0.8936170212765957,
      "grad_norm": 0.798116367786634,
      "learning_rate": 8.862084796122998e-06,
      "loss": 0.8419,
      "step": 70
    },
    {
      "epoch": 0.9063829787234042,
      "grad_norm": 0.6175272732760302,
      "learning_rate": 8.814147859311333e-06,
      "loss": 0.8281,
      "step": 71
    },
    {
      "epoch": 0.9191489361702128,
      "grad_norm": 0.6991366864152418,
      "learning_rate": 8.765357330018056e-06,
      "loss": 0.7673,
      "step": 72
    },
    {
      "epoch": 0.9319148936170213,
      "grad_norm": 0.7800616049786662,
      "learning_rate": 8.715724127386971e-06,
      "loss": 0.8352,
      "step": 73
    },
    {
      "epoch": 0.9446808510638298,
      "grad_norm": 0.6071910134631383,
      "learning_rate": 8.665259359149132e-06,
      "loss": 0.7964,
      "step": 74
    },
    {
      "epoch": 0.9574468085106383,
      "grad_norm": 0.7175207409525108,
      "learning_rate": 8.613974319136959e-06,
      "loss": 0.8003,
      "step": 75
    },
    {
      "epoch": 0.9702127659574468,
      "grad_norm": 0.7269990693905276,
      "learning_rate": 8.561880484756726e-06,
      "loss": 0.8035,
      "step": 76
    },
    {
      "epoch": 0.9829787234042553,
      "grad_norm": 0.643014711354184,
      "learning_rate": 8.508989514419959e-06,
      "loss": 0.832,
      "step": 77
    },
    {
      "epoch": 0.9957446808510638,
      "grad_norm": 0.5906253178794993,
      "learning_rate": 8.455313244934324e-06,
      "loss": 0.8212,
      "step": 78
    },
    {
      "epoch": 1.0085106382978724,
      "grad_norm": 1.4253189700474573,
      "learning_rate": 8.400863688854598e-06,
      "loss": 1.3589,
      "step": 79
    },
    {
      "epoch": 1.0212765957446808,
      "grad_norm": 0.6252290101034432,
      "learning_rate": 8.345653031794292e-06,
      "loss": 0.681,
      "step": 80
    },
    {
      "epoch": 1.0340425531914894,
      "grad_norm": 0.6126983062781691,
      "learning_rate": 8.289693629698564e-06,
      "loss": 0.7441,
      "step": 81
    },
    {
      "epoch": 1.0468085106382978,
      "grad_norm": 0.5764424319094782,
      "learning_rate": 8.232998006078998e-06,
      "loss": 0.743,
      "step": 82
    },
    {
      "epoch": 1.0595744680851065,
      "grad_norm": 0.6951973478221167,
      "learning_rate": 8.175578849210894e-06,
      "loss": 0.8549,
      "step": 83
    },
    {
      "epoch": 1.0723404255319149,
      "grad_norm": 0.741475138432806,
      "learning_rate": 8.117449009293668e-06,
      "loss": 0.7314,
      "step": 84
    },
    {
      "epoch": 1.0851063829787233,
      "grad_norm": 0.619112834273637,
      "learning_rate": 8.058621495575032e-06,
      "loss": 0.69,
      "step": 85
    },
    {
      "epoch": 1.097872340425532,
      "grad_norm": 0.6709089764707235,
      "learning_rate": 7.99910947343957e-06,
      "loss": 0.7606,
      "step": 86
    },
    {
      "epoch": 1.1106382978723404,
      "grad_norm": 0.9095717040896454,
      "learning_rate": 7.938926261462366e-06,
      "loss": 0.7677,
      "step": 87
    },
    {
      "epoch": 1.123404255319149,
      "grad_norm": 0.7296980623269048,
      "learning_rate": 7.87808532842837e-06,
      "loss": 0.7596,
      "step": 88
    },
    {
      "epoch": 1.1361702127659574,
      "grad_norm": 0.775655214973485,
      "learning_rate": 7.81660029031811e-06,
      "loss": 0.7416,
      "step": 89
    },
    {
      "epoch": 1.148936170212766,
      "grad_norm": 0.6743944889465332,
      "learning_rate": 7.754484907260513e-06,
      "loss": 0.688,
      "step": 90
    },
    {
      "epoch": 1.1617021276595745,
      "grad_norm": 0.6335100743206861,
      "learning_rate": 7.691753080453413e-06,
      "loss": 0.7427,
      "step": 91
    },
    {
      "epoch": 1.174468085106383,
      "grad_norm": 0.711181947370624,
      "learning_rate": 7.628418849052523e-06,
      "loss": 0.7761,
      "step": 92
    },
    {
      "epoch": 1.1872340425531915,
      "grad_norm": 0.7514220496808613,
      "learning_rate": 7.564496387029532e-06,
      "loss": 0.8177,
      "step": 93
    },
    {
      "epoch": 1.2,
      "grad_norm": 0.658573568051986,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.731,
      "step": 94
    },
    {
      "epoch": 1.2127659574468086,
      "grad_norm": 0.6461429813659317,
      "learning_rate": 7.434944122021837e-06,
      "loss": 0.7076,
      "step": 95
    },
    {
      "epoch": 1.225531914893617,
      "grad_norm": 0.6072398670230126,
      "learning_rate": 7.369343312364994e-06,
      "loss": 0.7707,
      "step": 96
    },
    {
      "epoch": 1.2382978723404254,
      "grad_norm": 0.5683579113242971,
      "learning_rate": 7.303212252253163e-06,
      "loss": 0.7246,
      "step": 97
    },
    {
      "epoch": 1.251063829787234,
      "grad_norm": 0.6470968617096235,
      "learning_rate": 7.236565741578163e-06,
      "loss": 0.8116,
      "step": 98
    },
    {
      "epoch": 1.2638297872340425,
      "grad_norm": 0.6290836574482417,
      "learning_rate": 7.169418695587791e-06,
      "loss": 0.7499,
      "step": 99
    },
    {
      "epoch": 1.2765957446808511,
      "grad_norm": 0.5386462029087553,
      "learning_rate": 7.101786141547829e-06,
      "loss": 0.7554,
      "step": 100
    },
    {
      "epoch": 1.2893617021276595,
      "grad_norm": 0.6379302400766887,
      "learning_rate": 7.033683215379002e-06,
      "loss": 0.6596,
      "step": 101
    },
    {
      "epoch": 1.302127659574468,
      "grad_norm": 0.6738218338500571,
      "learning_rate": 6.965125158269619e-06,
      "loss": 0.8226,
      "step": 102
    },
    {
      "epoch": 1.3148936170212766,
      "grad_norm": 0.592967066545746,
      "learning_rate": 6.896127313264643e-06,
      "loss": 0.8034,
      "step": 103
    },
    {
      "epoch": 1.327659574468085,
      "grad_norm": 0.5714401976787592,
      "learning_rate": 6.8267051218319766e-06,
      "loss": 0.7348,
      "step": 104
    },
    {
      "epoch": 1.3404255319148937,
      "grad_norm": 0.5976528961528519,
      "learning_rate": 6.7568741204067145e-06,
      "loss": 0.7203,
      "step": 105
    },
    {
      "epoch": 1.353191489361702,
      "grad_norm": 0.5777615361976374,
      "learning_rate": 6.686649936914151e-06,
      "loss": 0.7419,
      "step": 106
    },
    {
      "epoch": 1.3659574468085105,
      "grad_norm": 0.5348666700952892,
      "learning_rate": 6.616048287272301e-06,
      "loss": 0.7898,
      "step": 107
    },
    {
      "epoch": 1.3787234042553191,
      "grad_norm": 0.6621307601091048,
      "learning_rate": 6.545084971874738e-06,
      "loss": 0.8062,
      "step": 108
    },
    {
      "epoch": 1.3914893617021278,
      "grad_norm": 0.691721594153437,
      "learning_rate": 6.473775872054522e-06,
      "loss": 0.8034,
      "step": 109
    },
    {
      "epoch": 1.4042553191489362,
      "grad_norm": 0.5686130190581552,
      "learning_rate": 6.402136946530014e-06,
      "loss": 0.7673,
      "step": 110
    },
    {
      "epoch": 1.4170212765957446,
      "grad_norm": 0.4756378346544592,
      "learning_rate": 6.330184227833376e-06,
      "loss": 0.6315,
      "step": 111
    },
    {
      "epoch": 1.4297872340425533,
      "grad_norm": 0.6206136672748521,
      "learning_rate": 6.257933818722544e-06,
      "loss": 0.7698,
      "step": 112
    },
    {
      "epoch": 1.4425531914893617,
      "grad_norm": 0.6303389185178494,
      "learning_rate": 6.185401888577488e-06,
      "loss": 0.8267,
      "step": 113
    },
    {
      "epoch": 1.4553191489361703,
      "grad_norm": 0.5452107765410494,
      "learning_rate": 6.112604669781572e-06,
      "loss": 0.6971,
      "step": 114
    },
    {
      "epoch": 1.4680851063829787,
      "grad_norm": 0.6060781455714925,
      "learning_rate": 6.039558454088796e-06,
      "loss": 0.8687,
      "step": 115
    },
    {
      "epoch": 1.4808510638297872,
      "grad_norm": 0.6013851352507967,
      "learning_rate": 5.9662795889777666e-06,
      "loss": 0.732,
      "step": 116
    },
    {
      "epoch": 1.4936170212765958,
      "grad_norm": 0.5562358332246483,
      "learning_rate": 5.892784473993184e-06,
      "loss": 0.6502,
      "step": 117
    },
    {
      "epoch": 1.5063829787234042,
      "grad_norm": 0.5675479514156825,
      "learning_rate": 5.819089557075689e-06,
      "loss": 0.7959,
      "step": 118
    },
    {
      "epoch": 1.5191489361702128,
      "grad_norm": 0.546953474852278,
      "learning_rate": 5.745211330880872e-06,
      "loss": 0.7351,
      "step": 119
    },
    {
      "epoch": 1.5319148936170213,
      "grad_norm": 0.5583886944268567,
      "learning_rate": 5.671166329088278e-06,
      "loss": 0.7528,
      "step": 120
    },
    {
      "epoch": 1.5446808510638297,
      "grad_norm": 0.5600086311533115,
      "learning_rate": 5.596971122701221e-06,
      "loss": 0.8955,
      "step": 121
    },
    {
      "epoch": 1.5574468085106383,
      "grad_norm": 0.6029023748669958,
      "learning_rate": 5.522642316338268e-06,
      "loss": 0.7261,
      "step": 122
    },
    {
      "epoch": 1.570212765957447,
      "grad_norm": 0.6342694584033263,
      "learning_rate": 5.448196544517168e-06,
      "loss": 0.7679,
      "step": 123
    },
    {
      "epoch": 1.5829787234042554,
      "grad_norm": 0.6065847834649665,
      "learning_rate": 5.373650467932122e-06,
      "loss": 0.7638,
      "step": 124
    },
    {
      "epoch": 1.5957446808510638,
      "grad_norm": 0.5265674527652279,
      "learning_rate": 5.299020769725172e-06,
      "loss": 0.787,
      "step": 125
    },
    {
      "epoch": 1.6085106382978722,
      "grad_norm": 0.6149521469743251,
      "learning_rate": 5.224324151752575e-06,
      "loss": 0.8122,
      "step": 126
    },
    {
      "epoch": 1.6212765957446809,
      "grad_norm": 0.6086968303307506,
      "learning_rate": 5.1495773308469935e-06,
      "loss": 0.7534,
      "step": 127
    },
    {
      "epoch": 1.6340425531914895,
      "grad_norm": 0.5682726195284596,
      "learning_rate": 5.074797035076319e-06,
      "loss": 0.7181,
      "step": 128
    },
    {
      "epoch": 1.646808510638298,
      "grad_norm": 0.5754891009333322,
      "learning_rate": 5e-06,
      "loss": 0.892,
      "step": 129
    },
    {
      "epoch": 1.6595744680851063,
      "grad_norm": 0.6043646952027831,
      "learning_rate": 4.9252029649236835e-06,
      "loss": 0.7209,
      "step": 130
    },
    {
      "epoch": 1.6723404255319148,
      "grad_norm": 0.5578664828925858,
      "learning_rate": 4.850422669153009e-06,
      "loss": 0.7289,
      "step": 131
    },
    {
      "epoch": 1.6851063829787234,
      "grad_norm": 0.5988474734097357,
      "learning_rate": 4.775675848247427e-06,
      "loss": 0.7493,
      "step": 132
    },
    {
      "epoch": 1.697872340425532,
      "grad_norm": 0.5279090391793908,
      "learning_rate": 4.700979230274829e-06,
      "loss": 0.6385,
      "step": 133
    },
    {
      "epoch": 1.7106382978723405,
      "grad_norm": 0.6477329260959007,
      "learning_rate": 4.626349532067879e-06,
      "loss": 0.868,
      "step": 134
    },
    {
      "epoch": 1.7234042553191489,
      "grad_norm": 0.574998428834536,
      "learning_rate": 4.551803455482833e-06,
      "loss": 0.7627,
      "step": 135
    },
    {
      "epoch": 1.7361702127659573,
      "grad_norm": 0.5203426261066783,
      "learning_rate": 4.477357683661734e-06,
      "loss": 0.7253,
      "step": 136
    },
    {
      "epoch": 1.748936170212766,
      "grad_norm": 0.6267633363557024,
      "learning_rate": 4.4030288772987795e-06,
      "loss": 0.7586,
      "step": 137
    },
    {
      "epoch": 1.7617021276595746,
      "grad_norm": 0.5632632149868597,
      "learning_rate": 4.3288336709117246e-06,
      "loss": 0.808,
      "step": 138
    },
    {
      "epoch": 1.774468085106383,
      "grad_norm": 0.5630999443363466,
      "learning_rate": 4.254788669119127e-06,
      "loss": 0.7702,
      "step": 139
    },
    {
      "epoch": 1.7872340425531914,
      "grad_norm": 0.5442195790445595,
      "learning_rate": 4.180910442924312e-06,
      "loss": 0.7557,
      "step": 140
    },
    {
      "epoch": 1.8,
      "grad_norm": 0.5883967505552894,
      "learning_rate": 4.107215526006818e-06,
      "loss": 0.7419,
      "step": 141
    },
    {
      "epoch": 1.8127659574468085,
      "grad_norm": 0.6080687924951912,
      "learning_rate": 4.033720411022235e-06,
      "loss": 0.666,
      "step": 142
    },
    {
      "epoch": 1.825531914893617,
      "grad_norm": 0.6251220871700776,
      "learning_rate": 3.960441545911205e-06,
      "loss": 0.772,
      "step": 143
    },
    {
      "epoch": 1.8382978723404255,
      "grad_norm": 0.5777374888887631,
      "learning_rate": 3.887395330218429e-06,
      "loss": 0.7972,
      "step": 144
    },
    {
      "epoch": 1.851063829787234,
      "grad_norm": 0.5341946785783167,
      "learning_rate": 3.8145981114225135e-06,
      "loss": 0.6953,
      "step": 145
    },
    {
      "epoch": 1.8638297872340426,
      "grad_norm": 0.6516092653149019,
      "learning_rate": 3.7420661812774577e-06,
      "loss": 0.8309,
      "step": 146
    },
    {
      "epoch": 1.8765957446808512,
      "grad_norm": 0.6366696504215543,
      "learning_rate": 3.669815772166625e-06,
      "loss": 0.6375,
      "step": 147
    },
    {
      "epoch": 1.8893617021276596,
      "grad_norm": 0.5764338420537471,
      "learning_rate": 3.5978630534699873e-06,
      "loss": 0.7718,
      "step": 148
    },
    {
      "epoch": 1.902127659574468,
      "grad_norm": 0.5034839691318637,
      "learning_rate": 3.526224127945479e-06,
      "loss": 0.7586,
      "step": 149
    },
    {
      "epoch": 1.9148936170212765,
      "grad_norm": 0.659371553780018,
      "learning_rate": 3.4549150281252635e-06,
      "loss": 0.8194,
      "step": 150
    },
    {
      "epoch": 1.9276595744680851,
      "grad_norm": 0.575488935335163,
      "learning_rate": 3.383951712727701e-06,
      "loss": 0.7998,
      "step": 151
    },
    {
      "epoch": 1.9404255319148938,
      "grad_norm": 0.6021998191038525,
      "learning_rate": 3.3133500630858507e-06,
      "loss": 0.7879,
      "step": 152
    },
    {
      "epoch": 1.9531914893617022,
      "grad_norm": 0.5477249938045327,
      "learning_rate": 3.2431258795932863e-06,
      "loss": 0.7206,
      "step": 153
    },
    {
      "epoch": 1.9659574468085106,
      "grad_norm": 0.5811445369216853,
      "learning_rate": 3.173294878168025e-06,
      "loss": 0.7528,
      "step": 154
    },
    {
      "epoch": 1.978723404255319,
      "grad_norm": 0.5280702510308407,
      "learning_rate": 3.1038726867353587e-06,
      "loss": 0.7817,
      "step": 155
    },
    {
      "epoch": 1.9914893617021276,
      "grad_norm": 0.5652222500582458,
      "learning_rate": 3.0348748417303826e-06,
      "loss": 0.7491,
      "step": 156
    },
    {
      "epoch": 2.0042553191489363,
      "grad_norm": 1.0505257444754954,
      "learning_rate": 2.966316784621e-06,
      "loss": 1.2112,
      "step": 157
    },
    {
      "epoch": 2.0170212765957447,
      "grad_norm": 0.5534679233773416,
      "learning_rate": 2.8982138584521734e-06,
      "loss": 0.6694,
      "step": 158
    },
    {
      "epoch": 2.029787234042553,
      "grad_norm": 0.6416084502104811,
      "learning_rate": 2.83058130441221e-06,
      "loss": 0.7556,
      "step": 159
    },
    {
      "epoch": 2.0425531914893615,
      "grad_norm": 0.4798010455035571,
      "learning_rate": 2.7634342584218364e-06,
      "loss": 0.6236,
      "step": 160
    },
    {
      "epoch": 2.0553191489361704,
      "grad_norm": 0.6989318235437988,
      "learning_rate": 2.6967877477468394e-06,
      "loss": 0.8026,
      "step": 161
    },
    {
      "epoch": 2.068085106382979,
      "grad_norm": 0.6281981748050607,
      "learning_rate": 2.6306566876350072e-06,
      "loss": 0.693,
      "step": 162
    },
    {
      "epoch": 2.0808510638297872,
      "grad_norm": 0.5152717911140114,
      "learning_rate": 2.5650558779781635e-06,
      "loss": 0.695,
      "step": 163
    },
    {
      "epoch": 2.0936170212765957,
      "grad_norm": 0.5157593106953199,
      "learning_rate": 2.5000000000000015e-06,
      "loss": 0.6958,
      "step": 164
    },
    {
      "epoch": 2.106382978723404,
      "grad_norm": 0.542890735627848,
      "learning_rate": 2.43550361297047e-06,
      "loss": 0.6422,
      "step": 165
    },
    {
      "epoch": 2.119148936170213,
      "grad_norm": 0.6354916831539539,
      "learning_rate": 2.371581150947476e-06,
      "loss": 0.7221,
      "step": 166
    },
    {
      "epoch": 2.1319148936170214,
      "grad_norm": 0.5071694106301671,
      "learning_rate": 2.3082469195465893e-06,
      "loss": 0.6881,
      "step": 167
    },
    {
      "epoch": 2.1446808510638298,
      "grad_norm": 0.5270976651431593,
      "learning_rate": 2.245515092739488e-06,
      "loss": 0.714,
      "step": 168
    },
    {
      "epoch": 2.157446808510638,
      "grad_norm": 0.6107743680203847,
      "learning_rate": 2.1833997096818897e-06,
      "loss": 0.8456,
      "step": 169
    },
    {
      "epoch": 2.1702127659574466,
      "grad_norm": 0.5394700063686253,
      "learning_rate": 2.1219146715716332e-06,
      "loss": 0.6249,
      "step": 170
    },
    {
      "epoch": 2.1829787234042555,
      "grad_norm": 0.5667817318976859,
      "learning_rate": 2.061073738537635e-06,
      "loss": 0.7802,
      "step": 171
    },
    {
      "epoch": 2.195744680851064,
      "grad_norm": 0.5044529000484788,
      "learning_rate": 2.0008905265604316e-06,
      "loss": 0.5842,
      "step": 172
    },
    {
      "epoch": 2.2085106382978723,
      "grad_norm": 0.6094419863003621,
      "learning_rate": 1.941378504424968e-06,
      "loss": 0.8071,
      "step": 173
    },
    {
      "epoch": 2.2212765957446807,
      "grad_norm": 0.505840415405961,
      "learning_rate": 1.8825509907063328e-06,
      "loss": 0.7655,
      "step": 174
    },
    {
      "epoch": 2.2340425531914896,
      "grad_norm": 0.5329780431110791,
      "learning_rate": 1.8244211507891064e-06,
      "loss": 0.6509,
      "step": 175
    },
    {
      "epoch": 2.246808510638298,
      "grad_norm": 0.5560722646167355,
      "learning_rate": 1.7670019939210025e-06,
      "loss": 0.6761,
      "step": 176
    },
    {
      "epoch": 2.2595744680851064,
      "grad_norm": 0.5278556863776027,
      "learning_rate": 1.7103063703014372e-06,
      "loss": 0.752,
      "step": 177
    },
    {
      "epoch": 2.272340425531915,
      "grad_norm": 0.5540546065487634,
      "learning_rate": 1.6543469682057105e-06,
      "loss": 0.7025,
      "step": 178
    },
    {
      "epoch": 2.2851063829787233,
      "grad_norm": 0.5313374180186387,
      "learning_rate": 1.5991363111454023e-06,
      "loss": 0.6167,
      "step": 179
    },
    {
      "epoch": 2.297872340425532,
      "grad_norm": 0.562448332468006,
      "learning_rate": 1.544686755065677e-06,
      "loss": 0.5999,
      "step": 180
    },
    {
      "epoch": 2.3106382978723405,
      "grad_norm": 0.5214892741988327,
      "learning_rate": 1.4910104855800429e-06,
      "loss": 0.7743,
      "step": 181
    },
    {
      "epoch": 2.323404255319149,
      "grad_norm": 0.5174189126938886,
      "learning_rate": 1.438119515243277e-06,
      "loss": 0.735,
      "step": 182
    },
    {
      "epoch": 2.3361702127659574,
      "grad_norm": 0.45428482617916605,
      "learning_rate": 1.3860256808630429e-06,
      "loss": 0.6105,
      "step": 183
    },
    {
      "epoch": 2.348936170212766,
      "grad_norm": 0.5152662327467596,
      "learning_rate": 1.3347406408508695e-06,
      "loss": 0.7582,
      "step": 184
    },
    {
      "epoch": 2.3617021276595747,
      "grad_norm": 0.5469842875723849,
      "learning_rate": 1.2842758726130283e-06,
      "loss": 0.8307,
      "step": 185
    },
    {
      "epoch": 2.374468085106383,
      "grad_norm": 0.465872379406897,
      "learning_rate": 1.234642669981946e-06,
      "loss": 0.6998,
      "step": 186
    },
    {
      "epoch": 2.3872340425531915,
      "grad_norm": 0.5505647045899656,
      "learning_rate": 1.1858521406886674e-06,
      "loss": 0.8048,
      "step": 187
    },
    {
      "epoch": 2.4,
      "grad_norm": 0.5063316847953139,
      "learning_rate": 1.137915203877003e-06,
      "loss": 0.6056,
      "step": 188
    },
    {
      "epoch": 2.4127659574468083,
      "grad_norm": 0.50130245907352,
      "learning_rate": 1.0908425876598512e-06,
      "loss": 0.6458,
      "step": 189
    },
    {
      "epoch": 2.425531914893617,
      "grad_norm": 0.5208231184042346,
      "learning_rate": 1.044644826718295e-06,
      "loss": 0.7236,
      "step": 190
    },
    {
      "epoch": 2.4382978723404256,
      "grad_norm": 0.5030678286204384,
      "learning_rate": 9.993322599439692e-07,
      "loss": 0.688,
      "step": 191
    },
    {
      "epoch": 2.451063829787234,
      "grad_norm": 0.5019824022121786,
      "learning_rate": 9.549150281252633e-07,
      "loss": 0.7615,
      "step": 192
    },
    {
      "epoch": 2.4638297872340424,
      "grad_norm": 0.5156242724008653,
      "learning_rate": 9.114030716778433e-07,
      "loss": 0.6917,
      "step": 193
    },
    {
      "epoch": 2.476595744680851,
      "grad_norm": 0.4810684332662468,
      "learning_rate": 8.688061284200266e-07,
      "loss": 0.7123,
      "step": 194
    },
    {
      "epoch": 2.4893617021276597,
      "grad_norm": 0.49610188906380476,
      "learning_rate": 8.271337313934869e-07,
      "loss": 0.6604,
      "step": 195
    },
    {
      "epoch": 2.502127659574468,
      "grad_norm": 0.5096131733768843,
      "learning_rate": 7.863952067298042e-07,
      "loss": 0.7049,
      "step": 196
    },
    {
      "epoch": 2.5148936170212766,
      "grad_norm": 0.46084306907290395,
      "learning_rate": 7.465996715633028e-07,
      "loss": 0.6966,
      "step": 197
    },
    {
      "epoch": 2.527659574468085,
      "grad_norm": 0.49577140692888255,
      "learning_rate": 7.077560319906696e-07,
      "loss": 0.7798,
      "step": 198
    },
    {
      "epoch": 2.5404255319148934,
      "grad_norm": 0.5012719167353504,
      "learning_rate": 6.698729810778065e-07,
      "loss": 0.7255,
      "step": 199
    },
    {
      "epoch": 2.5531914893617023,
      "grad_norm": 0.43751384195216086,
      "learning_rate": 6.329589969143518e-07,
      "loss": 0.6144,
      "step": 200
    },
    {
      "epoch": 2.5659574468085107,
      "grad_norm": 0.4906859931725163,
      "learning_rate": 5.9702234071631e-07,
      "loss": 0.7007,
      "step": 201
    },
    {
      "epoch": 2.578723404255319,
      "grad_norm": 0.5031899446810962,
      "learning_rate": 5.620710549772295e-07,
      "loss": 0.6856,
      "step": 202
    },
    {
      "epoch": 2.5914893617021275,
      "grad_norm": 0.47867968621934653,
      "learning_rate": 5.281129616683167e-07,
      "loss": 0.6839,
      "step": 203
    },
    {
      "epoch": 2.604255319148936,
      "grad_norm": 0.5157873816638013,
      "learning_rate": 4.951556604879049e-07,
      "loss": 0.7011,
      "step": 204
    },
    {
      "epoch": 2.617021276595745,
      "grad_norm": 0.47874359226187657,
      "learning_rate": 4.632065271606756e-07,
      "loss": 0.7438,
      "step": 205
    },
    {
      "epoch": 2.629787234042553,
      "grad_norm": 0.46097043391449666,
      "learning_rate": 4.322727117869951e-07,
      "loss": 0.6271,
      "step": 206
    },
    {
      "epoch": 2.6425531914893616,
      "grad_norm": 0.497047485208018,
      "learning_rate": 4.0236113724274716e-07,
      "loss": 0.7694,
      "step": 207
    },
    {
      "epoch": 2.65531914893617,
      "grad_norm": 0.47599364906666225,
      "learning_rate": 3.734784976300165e-07,
      "loss": 0.8376,
      "step": 208
    },
    {
      "epoch": 2.6680851063829785,
      "grad_norm": 0.43864910005965657,
      "learning_rate": 3.4563125677897936e-07,
      "loss": 0.6243,
      "step": 209
    },
    {
      "epoch": 2.6808510638297873,
      "grad_norm": 0.4621825463897554,
      "learning_rate": 3.18825646801314e-07,
      "loss": 0.7003,
      "step": 210
    },
    {
      "epoch": 2.6936170212765957,
      "grad_norm": 0.48232343197094046,
      "learning_rate": 2.930676666954846e-07,
      "loss": 0.7206,
      "step": 211
    },
    {
      "epoch": 2.706382978723404,
      "grad_norm": 0.47104350651465504,
      "learning_rate": 2.6836308100417874e-07,
      "loss": 0.6196,
      "step": 212
    },
    {
      "epoch": 2.719148936170213,
      "grad_norm": 0.5273694728646362,
      "learning_rate": 2.447174185242324e-07,
      "loss": 0.7903,
      "step": 213
    },
    {
      "epoch": 2.731914893617021,
      "grad_norm": 0.4826068178110643,
      "learning_rate": 2.2213597106929608e-07,
      "loss": 0.6996,
      "step": 214
    },
    {
      "epoch": 2.74468085106383,
      "grad_norm": 0.49633999113379995,
      "learning_rate": 2.006237922855553e-07,
      "loss": 0.706,
      "step": 215
    },
    {
      "epoch": 2.7574468085106383,
      "grad_norm": 0.5169749746813925,
      "learning_rate": 1.801856965207338e-07,
      "loss": 0.6336,
      "step": 216
    },
    {
      "epoch": 2.7702127659574467,
      "grad_norm": 0.47272884332119,
      "learning_rate": 1.6082625774666793e-07,
      "loss": 0.6656,
      "step": 217
    },
    {
      "epoch": 2.7829787234042556,
      "grad_norm": 0.46057432003620846,
      "learning_rate": 1.4254980853566248e-07,
      "loss": 0.7266,
      "step": 218
    },
    {
      "epoch": 2.795744680851064,
      "grad_norm": 0.4893287165876055,
      "learning_rate": 1.253604390908819e-07,
      "loss": 0.6562,
      "step": 219
    },
    {
      "epoch": 2.8085106382978724,
      "grad_norm": 0.4648250703865466,
      "learning_rate": 1.0926199633097156e-07,
      "loss": 0.6747,
      "step": 220
    },
    {
      "epoch": 2.821276595744681,
      "grad_norm": 0.4968226962990124,
      "learning_rate": 9.42580830291373e-08,
      "loss": 0.6722,
      "step": 221
    },
    {
      "epoch": 2.8340425531914892,
      "grad_norm": 0.515324253421338,
      "learning_rate": 8.035205700685167e-08,
      "loss": 0.7334,
      "step": 222
    },
    {
      "epoch": 2.846808510638298,
      "grad_norm": 0.46320007701441,
      "learning_rate": 6.75470303823933e-08,
      "loss": 0.6765,
      "step": 223
    },
    {
      "epoch": 2.8595744680851065,
      "grad_norm": 0.48508235718645204,
      "learning_rate": 5.584586887435739e-08,
      "loss": 0.6531,
      "step": 224
    },
    {
      "epoch": 2.872340425531915,
      "grad_norm": 0.5147688482752547,
      "learning_rate": 4.52511911603265e-08,
      "loss": 0.7047,
      "step": 225
    },
    {
      "epoch": 2.8851063829787233,
      "grad_norm": 0.4629650700246944,
      "learning_rate": 3.576536829081323e-08,
      "loss": 0.7151,
      "step": 226
    },
    {
      "epoch": 2.8978723404255318,
      "grad_norm": 0.5345400043641719,
      "learning_rate": 2.7390523158633552e-08,
      "loss": 0.7742,
      "step": 227
    },
    {
      "epoch": 2.9106382978723406,
      "grad_norm": 0.42658862838231876,
      "learning_rate": 2.012853002380466e-08,
      "loss": 0.6379,
      "step": 228
    },
    {
      "epoch": 2.923404255319149,
      "grad_norm": 0.4761085131478779,
      "learning_rate": 1.3981014094099354e-08,
      "loss": 0.7514,
      "step": 229
    },
    {
      "epoch": 2.9361702127659575,
      "grad_norm": 0.4659539884115577,
      "learning_rate": 8.949351161324227e-09,
      "loss": 0.7313,
      "step": 230
    },
    {
      "epoch": 2.948936170212766,
      "grad_norm": 0.42768655407061484,
      "learning_rate": 5.034667293427053e-09,
      "loss": 0.5988,
      "step": 231
    },
    {
      "epoch": 2.9617021276595743,
      "grad_norm": 0.5010147150356298,
      "learning_rate": 2.237838582483387e-09,
      "loss": 0.7718,
      "step": 232
    },
    {
      "epoch": 2.974468085106383,
      "grad_norm": 0.47358962083493916,
      "learning_rate": 5.594909486328348e-10,
      "loss": 0.6915,
      "step": 233
    },
    {
      "epoch": 2.9872340425531916,
      "grad_norm": 0.4838276929973422,
      "learning_rate": 0.0,
      "loss": 0.7312,
      "step": 234
    },
    {
      "epoch": 2.9872340425531916,
      "step": 234,
      "total_flos": 180145845731328.0,
      "train_loss": 0.7942904473369957,
      "train_runtime": 3536.2592,
      "train_samples_per_second": 6.363,
      "train_steps_per_second": 0.066
    }
  ],
  "logging_steps": 1,
  "max_steps": 234,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 180145845731328.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}