| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.9904153354632586, |
| "eval_steps": 500, |
| "global_step": 312, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.009584664536741214, |
| "grad_norm": 7.631757736206055, |
| "learning_rate": 3.125e-07, |
| "loss": 1.1437, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.019169329073482427, |
| "grad_norm": 7.207672119140625, |
| "learning_rate": 6.25e-07, |
| "loss": 1.0804, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.02875399361022364, |
| "grad_norm": 8.071743965148926, |
| "learning_rate": 9.375000000000001e-07, |
| "loss": 1.155, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.038338658146964855, |
| "grad_norm": 7.237469673156738, |
| "learning_rate": 1.25e-06, |
| "loss": 1.0944, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.04792332268370607, |
| "grad_norm": 7.192005157470703, |
| "learning_rate": 1.5625e-06, |
| "loss": 1.1085, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.05750798722044728, |
| "grad_norm": 7.114604473114014, |
| "learning_rate": 1.8750000000000003e-06, |
| "loss": 1.099, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.0670926517571885, |
| "grad_norm": 5.225025177001953, |
| "learning_rate": 2.1875000000000002e-06, |
| "loss": 1.0407, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.07667731629392971, |
| "grad_norm": 4.8877787590026855, |
| "learning_rate": 2.5e-06, |
| "loss": 1.0256, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.08626198083067092, |
| "grad_norm": 3.1853082180023193, |
| "learning_rate": 2.8125e-06, |
| "loss": 0.9822, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.09584664536741214, |
| "grad_norm": 2.9565036296844482, |
| "learning_rate": 3.125e-06, |
| "loss": 1.0148, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.10543130990415335, |
| "grad_norm": 2.6891329288482666, |
| "learning_rate": 3.4375e-06, |
| "loss": 0.9717, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.11501597444089456, |
| "grad_norm": 3.217716693878174, |
| "learning_rate": 3.7500000000000005e-06, |
| "loss": 0.9585, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.12460063897763578, |
| "grad_norm": 3.6357855796813965, |
| "learning_rate": 4.0625000000000005e-06, |
| "loss": 0.9466, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.134185303514377, |
| "grad_norm": 3.2886316776275635, |
| "learning_rate": 4.3750000000000005e-06, |
| "loss": 0.9149, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.14376996805111822, |
| "grad_norm": 2.728691339492798, |
| "learning_rate": 4.6875000000000004e-06, |
| "loss": 0.9003, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.15335463258785942, |
| "grad_norm": 2.0728046894073486, |
| "learning_rate": 5e-06, |
| "loss": 0.8858, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.16293929712460065, |
| "grad_norm": 2.3851478099823, |
| "learning_rate": 5.3125e-06, |
| "loss": 0.8923, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.17252396166134185, |
| "grad_norm": 2.1146421432495117, |
| "learning_rate": 5.625e-06, |
| "loss": 0.8444, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.18210862619808307, |
| "grad_norm": 1.742587685585022, |
| "learning_rate": 5.9375e-06, |
| "loss": 0.846, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.19169329073482427, |
| "grad_norm": 1.4841606616973877, |
| "learning_rate": 6.25e-06, |
| "loss": 0.8413, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.2012779552715655, |
| "grad_norm": 1.2848784923553467, |
| "learning_rate": 6.5625e-06, |
| "loss": 0.7976, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.2108626198083067, |
| "grad_norm": 1.739992380142212, |
| "learning_rate": 6.875e-06, |
| "loss": 0.7873, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.22044728434504793, |
| "grad_norm": 1.633833408355713, |
| "learning_rate": 7.1875e-06, |
| "loss": 0.7909, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.23003194888178913, |
| "grad_norm": 1.2238140106201172, |
| "learning_rate": 7.500000000000001e-06, |
| "loss": 0.789, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.23961661341853036, |
| "grad_norm": 1.1153886318206787, |
| "learning_rate": 7.8125e-06, |
| "loss": 0.7688, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.24920127795527156, |
| "grad_norm": 1.1999198198318481, |
| "learning_rate": 8.125000000000001e-06, |
| "loss": 0.7509, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.25878594249201275, |
| "grad_norm": 1.0238111019134521, |
| "learning_rate": 8.4375e-06, |
| "loss": 0.7451, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.268370607028754, |
| "grad_norm": 0.9762786626815796, |
| "learning_rate": 8.750000000000001e-06, |
| "loss": 0.7739, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.2779552715654952, |
| "grad_norm": 0.931849479675293, |
| "learning_rate": 9.0625e-06, |
| "loss": 0.7406, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.28753993610223644, |
| "grad_norm": 0.8621435165405273, |
| "learning_rate": 9.375000000000001e-06, |
| "loss": 0.7302, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.2971246006389776, |
| "grad_norm": 0.9465786218643188, |
| "learning_rate": 9.6875e-06, |
| "loss": 0.7396, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.30670926517571884, |
| "grad_norm": 0.9201045632362366, |
| "learning_rate": 1e-05, |
| "loss": 0.7593, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.31629392971246006, |
| "grad_norm": 0.7977403402328491, |
| "learning_rate": 9.999685283773504e-06, |
| "loss": 0.7255, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.3258785942492013, |
| "grad_norm": 0.8451075553894043, |
| "learning_rate": 9.998741174712534e-06, |
| "loss": 0.7172, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.3354632587859425, |
| "grad_norm": 0.9500023126602173, |
| "learning_rate": 9.997167791667668e-06, |
| "loss": 0.7309, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.3450479233226837, |
| "grad_norm": 0.8545159101486206, |
| "learning_rate": 9.994965332706574e-06, |
| "loss": 0.7316, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.3546325878594249, |
| "grad_norm": 0.7496842741966248, |
| "learning_rate": 9.992134075089085e-06, |
| "loss": 0.7276, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.36421725239616615, |
| "grad_norm": 0.9326169490814209, |
| "learning_rate": 9.98867437523228e-06, |
| "loss": 0.7029, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.3738019169329074, |
| "grad_norm": 0.8307452201843262, |
| "learning_rate": 9.984586668665641e-06, |
| "loss": 0.7111, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.38338658146964855, |
| "grad_norm": 0.8053420782089233, |
| "learning_rate": 9.979871469976197e-06, |
| "loss": 0.733, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.3929712460063898, |
| "grad_norm": 0.81968092918396, |
| "learning_rate": 9.974529372743762e-06, |
| "loss": 0.7054, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.402555910543131, |
| "grad_norm": 0.8351101875305176, |
| "learning_rate": 9.968561049466214e-06, |
| "loss": 0.6961, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.41214057507987223, |
| "grad_norm": 0.8805105686187744, |
| "learning_rate": 9.961967251474823e-06, |
| "loss": 0.7093, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.4217252396166134, |
| "grad_norm": 0.7041685581207275, |
| "learning_rate": 9.954748808839675e-06, |
| "loss": 0.6822, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.43130990415335463, |
| "grad_norm": 0.7762527465820312, |
| "learning_rate": 9.946906630265184e-06, |
| "loss": 0.693, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.44089456869009586, |
| "grad_norm": 0.7489679455757141, |
| "learning_rate": 9.938441702975689e-06, |
| "loss": 0.7289, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.4504792332268371, |
| "grad_norm": 0.8490743041038513, |
| "learning_rate": 9.92935509259118e-06, |
| "loss": 0.6953, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.46006389776357826, |
| "grad_norm": 0.7434765696525574, |
| "learning_rate": 9.91964794299315e-06, |
| "loss": 0.6735, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.4696485623003195, |
| "grad_norm": 0.7000515460968018, |
| "learning_rate": 9.909321476180594e-06, |
| "loss": 0.6932, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.4792332268370607, |
| "grad_norm": 0.891094446182251, |
| "learning_rate": 9.898376992116179e-06, |
| "loss": 0.6902, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.48881789137380194, |
| "grad_norm": 0.711914598941803, |
| "learning_rate": 9.886815868562596e-06, |
| "loss": 0.6869, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.4984025559105431, |
| "grad_norm": 0.6698287725448608, |
| "learning_rate": 9.874639560909118e-06, |
| "loss": 0.6825, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.5079872204472844, |
| "grad_norm": 0.6114389300346375, |
| "learning_rate": 9.861849601988384e-06, |
| "loss": 0.6854, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.5175718849840255, |
| "grad_norm": 0.7109593152999878, |
| "learning_rate": 9.848447601883436e-06, |
| "loss": 0.6904, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.5271565495207667, |
| "grad_norm": 0.748586893081665, |
| "learning_rate": 9.834435247725032e-06, |
| "loss": 0.684, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.536741214057508, |
| "grad_norm": 0.7264263033866882, |
| "learning_rate": 9.819814303479268e-06, |
| "loss": 0.6821, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.5463258785942492, |
| "grad_norm": 0.7709238529205322, |
| "learning_rate": 9.804586609725499e-06, |
| "loss": 0.7041, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.5559105431309904, |
| "grad_norm": 0.7797627449035645, |
| "learning_rate": 9.788754083424654e-06, |
| "loss": 0.6591, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.5654952076677316, |
| "grad_norm": 0.6770584583282471, |
| "learning_rate": 9.772318717677905e-06, |
| "loss": 0.668, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.5750798722044729, |
| "grad_norm": 0.7253358364105225, |
| "learning_rate": 9.755282581475769e-06, |
| "loss": 0.6889, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.5846645367412141, |
| "grad_norm": 0.8044716715812683, |
| "learning_rate": 9.737647819437645e-06, |
| "loss": 0.6824, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.5942492012779552, |
| "grad_norm": 0.7814176082611084, |
| "learning_rate": 9.719416651541839e-06, |
| "loss": 0.6652, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.6038338658146964, |
| "grad_norm": 0.8672887086868286, |
| "learning_rate": 9.700591372846096e-06, |
| "loss": 0.6681, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.6134185303514377, |
| "grad_norm": 0.686853289604187, |
| "learning_rate": 9.681174353198687e-06, |
| "loss": 0.6741, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.6230031948881789, |
| "grad_norm": 0.7145909667015076, |
| "learning_rate": 9.661168036940071e-06, |
| "loss": 0.6763, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.6325878594249201, |
| "grad_norm": 0.7959132790565491, |
| "learning_rate": 9.640574942595195e-06, |
| "loss": 0.6863, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.6421725239616614, |
| "grad_norm": 0.6892458200454712, |
| "learning_rate": 9.619397662556434e-06, |
| "loss": 0.6772, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.6517571884984026, |
| "grad_norm": 0.7975493669509888, |
| "learning_rate": 9.597638862757255e-06, |
| "loss": 0.6726, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.6613418530351438, |
| "grad_norm": 0.7013779282569885, |
| "learning_rate": 9.5753012823366e-06, |
| "loss": 0.6603, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.670926517571885, |
| "grad_norm": 0.7355247139930725, |
| "learning_rate": 9.552387733294081e-06, |
| "loss": 0.6855, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.6805111821086262, |
| "grad_norm": 0.7918930649757385, |
| "learning_rate": 9.528901100135971e-06, |
| "loss": 0.6717, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.6900958466453674, |
| "grad_norm": 0.7762782573699951, |
| "learning_rate": 9.504844339512096e-06, |
| "loss": 0.6842, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.6996805111821086, |
| "grad_norm": 0.7748196721076965, |
| "learning_rate": 9.480220479843627e-06, |
| "loss": 0.666, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.7092651757188498, |
| "grad_norm": 0.7611200213432312, |
| "learning_rate": 9.45503262094184e-06, |
| "loss": 0.6856, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.7188498402555911, |
| "grad_norm": 0.8682624697685242, |
| "learning_rate": 9.4292839336179e-06, |
| "loss": 0.6581, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.7284345047923323, |
| "grad_norm": 0.7679495811462402, |
| "learning_rate": 9.40297765928369e-06, |
| "loss": 0.6734, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.7380191693290735, |
| "grad_norm": 0.7918921709060669, |
| "learning_rate": 9.376117109543769e-06, |
| "loss": 0.6694, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.7476038338658147, |
| "grad_norm": 0.9087303280830383, |
| "learning_rate": 9.348705665778479e-06, |
| "loss": 0.6755, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.7571884984025559, |
| "grad_norm": 0.6955181360244751, |
| "learning_rate": 9.320746778718274e-06, |
| "loss": 0.6469, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.7667731629392971, |
| "grad_norm": 0.7844282388687134, |
| "learning_rate": 9.292243968009332e-06, |
| "loss": 0.6723, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.7763578274760383, |
| "grad_norm": 0.7338053584098816, |
| "learning_rate": 9.263200821770462e-06, |
| "loss": 0.6757, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.7859424920127795, |
| "grad_norm": 0.7353216409683228, |
| "learning_rate": 9.233620996141421e-06, |
| "loss": 0.6727, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.7955271565495208, |
| "grad_norm": 0.7343015670776367, |
| "learning_rate": 9.203508214822652e-06, |
| "loss": 0.6635, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.805111821086262, |
| "grad_norm": 0.792455792427063, |
| "learning_rate": 9.172866268606514e-06, |
| "loss": 0.6389, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.8146964856230032, |
| "grad_norm": 0.7722944617271423, |
| "learning_rate": 9.141699014900084e-06, |
| "loss": 0.6848, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.8242811501597445, |
| "grad_norm": 0.7590317130088806, |
| "learning_rate": 9.110010377239552e-06, |
| "loss": 0.6687, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.8338658146964856, |
| "grad_norm": 0.6646780371665955, |
| "learning_rate": 9.077804344796302e-06, |
| "loss": 0.6772, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.8434504792332268, |
| "grad_norm": 0.7406274676322937, |
| "learning_rate": 9.045084971874738e-06, |
| "loss": 0.649, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.853035143769968, |
| "grad_norm": 0.7770557999610901, |
| "learning_rate": 9.011856377401891e-06, |
| "loss": 0.6667, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.8626198083067093, |
| "grad_norm": 0.7461346387863159, |
| "learning_rate": 8.978122744408905e-06, |
| "loss": 0.6846, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.8722044728434505, |
| "grad_norm": 0.8803783655166626, |
| "learning_rate": 8.943888319504456e-06, |
| "loss": 0.6694, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.8817891373801917, |
| "grad_norm": 0.7138260006904602, |
| "learning_rate": 8.90915741234015e-06, |
| "loss": 0.6731, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.8913738019169329, |
| "grad_norm": 0.7463406324386597, |
| "learning_rate": 8.873934395068006e-06, |
| "loss": 0.6445, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.9009584664536742, |
| "grad_norm": 0.7805307507514954, |
| "learning_rate": 8.838223701790057e-06, |
| "loss": 0.6833, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.9105431309904153, |
| "grad_norm": 0.7624490857124329, |
| "learning_rate": 8.802029828000157e-06, |
| "loss": 0.6291, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.9201277955271565, |
| "grad_norm": 0.7260795831680298, |
| "learning_rate": 8.765357330018056e-06, |
| "loss": 0.6729, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.9297124600638977, |
| "grad_norm": 0.8511263132095337, |
| "learning_rate": 8.728210824415829e-06, |
| "loss": 0.6617, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.939297124600639, |
| "grad_norm": 0.7045103907585144, |
| "learning_rate": 8.690594987436705e-06, |
| "loss": 0.6557, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.9488817891373802, |
| "grad_norm": 0.7786133885383606, |
| "learning_rate": 8.652514554406388e-06, |
| "loss": 0.6525, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.9584664536741214, |
| "grad_norm": 0.9434441924095154, |
| "learning_rate": 8.613974319136959e-06, |
| "loss": 0.6618, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.9680511182108626, |
| "grad_norm": 0.6732690930366516, |
| "learning_rate": 8.574979133323378e-06, |
| "loss": 0.6403, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.9776357827476039, |
| "grad_norm": 0.8073576092720032, |
| "learning_rate": 8.535533905932739e-06, |
| "loss": 0.6523, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.987220447284345, |
| "grad_norm": 0.7025145292282104, |
| "learning_rate": 8.495643602586287e-06, |
| "loss": 0.6454, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.9968051118210862, |
| "grad_norm": 0.7717574238777161, |
| "learning_rate": 8.455313244934324e-06, |
| "loss": 0.6529, |
| "step": 104 |
| }, |
| { |
| "epoch": 1.0063897763578276, |
| "grad_norm": 0.6937882900238037, |
| "learning_rate": 8.414547910024035e-06, |
| "loss": 0.6219, |
| "step": 105 |
| }, |
| { |
| "epoch": 1.0159744408945688, |
| "grad_norm": 0.6880476474761963, |
| "learning_rate": 8.373352729660373e-06, |
| "loss": 0.5887, |
| "step": 106 |
| }, |
| { |
| "epoch": 1.0255591054313098, |
| "grad_norm": 0.7143478393554688, |
| "learning_rate": 8.331732889760021e-06, |
| "loss": 0.5787, |
| "step": 107 |
| }, |
| { |
| "epoch": 1.035143769968051, |
| "grad_norm": 0.7447161674499512, |
| "learning_rate": 8.289693629698564e-06, |
| "loss": 0.5986, |
| "step": 108 |
| }, |
| { |
| "epoch": 1.0447284345047922, |
| "grad_norm": 0.655197262763977, |
| "learning_rate": 8.247240241650918e-06, |
| "loss": 0.5626, |
| "step": 109 |
| }, |
| { |
| "epoch": 1.0543130990415335, |
| "grad_norm": 0.7953078746795654, |
| "learning_rate": 8.204378069925121e-06, |
| "loss": 0.5935, |
| "step": 110 |
| }, |
| { |
| "epoch": 1.0638977635782747, |
| "grad_norm": 0.712658166885376, |
| "learning_rate": 8.16111251028955e-06, |
| "loss": 0.5891, |
| "step": 111 |
| }, |
| { |
| "epoch": 1.073482428115016, |
| "grad_norm": 0.6255548000335693, |
| "learning_rate": 8.117449009293668e-06, |
| "loss": 0.5729, |
| "step": 112 |
| }, |
| { |
| "epoch": 1.0830670926517572, |
| "grad_norm": 0.6540143489837646, |
| "learning_rate": 8.073393063582386e-06, |
| "loss": 0.5781, |
| "step": 113 |
| }, |
| { |
| "epoch": 1.0926517571884984, |
| "grad_norm": 0.6459587812423706, |
| "learning_rate": 8.0289502192041e-06, |
| "loss": 0.5843, |
| "step": 114 |
| }, |
| { |
| "epoch": 1.1022364217252396, |
| "grad_norm": 0.7313612103462219, |
| "learning_rate": 7.984126070912519e-06, |
| "loss": 0.5648, |
| "step": 115 |
| }, |
| { |
| "epoch": 1.1118210862619808, |
| "grad_norm": 0.6702539324760437, |
| "learning_rate": 7.938926261462366e-06, |
| "loss": 0.5617, |
| "step": 116 |
| }, |
| { |
| "epoch": 1.121405750798722, |
| "grad_norm": 0.6335576176643372, |
| "learning_rate": 7.89335648089903e-06, |
| "loss": 0.5983, |
| "step": 117 |
| }, |
| { |
| "epoch": 1.1309904153354633, |
| "grad_norm": 0.7993137836456299, |
| "learning_rate": 7.84742246584226e-06, |
| "loss": 0.6143, |
| "step": 118 |
| }, |
| { |
| "epoch": 1.1405750798722045, |
| "grad_norm": 0.7164519429206848, |
| "learning_rate": 7.801129998764014e-06, |
| "loss": 0.5664, |
| "step": 119 |
| }, |
| { |
| "epoch": 1.1501597444089458, |
| "grad_norm": 0.7602349519729614, |
| "learning_rate": 7.754484907260513e-06, |
| "loss": 0.5801, |
| "step": 120 |
| }, |
| { |
| "epoch": 1.159744408945687, |
| "grad_norm": 0.7163518071174622, |
| "learning_rate": 7.70749306331863e-06, |
| "loss": 0.5573, |
| "step": 121 |
| }, |
| { |
| "epoch": 1.1693290734824282, |
| "grad_norm": 0.6692156195640564, |
| "learning_rate": 7.660160382576683e-06, |
| "loss": 0.575, |
| "step": 122 |
| }, |
| { |
| "epoch": 1.1789137380191694, |
| "grad_norm": 0.8085399866104126, |
| "learning_rate": 7.612492823579744e-06, |
| "loss": 0.5878, |
| "step": 123 |
| }, |
| { |
| "epoch": 1.1884984025559104, |
| "grad_norm": 0.6267865896224976, |
| "learning_rate": 7.564496387029532e-06, |
| "loss": 0.5486, |
| "step": 124 |
| }, |
| { |
| "epoch": 1.1980830670926517, |
| "grad_norm": 0.6363678574562073, |
| "learning_rate": 7.516177115029002e-06, |
| "loss": 0.5884, |
| "step": 125 |
| }, |
| { |
| "epoch": 1.207667731629393, |
| "grad_norm": 0.7670935392379761, |
| "learning_rate": 7.467541090321735e-06, |
| "loss": 0.5565, |
| "step": 126 |
| }, |
| { |
| "epoch": 1.2172523961661341, |
| "grad_norm": 0.640592634677887, |
| "learning_rate": 7.4185944355261996e-06, |
| "loss": 0.5943, |
| "step": 127 |
| }, |
| { |
| "epoch": 1.2268370607028753, |
| "grad_norm": 0.7349753379821777, |
| "learning_rate": 7.369343312364994e-06, |
| "loss": 0.5965, |
| "step": 128 |
| }, |
| { |
| "epoch": 1.2364217252396166, |
| "grad_norm": 0.7965679168701172, |
| "learning_rate": 7.319793920889171e-06, |
| "loss": 0.5895, |
| "step": 129 |
| }, |
| { |
| "epoch": 1.2460063897763578, |
| "grad_norm": 0.5832921862602234, |
| "learning_rate": 7.269952498697734e-06, |
| "loss": 0.5785, |
| "step": 130 |
| }, |
| { |
| "epoch": 1.255591054313099, |
| "grad_norm": 0.7845919132232666, |
| "learning_rate": 7.219825320152411e-06, |
| "loss": 0.5873, |
| "step": 131 |
| }, |
| { |
| "epoch": 1.2651757188498403, |
| "grad_norm": 0.6836994886398315, |
| "learning_rate": 7.169418695587791e-06, |
| "loss": 0.5811, |
| "step": 132 |
| }, |
| { |
| "epoch": 1.2747603833865815, |
| "grad_norm": 0.5873911380767822, |
| "learning_rate": 7.118738970516944e-06, |
| "loss": 0.5854, |
| "step": 133 |
| }, |
| { |
| "epoch": 1.2843450479233227, |
| "grad_norm": 0.7451477646827698, |
| "learning_rate": 7.067792524832604e-06, |
| "loss": 0.5859, |
| "step": 134 |
| }, |
| { |
| "epoch": 1.293929712460064, |
| "grad_norm": 0.6415753364562988, |
| "learning_rate": 7.016585772004026e-06, |
| "loss": 0.5812, |
| "step": 135 |
| }, |
| { |
| "epoch": 1.3035143769968052, |
| "grad_norm": 0.6458070874214172, |
| "learning_rate": 6.965125158269619e-06, |
| "loss": 0.5712, |
| "step": 136 |
| }, |
| { |
| "epoch": 1.3130990415335464, |
| "grad_norm": 0.7372552752494812, |
| "learning_rate": 6.913417161825449e-06, |
| "loss": 0.5692, |
| "step": 137 |
| }, |
| { |
| "epoch": 1.3226837060702876, |
| "grad_norm": 0.6712161302566528, |
| "learning_rate": 6.8614682920097265e-06, |
| "loss": 0.5681, |
| "step": 138 |
| }, |
| { |
| "epoch": 1.3322683706070286, |
| "grad_norm": 0.6904409527778625, |
| "learning_rate": 6.809285088483361e-06, |
| "loss": 0.576, |
| "step": 139 |
| }, |
| { |
| "epoch": 1.34185303514377, |
| "grad_norm": 0.636581540107727, |
| "learning_rate": 6.7568741204067145e-06, |
| "loss": 0.5779, |
| "step": 140 |
| }, |
| { |
| "epoch": 1.351437699680511, |
| "grad_norm": 0.6586715579032898, |
| "learning_rate": 6.704241985612625e-06, |
| "loss": 0.5499, |
| "step": 141 |
| }, |
| { |
| "epoch": 1.3610223642172525, |
| "grad_norm": 0.6217679381370544, |
| "learning_rate": 6.651395309775837e-06, |
| "loss": 0.5603, |
| "step": 142 |
| }, |
| { |
| "epoch": 1.3706070287539935, |
| "grad_norm": 0.7175468802452087, |
| "learning_rate": 6.598340745578908e-06, |
| "loss": 0.5949, |
| "step": 143 |
| }, |
| { |
| "epoch": 1.3801916932907348, |
| "grad_norm": 0.6404513716697693, |
| "learning_rate": 6.545084971874738e-06, |
| "loss": 0.5864, |
| "step": 144 |
| }, |
| { |
| "epoch": 1.389776357827476, |
| "grad_norm": 0.6180385947227478, |
| "learning_rate": 6.491634692845781e-06, |
| "loss": 0.5977, |
| "step": 145 |
| }, |
| { |
| "epoch": 1.3993610223642172, |
| "grad_norm": 0.6496903896331787, |
| "learning_rate": 6.437996637160086e-06, |
| "loss": 0.5824, |
| "step": 146 |
| }, |
| { |
| "epoch": 1.4089456869009584, |
| "grad_norm": 0.6618816256523132, |
| "learning_rate": 6.384177557124247e-06, |
| "loss": 0.5648, |
| "step": 147 |
| }, |
| { |
| "epoch": 1.4185303514376997, |
| "grad_norm": 0.6388576626777649, |
| "learning_rate": 6.330184227833376e-06, |
| "loss": 0.5641, |
| "step": 148 |
| }, |
| { |
| "epoch": 1.428115015974441, |
| "grad_norm": 0.6645902991294861, |
| "learning_rate": 6.276023446318214e-06, |
| "loss": 0.5642, |
| "step": 149 |
| }, |
| { |
| "epoch": 1.4376996805111821, |
| "grad_norm": 0.6369603872299194, |
| "learning_rate": 6.2217020306894705e-06, |
| "loss": 0.5672, |
| "step": 150 |
| }, |
| { |
| "epoch": 1.4472843450479234, |
| "grad_norm": 0.7125694751739502, |
| "learning_rate": 6.1672268192795285e-06, |
| "loss": 0.5738, |
| "step": 151 |
| }, |
| { |
| "epoch": 1.4568690095846646, |
| "grad_norm": 0.6303924918174744, |
| "learning_rate": 6.112604669781572e-06, |
| "loss": 0.574, |
| "step": 152 |
| }, |
| { |
| "epoch": 1.4664536741214058, |
| "grad_norm": 0.5970083475112915, |
| "learning_rate": 6.057842458386315e-06, |
| "loss": 0.5747, |
| "step": 153 |
| }, |
| { |
| "epoch": 1.476038338658147, |
| "grad_norm": 0.6371821761131287, |
| "learning_rate": 6.002947078916365e-06, |
| "loss": 0.5653, |
| "step": 154 |
| }, |
| { |
| "epoch": 1.4856230031948883, |
| "grad_norm": 0.7160565257072449, |
| "learning_rate": 5.947925441958393e-06, |
| "loss": 0.5493, |
| "step": 155 |
| }, |
| { |
| "epoch": 1.4952076677316293, |
| "grad_norm": 0.6290352940559387, |
| "learning_rate": 5.892784473993184e-06, |
| "loss": 0.5723, |
| "step": 156 |
| }, |
| { |
| "epoch": 1.5047923322683707, |
| "grad_norm": 0.5443238615989685, |
| "learning_rate": 5.837531116523683e-06, |
| "loss": 0.5914, |
| "step": 157 |
| }, |
| { |
| "epoch": 1.5143769968051117, |
| "grad_norm": 0.6566057205200195, |
| "learning_rate": 5.782172325201155e-06, |
| "loss": 0.5577, |
| "step": 158 |
| }, |
| { |
| "epoch": 1.5239616613418532, |
| "grad_norm": 0.6568493247032166, |
| "learning_rate": 5.726715068949564e-06, |
| "loss": 0.5672, |
| "step": 159 |
| }, |
| { |
| "epoch": 1.5335463258785942, |
| "grad_norm": 0.6268693208694458, |
| "learning_rate": 5.671166329088278e-06, |
| "loss": 0.5853, |
| "step": 160 |
| }, |
| { |
| "epoch": 1.5431309904153354, |
| "grad_norm": 0.6314179301261902, |
| "learning_rate": 5.615533098453215e-06, |
| "loss": 0.5599, |
| "step": 161 |
| }, |
| { |
| "epoch": 1.5527156549520766, |
| "grad_norm": 0.5839089155197144, |
| "learning_rate": 5.559822380516539e-06, |
| "loss": 0.5578, |
| "step": 162 |
| }, |
| { |
| "epoch": 1.5623003194888179, |
| "grad_norm": 0.5961830019950867, |
| "learning_rate": 5.504041188505022e-06, |
| "loss": 0.5634, |
| "step": 163 |
| }, |
| { |
| "epoch": 1.571884984025559, |
| "grad_norm": 0.6408848166465759, |
| "learning_rate": 5.448196544517168e-06, |
| "loss": 0.5677, |
| "step": 164 |
| }, |
| { |
| "epoch": 1.5814696485623003, |
| "grad_norm": 0.6148958802223206, |
| "learning_rate": 5.392295478639226e-06, |
| "loss": 0.5647, |
| "step": 165 |
| }, |
| { |
| "epoch": 1.5910543130990416, |
| "grad_norm": 0.5794315338134766, |
| "learning_rate": 5.336345028060199e-06, |
| "loss": 0.5792, |
| "step": 166 |
| }, |
| { |
| "epoch": 1.6006389776357828, |
| "grad_norm": 0.60219806432724, |
| "learning_rate": 5.2803522361859596e-06, |
| "loss": 0.5477, |
| "step": 167 |
| }, |
| { |
| "epoch": 1.610223642172524, |
| "grad_norm": 0.6012296676635742, |
| "learning_rate": 5.224324151752575e-06, |
| "loss": 0.583, |
| "step": 168 |
| }, |
| { |
| "epoch": 1.619808306709265, |
| "grad_norm": 0.6214751601219177, |
| "learning_rate": 5.168267827938971e-06, |
| "loss": 0.5439, |
| "step": 169 |
| }, |
| { |
| "epoch": 1.6293929712460065, |
| "grad_norm": 0.6018662452697754, |
| "learning_rate": 5.112190321479026e-06, |
| "loss": 0.5671, |
| "step": 170 |
| }, |
| { |
| "epoch": 1.6389776357827475, |
| "grad_norm": 0.5933774709701538, |
| "learning_rate": 5.05609869177323e-06, |
| "loss": 0.5886, |
| "step": 171 |
| }, |
| { |
| "epoch": 1.648562300319489, |
| "grad_norm": 0.5990563035011292, |
| "learning_rate": 5e-06, |
| "loss": 0.5653, |
| "step": 172 |
| }, |
| { |
| "epoch": 1.65814696485623, |
| "grad_norm": 0.5904830694198608, |
| "learning_rate": 4.943901308226771e-06, |
| "loss": 0.5602, |
| "step": 173 |
| }, |
| { |
| "epoch": 1.6677316293929714, |
| "grad_norm": 0.6122473478317261, |
| "learning_rate": 4.887809678520976e-06, |
| "loss": 0.5801, |
| "step": 174 |
| }, |
| { |
| "epoch": 1.6773162939297124, |
| "grad_norm": 0.6307440400123596, |
| "learning_rate": 4.831732172061032e-06, |
| "loss": 0.5842, |
| "step": 175 |
| }, |
| { |
| "epoch": 1.6869009584664538, |
| "grad_norm": 0.6917486190795898, |
| "learning_rate": 4.775675848247427e-06, |
| "loss": 0.5595, |
| "step": 176 |
| }, |
| { |
| "epoch": 1.6964856230031948, |
| "grad_norm": 0.6185805797576904, |
| "learning_rate": 4.719647763814041e-06, |
| "loss": 0.5727, |
| "step": 177 |
| }, |
| { |
| "epoch": 1.706070287539936, |
| "grad_norm": 0.5742789506912231, |
| "learning_rate": 4.663654971939802e-06, |
| "loss": 0.5575, |
| "step": 178 |
| }, |
| { |
| "epoch": 1.7156549520766773, |
| "grad_norm": 0.6561394333839417, |
| "learning_rate": 4.6077045213607765e-06, |
| "loss": 0.5706, |
| "step": 179 |
| }, |
| { |
| "epoch": 1.7252396166134185, |
| "grad_norm": 0.6904816627502441, |
| "learning_rate": 4.551803455482833e-06, |
| "loss": 0.6007, |
| "step": 180 |
| }, |
| { |
| "epoch": 1.7348242811501597, |
| "grad_norm": 0.6116853356361389, |
| "learning_rate": 4.4959588114949785e-06, |
| "loss": 0.5686, |
| "step": 181 |
| }, |
| { |
| "epoch": 1.744408945686901, |
| "grad_norm": 0.6065500378608704, |
| "learning_rate": 4.4401776194834615e-06, |
| "loss": 0.5829, |
| "step": 182 |
| }, |
| { |
| "epoch": 1.7539936102236422, |
| "grad_norm": 0.6076284050941467, |
| "learning_rate": 4.384466901546786e-06, |
| "loss": 0.5745, |
| "step": 183 |
| }, |
| { |
| "epoch": 1.7635782747603834, |
| "grad_norm": 0.5575659275054932, |
| "learning_rate": 4.3288336709117246e-06, |
| "loss": 0.5708, |
| "step": 184 |
| }, |
| { |
| "epoch": 1.7731629392971247, |
| "grad_norm": 0.5849297046661377, |
| "learning_rate": 4.273284931050438e-06, |
| "loss": 0.5532, |
| "step": 185 |
| }, |
| { |
| "epoch": 1.7827476038338657, |
| "grad_norm": 0.5678250193595886, |
| "learning_rate": 4.217827674798845e-06, |
| "loss": 0.5676, |
| "step": 186 |
| }, |
| { |
| "epoch": 1.792332268370607, |
| "grad_norm": 0.5182955265045166, |
| "learning_rate": 4.162468883476319e-06, |
| "loss": 0.5656, |
| "step": 187 |
| }, |
| { |
| "epoch": 1.8019169329073481, |
| "grad_norm": 0.6109117269515991, |
| "learning_rate": 4.107215526006818e-06, |
| "loss": 0.5734, |
| "step": 188 |
| }, |
| { |
| "epoch": 1.8115015974440896, |
| "grad_norm": 0.6146546006202698, |
| "learning_rate": 4.052074558041608e-06, |
| "loss": 0.5891, |
| "step": 189 |
| }, |
| { |
| "epoch": 1.8210862619808306, |
| "grad_norm": 0.5946270823478699, |
| "learning_rate": 3.997052921083637e-06, |
| "loss": 0.5838, |
| "step": 190 |
| }, |
| { |
| "epoch": 1.830670926517572, |
| "grad_norm": 0.5628454089164734, |
| "learning_rate": 3.9421575416136866e-06, |
| "loss": 0.5682, |
| "step": 191 |
| }, |
| { |
| "epoch": 1.840255591054313, |
| "grad_norm": 0.5776045322418213, |
| "learning_rate": 3.887395330218429e-06, |
| "loss": 0.5774, |
| "step": 192 |
| }, |
| { |
| "epoch": 1.8498402555910545, |
| "grad_norm": 0.5610283017158508, |
| "learning_rate": 3.832773180720475e-06, |
| "loss": 0.5755, |
| "step": 193 |
| }, |
| { |
| "epoch": 1.8594249201277955, |
| "grad_norm": 0.5823736190795898, |
| "learning_rate": 3.778297969310529e-06, |
| "loss": 0.5894, |
| "step": 194 |
| }, |
| { |
| "epoch": 1.8690095846645367, |
| "grad_norm": 0.5524364709854126, |
| "learning_rate": 3.723976553681787e-06, |
| "loss": 0.5881, |
| "step": 195 |
| }, |
| { |
| "epoch": 1.878594249201278, |
| "grad_norm": 0.6131855249404907, |
| "learning_rate": 3.669815772166625e-06, |
| "loss": 0.5699, |
| "step": 196 |
| }, |
| { |
| "epoch": 1.8881789137380192, |
| "grad_norm": 0.6087161898612976, |
| "learning_rate": 3.6158224428757538e-06, |
| "loss": 0.5747, |
| "step": 197 |
| }, |
| { |
| "epoch": 1.8977635782747604, |
| "grad_norm": 0.591026782989502, |
| "learning_rate": 3.562003362839914e-06, |
| "loss": 0.5596, |
| "step": 198 |
| }, |
| { |
| "epoch": 1.9073482428115016, |
| "grad_norm": 0.5625490546226501, |
| "learning_rate": 3.50836530715422e-06, |
| "loss": 0.5709, |
| "step": 199 |
| }, |
| { |
| "epoch": 1.9169329073482428, |
| "grad_norm": 0.5438273549079895, |
| "learning_rate": 3.4549150281252635e-06, |
| "loss": 0.5771, |
| "step": 200 |
| }, |
| { |
| "epoch": 1.926517571884984, |
| "grad_norm": 0.5201634764671326, |
| "learning_rate": 3.4016592544210937e-06, |
| "loss": 0.5763, |
| "step": 201 |
| }, |
| { |
| "epoch": 1.9361022364217253, |
| "grad_norm": 0.6035382151603699, |
| "learning_rate": 3.3486046902241663e-06, |
| "loss": 0.5495, |
| "step": 202 |
| }, |
| { |
| "epoch": 1.9456869009584663, |
| "grad_norm": 0.539597749710083, |
| "learning_rate": 3.295758014387375e-06, |
| "loss": 0.5836, |
| "step": 203 |
| }, |
| { |
| "epoch": 1.9552715654952078, |
| "grad_norm": 0.4887038469314575, |
| "learning_rate": 3.2431258795932863e-06, |
| "loss": 0.5865, |
| "step": 204 |
| }, |
| { |
| "epoch": 1.9648562300319488, |
| "grad_norm": 0.5846530795097351, |
| "learning_rate": 3.1907149115166403e-06, |
| "loss": 0.5638, |
| "step": 205 |
| }, |
| { |
| "epoch": 1.9744408945686902, |
| "grad_norm": 0.5618496537208557, |
| "learning_rate": 3.1385317079902743e-06, |
| "loss": 0.5784, |
| "step": 206 |
| }, |
| { |
| "epoch": 1.9840255591054312, |
| "grad_norm": 0.5010558366775513, |
| "learning_rate": 3.0865828381745515e-06, |
| "loss": 0.572, |
| "step": 207 |
| }, |
| { |
| "epoch": 1.9936102236421727, |
| "grad_norm": 0.5525842308998108, |
| "learning_rate": 3.0348748417303826e-06, |
| "loss": 0.5578, |
| "step": 208 |
| }, |
| { |
| "epoch": 2.0031948881789137, |
| "grad_norm": 0.5372030735015869, |
| "learning_rate": 2.9834142279959754e-06, |
| "loss": 0.5419, |
| "step": 209 |
| }, |
| { |
| "epoch": 2.012779552715655, |
| "grad_norm": 0.5794517397880554, |
| "learning_rate": 2.932207475167398e-06, |
| "loss": 0.5237, |
| "step": 210 |
| }, |
| { |
| "epoch": 2.022364217252396, |
| "grad_norm": 0.5482734441757202, |
| "learning_rate": 2.8812610294830568e-06, |
| "loss": 0.523, |
| "step": 211 |
| }, |
| { |
| "epoch": 2.0319488817891376, |
| "grad_norm": 0.49512386322021484, |
| "learning_rate": 2.83058130441221e-06, |
| "loss": 0.5488, |
| "step": 212 |
| }, |
| { |
| "epoch": 2.0415335463258786, |
| "grad_norm": 0.5444460511207581, |
| "learning_rate": 2.7801746798475905e-06, |
| "loss": 0.5362, |
| "step": 213 |
| }, |
| { |
| "epoch": 2.0511182108626196, |
| "grad_norm": 0.5378485321998596, |
| "learning_rate": 2.7300475013022666e-06, |
| "loss": 0.5117, |
| "step": 214 |
| }, |
| { |
| "epoch": 2.060702875399361, |
| "grad_norm": 0.570003092288971, |
| "learning_rate": 2.6802060791108304e-06, |
| "loss": 0.4838, |
| "step": 215 |
| }, |
| { |
| "epoch": 2.070287539936102, |
| "grad_norm": 0.6234847903251648, |
| "learning_rate": 2.6306566876350072e-06, |
| "loss": 0.5046, |
| "step": 216 |
| }, |
| { |
| "epoch": 2.0798722044728435, |
| "grad_norm": 0.5408498644828796, |
| "learning_rate": 2.5814055644738013e-06, |
| "loss": 0.5185, |
| "step": 217 |
| }, |
| { |
| "epoch": 2.0894568690095845, |
| "grad_norm": 0.5204379558563232, |
| "learning_rate": 2.532458909678266e-06, |
| "loss": 0.496, |
| "step": 218 |
| }, |
| { |
| "epoch": 2.099041533546326, |
| "grad_norm": 0.5795621871948242, |
| "learning_rate": 2.483822884971e-06, |
| "loss": 0.5253, |
| "step": 219 |
| }, |
| { |
| "epoch": 2.108626198083067, |
| "grad_norm": 0.5828584432601929, |
| "learning_rate": 2.43550361297047e-06, |
| "loss": 0.5122, |
| "step": 220 |
| }, |
| { |
| "epoch": 2.1182108626198084, |
| "grad_norm": 0.6278659105300903, |
| "learning_rate": 2.387507176420256e-06, |
| "loss": 0.4836, |
| "step": 221 |
| }, |
| { |
| "epoch": 2.1277955271565494, |
| "grad_norm": 0.5321851968765259, |
| "learning_rate": 2.339839617423318e-06, |
| "loss": 0.5254, |
| "step": 222 |
| }, |
| { |
| "epoch": 2.137380191693291, |
| "grad_norm": 0.5402694940567017, |
| "learning_rate": 2.2925069366813718e-06, |
| "loss": 0.5279, |
| "step": 223 |
| }, |
| { |
| "epoch": 2.146964856230032, |
| "grad_norm": 0.528405487537384, |
| "learning_rate": 2.245515092739488e-06, |
| "loss": 0.4915, |
| "step": 224 |
| }, |
| { |
| "epoch": 2.1565495207667733, |
| "grad_norm": 0.5780546069145203, |
| "learning_rate": 2.1988700012359865e-06, |
| "loss": 0.5057, |
| "step": 225 |
| }, |
| { |
| "epoch": 2.1661341853035143, |
| "grad_norm": 0.5286846160888672, |
| "learning_rate": 2.1525775341577404e-06, |
| "loss": 0.4826, |
| "step": 226 |
| }, |
| { |
| "epoch": 2.1757188498402558, |
| "grad_norm": 0.5604357719421387, |
| "learning_rate": 2.1066435191009717e-06, |
| "loss": 0.5041, |
| "step": 227 |
| }, |
| { |
| "epoch": 2.1853035143769968, |
| "grad_norm": 0.5224547386169434, |
| "learning_rate": 2.061073738537635e-06, |
| "loss": 0.504, |
| "step": 228 |
| }, |
| { |
| "epoch": 2.194888178913738, |
| "grad_norm": 0.49711328744888306, |
| "learning_rate": 2.0158739290874822e-06, |
| "loss": 0.4867, |
| "step": 229 |
| }, |
| { |
| "epoch": 2.2044728434504792, |
| "grad_norm": 0.5324923396110535, |
| "learning_rate": 1.971049780795901e-06, |
| "loss": 0.5058, |
| "step": 230 |
| }, |
| { |
| "epoch": 2.2140575079872207, |
| "grad_norm": 0.5620575547218323, |
| "learning_rate": 1.9266069364176144e-06, |
| "loss": 0.491, |
| "step": 231 |
| }, |
| { |
| "epoch": 2.2236421725239617, |
| "grad_norm": 0.6187978386878967, |
| "learning_rate": 1.8825509907063328e-06, |
| "loss": 0.4932, |
| "step": 232 |
| }, |
| { |
| "epoch": 2.2332268370607027, |
| "grad_norm": 0.5564191937446594, |
| "learning_rate": 1.838887489710452e-06, |
| "loss": 0.5167, |
| "step": 233 |
| }, |
| { |
| "epoch": 2.242811501597444, |
| "grad_norm": 0.544456422328949, |
| "learning_rate": 1.7956219300748796e-06, |
| "loss": 0.5106, |
| "step": 234 |
| }, |
| { |
| "epoch": 2.252396166134185, |
| "grad_norm": 0.49560070037841797, |
| "learning_rate": 1.7527597583490825e-06, |
| "loss": 0.4985, |
| "step": 235 |
| }, |
| { |
| "epoch": 2.2619808306709266, |
| "grad_norm": 0.5730255246162415, |
| "learning_rate": 1.7103063703014372e-06, |
| "loss": 0.4861, |
| "step": 236 |
| }, |
| { |
| "epoch": 2.2715654952076676, |
| "grad_norm": 0.5446303486824036, |
| "learning_rate": 1.6682671102399806e-06, |
| "loss": 0.5204, |
| "step": 237 |
| }, |
| { |
| "epoch": 2.281150159744409, |
| "grad_norm": 0.5347279906272888, |
| "learning_rate": 1.6266472703396286e-06, |
| "loss": 0.514, |
| "step": 238 |
| }, |
| { |
| "epoch": 2.29073482428115, |
| "grad_norm": 0.5006322860717773, |
| "learning_rate": 1.5854520899759656e-06, |
| "loss": 0.5042, |
| "step": 239 |
| }, |
| { |
| "epoch": 2.3003194888178915, |
| "grad_norm": 0.5397834777832031, |
| "learning_rate": 1.544686755065677e-06, |
| "loss": 0.5044, |
| "step": 240 |
| }, |
| { |
| "epoch": 2.3099041533546325, |
| "grad_norm": 0.5032399892807007, |
| "learning_rate": 1.5043563974137132e-06, |
| "loss": 0.5072, |
| "step": 241 |
| }, |
| { |
| "epoch": 2.319488817891374, |
| "grad_norm": 0.47611555457115173, |
| "learning_rate": 1.4644660940672628e-06, |
| "loss": 0.5302, |
| "step": 242 |
| }, |
| { |
| "epoch": 2.329073482428115, |
| "grad_norm": 0.5055474042892456, |
| "learning_rate": 1.4250208666766235e-06, |
| "loss": 0.5042, |
| "step": 243 |
| }, |
| { |
| "epoch": 2.3386581469648564, |
| "grad_norm": 0.5255368947982788, |
| "learning_rate": 1.3860256808630429e-06, |
| "loss": 0.5111, |
| "step": 244 |
| }, |
| { |
| "epoch": 2.3482428115015974, |
| "grad_norm": 0.5098780393600464, |
| "learning_rate": 1.3474854455936126e-06, |
| "loss": 0.515, |
| "step": 245 |
| }, |
| { |
| "epoch": 2.357827476038339, |
| "grad_norm": 0.4925176501274109, |
| "learning_rate": 1.3094050125632973e-06, |
| "loss": 0.5114, |
| "step": 246 |
| }, |
| { |
| "epoch": 2.36741214057508, |
| "grad_norm": 0.537067174911499, |
| "learning_rate": 1.2717891755841722e-06, |
| "loss": 0.4797, |
| "step": 247 |
| }, |
| { |
| "epoch": 2.376996805111821, |
| "grad_norm": 0.4920751750469208, |
| "learning_rate": 1.234642669981946e-06, |
| "loss": 0.4941, |
| "step": 248 |
| }, |
| { |
| "epoch": 2.3865814696485623, |
| "grad_norm": 0.4458199143409729, |
| "learning_rate": 1.1979701719998454e-06, |
| "loss": 0.5222, |
| "step": 249 |
| }, |
| { |
| "epoch": 2.3961661341853033, |
| "grad_norm": 0.48658084869384766, |
| "learning_rate": 1.1617762982099446e-06, |
| "loss": 0.518, |
| "step": 250 |
| }, |
| { |
| "epoch": 2.405750798722045, |
| "grad_norm": 0.5211532115936279, |
| "learning_rate": 1.1260656049319957e-06, |
| "loss": 0.4984, |
| "step": 251 |
| }, |
| { |
| "epoch": 2.415335463258786, |
| "grad_norm": 0.5080324411392212, |
| "learning_rate": 1.0908425876598512e-06, |
| "loss": 0.5229, |
| "step": 252 |
| }, |
| { |
| "epoch": 2.4249201277955272, |
| "grad_norm": 0.5140640735626221, |
| "learning_rate": 1.0561116804955451e-06, |
| "loss": 0.5146, |
| "step": 253 |
| }, |
| { |
| "epoch": 2.4345047923322682, |
| "grad_norm": 0.48215627670288086, |
| "learning_rate": 1.0218772555910955e-06, |
| "loss": 0.5131, |
| "step": 254 |
| }, |
| { |
| "epoch": 2.4440894568690097, |
| "grad_norm": 0.5040873289108276, |
| "learning_rate": 9.881436225981107e-07, |
| "loss": 0.4914, |
| "step": 255 |
| }, |
| { |
| "epoch": 2.4536741214057507, |
| "grad_norm": 0.5259299874305725, |
| "learning_rate": 9.549150281252633e-07, |
| "loss": 0.5022, |
| "step": 256 |
| }, |
| { |
| "epoch": 2.463258785942492, |
| "grad_norm": 0.5273531079292297, |
| "learning_rate": 9.221956552036992e-07, |
| "loss": 0.4945, |
| "step": 257 |
| }, |
| { |
| "epoch": 2.472843450479233, |
| "grad_norm": 0.49242982268333435, |
| "learning_rate": 8.899896227604509e-07, |
| "loss": 0.4967, |
| "step": 258 |
| }, |
| { |
| "epoch": 2.4824281150159746, |
| "grad_norm": 0.48913198709487915, |
| "learning_rate": 8.58300985099918e-07, |
| "loss": 0.4912, |
| "step": 259 |
| }, |
| { |
| "epoch": 2.4920127795527156, |
| "grad_norm": 0.4919417202472687, |
| "learning_rate": 8.271337313934869e-07, |
| "loss": 0.4789, |
| "step": 260 |
| }, |
| { |
| "epoch": 2.501597444089457, |
| "grad_norm": 0.4618432819843292, |
| "learning_rate": 7.964917851773496e-07, |
| "loss": 0.5138, |
| "step": 261 |
| }, |
| { |
| "epoch": 2.511182108626198, |
| "grad_norm": 0.5181034207344055, |
| "learning_rate": 7.663790038585794e-07, |
| "loss": 0.4869, |
| "step": 262 |
| }, |
| { |
| "epoch": 2.520766773162939, |
| "grad_norm": 0.4994615912437439, |
| "learning_rate": 7.367991782295392e-07, |
| "loss": 0.487, |
| "step": 263 |
| }, |
| { |
| "epoch": 2.5303514376996805, |
| "grad_norm": 0.4796070158481598, |
| "learning_rate": 7.077560319906696e-07, |
| "loss": 0.5115, |
| "step": 264 |
| }, |
| { |
| "epoch": 2.539936102236422, |
| "grad_norm": 0.4891747832298279, |
| "learning_rate": 6.792532212817271e-07, |
| "loss": 0.5304, |
| "step": 265 |
| }, |
| { |
| "epoch": 2.549520766773163, |
| "grad_norm": 0.4947539269924164, |
| "learning_rate": 6.512943342215234e-07, |
| "loss": 0.5268, |
| "step": 266 |
| }, |
| { |
| "epoch": 2.559105431309904, |
| "grad_norm": 0.48322218656539917, |
| "learning_rate": 6.238828904562316e-07, |
| "loss": 0.4955, |
| "step": 267 |
| }, |
| { |
| "epoch": 2.5686900958466454, |
| "grad_norm": 0.4670308232307434, |
| "learning_rate": 5.9702234071631e-07, |
| "loss": 0.5186, |
| "step": 268 |
| }, |
| { |
| "epoch": 2.5782747603833864, |
| "grad_norm": 0.5252702236175537, |
| "learning_rate": 5.707160663821009e-07, |
| "loss": 0.4998, |
| "step": 269 |
| }, |
| { |
| "epoch": 2.587859424920128, |
| "grad_norm": 0.4638543426990509, |
| "learning_rate": 5.449673790581611e-07, |
| "loss": 0.4974, |
| "step": 270 |
| }, |
| { |
| "epoch": 2.597444089456869, |
| "grad_norm": 0.5163476467132568, |
| "learning_rate": 5.197795201563744e-07, |
| "loss": 0.4794, |
| "step": 271 |
| }, |
| { |
| "epoch": 2.6070287539936103, |
| "grad_norm": 0.4854280948638916, |
| "learning_rate": 4.951556604879049e-07, |
| "loss": 0.4974, |
| "step": 272 |
| }, |
| { |
| "epoch": 2.6166134185303513, |
| "grad_norm": 0.4789203107357025, |
| "learning_rate": 4.710988998640298e-07, |
| "loss": 0.4988, |
| "step": 273 |
| }, |
| { |
| "epoch": 2.626198083067093, |
| "grad_norm": 0.4644279181957245, |
| "learning_rate": 4.4761226670592074e-07, |
| "loss": 0.512, |
| "step": 274 |
| }, |
| { |
| "epoch": 2.635782747603834, |
| "grad_norm": 0.5351918935775757, |
| "learning_rate": 4.2469871766340096e-07, |
| "loss": 0.4738, |
| "step": 275 |
| }, |
| { |
| "epoch": 2.6453674121405752, |
| "grad_norm": 0.48578253388404846, |
| "learning_rate": 4.0236113724274716e-07, |
| "loss": 0.5278, |
| "step": 276 |
| }, |
| { |
| "epoch": 2.6549520766773163, |
| "grad_norm": 0.4682360291481018, |
| "learning_rate": 3.8060233744356634e-07, |
| "loss": 0.5055, |
| "step": 277 |
| }, |
| { |
| "epoch": 2.6645367412140573, |
| "grad_norm": 0.49804800748825073, |
| "learning_rate": 3.5942505740480583e-07, |
| "loss": 0.4815, |
| "step": 278 |
| }, |
| { |
| "epoch": 2.6741214057507987, |
| "grad_norm": 0.48392346501350403, |
| "learning_rate": 3.3883196305992906e-07, |
| "loss": 0.5133, |
| "step": 279 |
| }, |
| { |
| "epoch": 2.68370607028754, |
| "grad_norm": 0.4864685535430908, |
| "learning_rate": 3.18825646801314e-07, |
| "loss": 0.5029, |
| "step": 280 |
| }, |
| { |
| "epoch": 2.693290734824281, |
| "grad_norm": 0.5228228569030762, |
| "learning_rate": 2.9940862715390483e-07, |
| "loss": 0.4995, |
| "step": 281 |
| }, |
| { |
| "epoch": 2.702875399361022, |
| "grad_norm": 0.4820217192173004, |
| "learning_rate": 2.8058334845816214e-07, |
| "loss": 0.5045, |
| "step": 282 |
| }, |
| { |
| "epoch": 2.7124600638977636, |
| "grad_norm": 0.4983007311820984, |
| "learning_rate": 2.6235218056235633e-07, |
| "loss": 0.49, |
| "step": 283 |
| }, |
| { |
| "epoch": 2.722044728434505, |
| "grad_norm": 0.48497146368026733, |
| "learning_rate": 2.447174185242324e-07, |
| "loss": 0.4986, |
| "step": 284 |
| }, |
| { |
| "epoch": 2.731629392971246, |
| "grad_norm": 0.4843176603317261, |
| "learning_rate": 2.276812823220964e-07, |
| "loss": 0.4864, |
| "step": 285 |
| }, |
| { |
| "epoch": 2.741214057507987, |
| "grad_norm": 0.4612375795841217, |
| "learning_rate": 2.1124591657534776e-07, |
| "loss": 0.5138, |
| "step": 286 |
| }, |
| { |
| "epoch": 2.7507987220447285, |
| "grad_norm": 0.4730239808559418, |
| "learning_rate": 1.9541339027450256e-07, |
| "loss": 0.498, |
| "step": 287 |
| }, |
| { |
| "epoch": 2.7603833865814695, |
| "grad_norm": 0.47294536232948303, |
| "learning_rate": 1.801856965207338e-07, |
| "loss": 0.517, |
| "step": 288 |
| }, |
| { |
| "epoch": 2.769968051118211, |
| "grad_norm": 0.490991473197937, |
| "learning_rate": 1.6556475227496816e-07, |
| "loss": 0.5219, |
| "step": 289 |
| }, |
| { |
| "epoch": 2.779552715654952, |
| "grad_norm": 0.5257830619812012, |
| "learning_rate": 1.5155239811656562e-07, |
| "loss": 0.5048, |
| "step": 290 |
| }, |
| { |
| "epoch": 2.7891373801916934, |
| "grad_norm": 0.46119868755340576, |
| "learning_rate": 1.3815039801161723e-07, |
| "loss": 0.5258, |
| "step": 291 |
| }, |
| { |
| "epoch": 2.7987220447284344, |
| "grad_norm": 0.5001710653305054, |
| "learning_rate": 1.253604390908819e-07, |
| "loss": 0.5056, |
| "step": 292 |
| }, |
| { |
| "epoch": 2.8083067092651754, |
| "grad_norm": 0.45364686846733093, |
| "learning_rate": 1.1318413143740436e-07, |
| "loss": 0.525, |
| "step": 293 |
| }, |
| { |
| "epoch": 2.817891373801917, |
| "grad_norm": 0.511743426322937, |
| "learning_rate": 1.0162300788382263e-07, |
| "loss": 0.5028, |
| "step": 294 |
| }, |
| { |
| "epoch": 2.8274760383386583, |
| "grad_norm": 0.48278823494911194, |
| "learning_rate": 9.0678523819408e-08, |
| "loss": 0.5026, |
| "step": 295 |
| }, |
| { |
| "epoch": 2.8370607028753994, |
| "grad_norm": 0.4801782965660095, |
| "learning_rate": 8.035205700685167e-08, |
| "loss": 0.5173, |
| "step": 296 |
| }, |
| { |
| "epoch": 2.8466453674121404, |
| "grad_norm": 0.4829420745372772, |
| "learning_rate": 7.064490740882057e-08, |
| "loss": 0.5111, |
| "step": 297 |
| }, |
| { |
| "epoch": 2.856230031948882, |
| "grad_norm": 0.49983468651771545, |
| "learning_rate": 6.15582970243117e-08, |
| "loss": 0.4989, |
| "step": 298 |
| }, |
| { |
| "epoch": 2.8658146964856233, |
| "grad_norm": 0.4892556667327881, |
| "learning_rate": 5.3093369734816824e-08, |
| "loss": 0.5111, |
| "step": 299 |
| }, |
| { |
| "epoch": 2.8753993610223643, |
| "grad_norm": 0.5195422172546387, |
| "learning_rate": 4.52511911603265e-08, |
| "loss": 0.4896, |
| "step": 300 |
| }, |
| { |
| "epoch": 2.8849840255591053, |
| "grad_norm": 0.4906642436981201, |
| "learning_rate": 3.8032748525179684e-08, |
| "loss": 0.5274, |
| "step": 301 |
| }, |
| { |
| "epoch": 2.8945686900958467, |
| "grad_norm": 0.499541699886322, |
| "learning_rate": 3.143895053378698e-08, |
| "loss": 0.4915, |
| "step": 302 |
| }, |
| { |
| "epoch": 2.9041533546325877, |
| "grad_norm": 0.48538094758987427, |
| "learning_rate": 2.547062725623828e-08, |
| "loss": 0.5059, |
| "step": 303 |
| }, |
| { |
| "epoch": 2.913738019169329, |
| "grad_norm": 0.4701797366142273, |
| "learning_rate": 2.012853002380466e-08, |
| "loss": 0.4837, |
| "step": 304 |
| }, |
| { |
| "epoch": 2.92332268370607, |
| "grad_norm": 0.5019702315330505, |
| "learning_rate": 1.541333133436018e-08, |
| "loss": 0.5137, |
| "step": 305 |
| }, |
| { |
| "epoch": 2.9329073482428116, |
| "grad_norm": 0.5122066736221313, |
| "learning_rate": 1.132562476771959e-08, |
| "loss": 0.5035, |
| "step": 306 |
| }, |
| { |
| "epoch": 2.9424920127795526, |
| "grad_norm": 0.4683079719543457, |
| "learning_rate": 7.865924910916977e-09, |
| "loss": 0.5052, |
| "step": 307 |
| }, |
| { |
| "epoch": 2.952076677316294, |
| "grad_norm": 0.48147568106651306, |
| "learning_rate": 5.034667293427053e-09, |
| "loss": 0.4787, |
| "step": 308 |
| }, |
| { |
| "epoch": 2.961661341853035, |
| "grad_norm": 0.4846435487270355, |
| "learning_rate": 2.8322083323334417e-09, |
| "loss": 0.498, |
| "step": 309 |
| }, |
| { |
| "epoch": 2.9712460063897765, |
| "grad_norm": 0.4767644703388214, |
| "learning_rate": 1.2588252874673469e-09, |
| "loss": 0.5103, |
| "step": 310 |
| }, |
| { |
| "epoch": 2.9808306709265175, |
| "grad_norm": 0.48420077562332153, |
| "learning_rate": 3.147162264971471e-10, |
| "loss": 0.4959, |
| "step": 311 |
| }, |
| { |
| "epoch": 2.9904153354632586, |
| "grad_norm": 0.4982874095439911, |
| "learning_rate": 0.0, |
| "loss": 0.5048, |
| "step": 312 |
| }, |
| { |
| "epoch": 2.9904153354632586, |
| "step": 312, |
| "total_flos": 2.995131507754271e+17, |
| "train_loss": 0.6095600314438343, |
| "train_runtime": 8590.5636, |
| "train_samples_per_second": 3.492, |
| "train_steps_per_second": 0.036 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 312, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 2.995131507754271e+17, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |