{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 7.0,
"eval_steps": 500,
"global_step": 567,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.012345679012345678,
"grad_norm": 9.008057481638138,
"learning_rate": 3.5087719298245616e-07,
"loss": 0.6743,
"step": 1
},
{
"epoch": 0.024691358024691357,
"grad_norm": 7.6701365166507625,
"learning_rate": 7.017543859649123e-07,
"loss": 0.576,
"step": 2
},
{
"epoch": 0.037037037037037035,
"grad_norm": 8.97726725259136,
"learning_rate": 1.0526315789473685e-06,
"loss": 0.6663,
"step": 3
},
{
"epoch": 0.04938271604938271,
"grad_norm": 7.225115208278711,
"learning_rate": 1.4035087719298246e-06,
"loss": 0.5696,
"step": 4
},
{
"epoch": 0.06172839506172839,
"grad_norm": 7.33497956558816,
"learning_rate": 1.7543859649122807e-06,
"loss": 0.5746,
"step": 5
},
{
"epoch": 0.07407407407407407,
"grad_norm": 6.771581012013294,
"learning_rate": 2.105263157894737e-06,
"loss": 0.5319,
"step": 6
},
{
"epoch": 0.08641975308641975,
"grad_norm": 5.723788798376614,
"learning_rate": 2.456140350877193e-06,
"loss": 0.4894,
"step": 7
},
{
"epoch": 0.09876543209876543,
"grad_norm": 5.4088640282382565,
"learning_rate": 2.8070175438596493e-06,
"loss": 0.517,
"step": 8
},
{
"epoch": 0.1111111111111111,
"grad_norm": 2.153808698110969,
"learning_rate": 3.157894736842105e-06,
"loss": 0.4509,
"step": 9
},
{
"epoch": 0.12345679012345678,
"grad_norm": 2.0408212548859406,
"learning_rate": 3.5087719298245615e-06,
"loss": 0.4467,
"step": 10
},
{
"epoch": 0.13580246913580246,
"grad_norm": 1.5459963386370055,
"learning_rate": 3.859649122807018e-06,
"loss": 0.4118,
"step": 11
},
{
"epoch": 0.14814814814814814,
"grad_norm": 1.5814209627310287,
"learning_rate": 4.210526315789474e-06,
"loss": 0.3888,
"step": 12
},
{
"epoch": 0.16049382716049382,
"grad_norm": 1.87145877734503,
"learning_rate": 4.56140350877193e-06,
"loss": 0.4058,
"step": 13
},
{
"epoch": 0.1728395061728395,
"grad_norm": 1.3756801350478975,
"learning_rate": 4.912280701754386e-06,
"loss": 0.3741,
"step": 14
},
{
"epoch": 0.18518518518518517,
"grad_norm": 1.1752142353172148,
"learning_rate": 5.263157894736842e-06,
"loss": 0.3995,
"step": 15
},
{
"epoch": 0.19753086419753085,
"grad_norm": 1.418292428476638,
"learning_rate": 5.6140350877192985e-06,
"loss": 0.385,
"step": 16
},
{
"epoch": 0.20987654320987653,
"grad_norm": 1.16124552621937,
"learning_rate": 5.964912280701755e-06,
"loss": 0.36,
"step": 17
},
{
"epoch": 0.2222222222222222,
"grad_norm": 0.9870066082850208,
"learning_rate": 6.31578947368421e-06,
"loss": 0.3499,
"step": 18
},
{
"epoch": 0.2345679012345679,
"grad_norm": 0.9366608437505954,
"learning_rate": 6.666666666666667e-06,
"loss": 0.3513,
"step": 19
},
{
"epoch": 0.24691358024691357,
"grad_norm": 1.1198168697388675,
"learning_rate": 7.017543859649123e-06,
"loss": 0.3412,
"step": 20
},
{
"epoch": 0.25925925925925924,
"grad_norm": 0.877398481087362,
"learning_rate": 7.368421052631579e-06,
"loss": 0.3298,
"step": 21
},
{
"epoch": 0.2716049382716049,
"grad_norm": 0.9702377137387861,
"learning_rate": 7.719298245614036e-06,
"loss": 0.3365,
"step": 22
},
{
"epoch": 0.2839506172839506,
"grad_norm": 0.7984859557261315,
"learning_rate": 8.070175438596492e-06,
"loss": 0.3025,
"step": 23
},
{
"epoch": 0.2962962962962963,
"grad_norm": 0.7050324311912214,
"learning_rate": 8.421052631578948e-06,
"loss": 0.2703,
"step": 24
},
{
"epoch": 0.30864197530864196,
"grad_norm": 0.9126479851972958,
"learning_rate": 8.771929824561405e-06,
"loss": 0.2939,
"step": 25
},
{
"epoch": 0.32098765432098764,
"grad_norm": 0.8796740071209702,
"learning_rate": 9.12280701754386e-06,
"loss": 0.304,
"step": 26
},
{
"epoch": 0.3333333333333333,
"grad_norm": 0.9446428136109001,
"learning_rate": 9.473684210526315e-06,
"loss": 0.3092,
"step": 27
},
{
"epoch": 0.345679012345679,
"grad_norm": 0.8426809268341795,
"learning_rate": 9.824561403508772e-06,
"loss": 0.2732,
"step": 28
},
{
"epoch": 0.35802469135802467,
"grad_norm": 0.8732085201361185,
"learning_rate": 1.017543859649123e-05,
"loss": 0.2745,
"step": 29
},
{
"epoch": 0.37037037037037035,
"grad_norm": 0.827942537306075,
"learning_rate": 1.0526315789473684e-05,
"loss": 0.2824,
"step": 30
},
{
"epoch": 0.38271604938271603,
"grad_norm": 0.9093812676850115,
"learning_rate": 1.0877192982456142e-05,
"loss": 0.2796,
"step": 31
},
{
"epoch": 0.3950617283950617,
"grad_norm": 0.7240207676930677,
"learning_rate": 1.1228070175438597e-05,
"loss": 0.2413,
"step": 32
},
{
"epoch": 0.4074074074074074,
"grad_norm": 0.8837553684406467,
"learning_rate": 1.1578947368421053e-05,
"loss": 0.2654,
"step": 33
},
{
"epoch": 0.41975308641975306,
"grad_norm": 0.9620696328942127,
"learning_rate": 1.192982456140351e-05,
"loss": 0.2604,
"step": 34
},
{
"epoch": 0.43209876543209874,
"grad_norm": 0.8315152131518835,
"learning_rate": 1.2280701754385966e-05,
"loss": 0.2374,
"step": 35
},
{
"epoch": 0.4444444444444444,
"grad_norm": 0.7954427643282206,
"learning_rate": 1.263157894736842e-05,
"loss": 0.2599,
"step": 36
},
{
"epoch": 0.4567901234567901,
"grad_norm": 0.8097701562489309,
"learning_rate": 1.2982456140350879e-05,
"loss": 0.2336,
"step": 37
},
{
"epoch": 0.4691358024691358,
"grad_norm": 0.7679487776985499,
"learning_rate": 1.3333333333333333e-05,
"loss": 0.1984,
"step": 38
},
{
"epoch": 0.48148148148148145,
"grad_norm": 0.8186214855532091,
"learning_rate": 1.3684210526315791e-05,
"loss": 0.2257,
"step": 39
},
{
"epoch": 0.49382716049382713,
"grad_norm": 0.8486399745760126,
"learning_rate": 1.4035087719298246e-05,
"loss": 0.2203,
"step": 40
},
{
"epoch": 0.5061728395061729,
"grad_norm": 0.7515522994828953,
"learning_rate": 1.4385964912280704e-05,
"loss": 0.2033,
"step": 41
},
{
"epoch": 0.5185185185185185,
"grad_norm": 0.8186675607439609,
"learning_rate": 1.4736842105263159e-05,
"loss": 0.2035,
"step": 42
},
{
"epoch": 0.5308641975308642,
"grad_norm": 0.8105881928417837,
"learning_rate": 1.5087719298245615e-05,
"loss": 0.2067,
"step": 43
},
{
"epoch": 0.5432098765432098,
"grad_norm": 0.7099905049117751,
"learning_rate": 1.543859649122807e-05,
"loss": 0.1619,
"step": 44
},
{
"epoch": 0.5555555555555556,
"grad_norm": 0.7178850952198459,
"learning_rate": 1.578947368421053e-05,
"loss": 0.1763,
"step": 45
},
{
"epoch": 0.5679012345679012,
"grad_norm": 0.9051580828689527,
"learning_rate": 1.6140350877192984e-05,
"loss": 0.1777,
"step": 46
},
{
"epoch": 0.5802469135802469,
"grad_norm": 0.776120251351539,
"learning_rate": 1.649122807017544e-05,
"loss": 0.1533,
"step": 47
},
{
"epoch": 0.5925925925925926,
"grad_norm": 0.6923150710928373,
"learning_rate": 1.6842105263157896e-05,
"loss": 0.1471,
"step": 48
},
{
"epoch": 0.6049382716049383,
"grad_norm": 0.8308137534219192,
"learning_rate": 1.719298245614035e-05,
"loss": 0.1661,
"step": 49
},
{
"epoch": 0.6172839506172839,
"grad_norm": 0.6920144332725509,
"learning_rate": 1.754385964912281e-05,
"loss": 0.1544,
"step": 50
},
{
"epoch": 0.6296296296296297,
"grad_norm": 0.8936796942488102,
"learning_rate": 1.7894736842105264e-05,
"loss": 0.1517,
"step": 51
},
{
"epoch": 0.6419753086419753,
"grad_norm": 0.7606903567238646,
"learning_rate": 1.824561403508772e-05,
"loss": 0.1436,
"step": 52
},
{
"epoch": 0.654320987654321,
"grad_norm": 0.7983567717631593,
"learning_rate": 1.8596491228070176e-05,
"loss": 0.1504,
"step": 53
},
{
"epoch": 0.6666666666666666,
"grad_norm": 0.6932938639060522,
"learning_rate": 1.894736842105263e-05,
"loss": 0.1323,
"step": 54
},
{
"epoch": 0.6790123456790124,
"grad_norm": 0.765978207523316,
"learning_rate": 1.929824561403509e-05,
"loss": 0.1584,
"step": 55
},
{
"epoch": 0.691358024691358,
"grad_norm": 0.6706267081010999,
"learning_rate": 1.9649122807017544e-05,
"loss": 0.1226,
"step": 56
},
{
"epoch": 0.7037037037037037,
"grad_norm": 0.7576546029397313,
"learning_rate": 2e-05,
"loss": 0.1107,
"step": 57
},
{
"epoch": 0.7160493827160493,
"grad_norm": 0.631172942746403,
"learning_rate": 1.999981027348727e-05,
"loss": 0.1269,
"step": 58
},
{
"epoch": 0.7283950617283951,
"grad_norm": 0.7763194682579061,
"learning_rate": 1.999924110114831e-05,
"loss": 0.1106,
"step": 59
},
{
"epoch": 0.7407407407407407,
"grad_norm": 0.6620307624287203,
"learning_rate": 1.9998292504580528e-05,
"loss": 0.1264,
"step": 60
},
{
"epoch": 0.7530864197530864,
"grad_norm": 0.6018372333318557,
"learning_rate": 1.999696451977872e-05,
"loss": 0.1064,
"step": 61
},
{
"epoch": 0.7654320987654321,
"grad_norm": 0.5505324516461024,
"learning_rate": 1.999525719713366e-05,
"loss": 0.0924,
"step": 62
},
{
"epoch": 0.7777777777777778,
"grad_norm": 0.6290628543373665,
"learning_rate": 1.9993170601430233e-05,
"loss": 0.1015,
"step": 63
},
{
"epoch": 0.7901234567901234,
"grad_norm": 0.5654581976010135,
"learning_rate": 1.9990704811844934e-05,
"loss": 0.0955,
"step": 64
},
{
"epoch": 0.8024691358024691,
"grad_norm": 0.6143207836622308,
"learning_rate": 1.99878599219429e-05,
"loss": 0.1045,
"step": 65
},
{
"epoch": 0.8148148148148148,
"grad_norm": 0.6398660896422187,
"learning_rate": 1.9984636039674342e-05,
"loss": 0.0873,
"step": 66
},
{
"epoch": 0.8271604938271605,
"grad_norm": 0.6398310694406929,
"learning_rate": 1.9981033287370443e-05,
"loss": 0.1089,
"step": 67
},
{
"epoch": 0.8395061728395061,
"grad_norm": 0.6948653198177049,
"learning_rate": 1.9977051801738733e-05,
"loss": 0.1364,
"step": 68
},
{
"epoch": 0.8518518518518519,
"grad_norm": 0.6005769842034276,
"learning_rate": 1.997269173385788e-05,
"loss": 0.092,
"step": 69
},
{
"epoch": 0.8641975308641975,
"grad_norm": 0.6977886617269095,
"learning_rate": 1.996795324917199e-05,
"loss": 0.1083,
"step": 70
},
{
"epoch": 0.8765432098765432,
"grad_norm": 0.6065814882574566,
"learning_rate": 1.9962836527484296e-05,
"loss": 0.0818,
"step": 71
},
{
"epoch": 0.8888888888888888,
"grad_norm": 0.6615848160240534,
"learning_rate": 1.9957341762950346e-05,
"loss": 0.0981,
"step": 72
},
{
"epoch": 0.9012345679012346,
"grad_norm": 0.5931653814016081,
"learning_rate": 1.9951469164070647e-05,
"loss": 0.083,
"step": 73
},
{
"epoch": 0.9135802469135802,
"grad_norm": 0.5186114507696477,
"learning_rate": 1.9945218953682736e-05,
"loss": 0.0809,
"step": 74
},
{
"epoch": 0.9259259259259259,
"grad_norm": 0.573114746431398,
"learning_rate": 1.993859136895274e-05,
"loss": 0.0848,
"step": 75
},
{
"epoch": 0.9382716049382716,
"grad_norm": 0.5344071067213606,
"learning_rate": 1.9931586661366363e-05,
"loss": 0.0815,
"step": 76
},
{
"epoch": 0.9506172839506173,
"grad_norm": 0.6377105774122757,
"learning_rate": 1.992420509671936e-05,
"loss": 0.0879,
"step": 77
},
{
"epoch": 0.9629629629629629,
"grad_norm": 0.5209583162843623,
"learning_rate": 1.991644695510743e-05,
"loss": 0.0805,
"step": 78
},
{
"epoch": 0.9753086419753086,
"grad_norm": 0.417423387838958,
"learning_rate": 1.9908312530915603e-05,
"loss": 0.0634,
"step": 79
},
{
"epoch": 0.9876543209876543,
"grad_norm": 0.5230529318454548,
"learning_rate": 1.9899802132807073e-05,
"loss": 0.074,
"step": 80
},
{
"epoch": 1.0,
"grad_norm": 0.571800306158981,
"learning_rate": 1.9890916083711463e-05,
"loss": 0.0896,
"step": 81
},
{
"epoch": 1.0123456790123457,
"grad_norm": 0.48654258298002473,
"learning_rate": 1.9881654720812594e-05,
"loss": 0.0616,
"step": 82
},
{
"epoch": 1.0246913580246915,
"grad_norm": 0.49061163663393764,
"learning_rate": 1.9872018395535694e-05,
"loss": 0.0546,
"step": 83
},
{
"epoch": 1.037037037037037,
"grad_norm": 0.5362863528158126,
"learning_rate": 1.9862007473534026e-05,
"loss": 0.0633,
"step": 84
},
{
"epoch": 1.0493827160493827,
"grad_norm": 0.48289786827410613,
"learning_rate": 1.9851622334675065e-05,
"loss": 0.0651,
"step": 85
},
{
"epoch": 1.0617283950617284,
"grad_norm": 0.4191994966523693,
"learning_rate": 1.9840863373026046e-05,
"loss": 0.0378,
"step": 86
},
{
"epoch": 1.074074074074074,
"grad_norm": 0.48414987204753185,
"learning_rate": 1.982973099683902e-05,
"loss": 0.0479,
"step": 87
},
{
"epoch": 1.0864197530864197,
"grad_norm": 0.47673601747945576,
"learning_rate": 1.9818225628535372e-05,
"loss": 0.0482,
"step": 88
},
{
"epoch": 1.0987654320987654,
"grad_norm": 0.4668133839134151,
"learning_rate": 1.9806347704689778e-05,
"loss": 0.0691,
"step": 89
},
{
"epoch": 1.1111111111111112,
"grad_norm": 0.40912483085288526,
"learning_rate": 1.979409767601366e-05,
"loss": 0.0473,
"step": 90
},
{
"epoch": 1.123456790123457,
"grad_norm": 0.46287321439131024,
"learning_rate": 1.9781476007338058e-05,
"loss": 0.0487,
"step": 91
},
{
"epoch": 1.1358024691358024,
"grad_norm": 0.46265781731853756,
"learning_rate": 1.9768483177596008e-05,
"loss": 0.0548,
"step": 92
},
{
"epoch": 1.1481481481481481,
"grad_norm": 0.43307375489466543,
"learning_rate": 1.975511967980437e-05,
"loss": 0.045,
"step": 93
},
{
"epoch": 1.1604938271604939,
"grad_norm": 0.4715524387710138,
"learning_rate": 1.9741386021045105e-05,
"loss": 0.0417,
"step": 94
},
{
"epoch": 1.1728395061728394,
"grad_norm": 0.42072406617196306,
"learning_rate": 1.972728272244605e-05,
"loss": 0.0472,
"step": 95
},
{
"epoch": 1.1851851851851851,
"grad_norm": 0.43172521185288276,
"learning_rate": 1.971281031916114e-05,
"loss": 0.0377,
"step": 96
},
{
"epoch": 1.1975308641975309,
"grad_norm": 0.4497872448243483,
"learning_rate": 1.9697969360350098e-05,
"loss": 0.042,
"step": 97
},
{
"epoch": 1.2098765432098766,
"grad_norm": 0.4015427782061893,
"learning_rate": 1.968276040915759e-05,
"loss": 0.0467,
"step": 98
},
{
"epoch": 1.2222222222222223,
"grad_norm": 0.46433315248898455,
"learning_rate": 1.9667184042691877e-05,
"loss": 0.0781,
"step": 99
},
{
"epoch": 1.2345679012345678,
"grad_norm": 0.4607108700832256,
"learning_rate": 1.965124085200289e-05,
"loss": 0.0484,
"step": 100
},
{
"epoch": 1.2469135802469136,
"grad_norm": 0.4580474315592068,
"learning_rate": 1.9634931442059833e-05,
"loss": 0.0497,
"step": 101
},
{
"epoch": 1.2592592592592593,
"grad_norm": 0.4376923035257714,
"learning_rate": 1.961825643172819e-05,
"loss": 0.0372,
"step": 102
},
{
"epoch": 1.2716049382716048,
"grad_norm": 0.47470979724718027,
"learning_rate": 1.9601216453746285e-05,
"loss": 0.0412,
"step": 103
},
{
"epoch": 1.2839506172839505,
"grad_norm": 0.48607721607358306,
"learning_rate": 1.9583812154701227e-05,
"loss": 0.063,
"step": 104
},
{
"epoch": 1.2962962962962963,
"grad_norm": 0.3698617332173753,
"learning_rate": 1.956604419500441e-05,
"loss": 0.0531,
"step": 105
},
{
"epoch": 1.308641975308642,
"grad_norm": 0.4394488729697042,
"learning_rate": 1.9547913248866447e-05,
"loss": 0.0629,
"step": 106
},
{
"epoch": 1.3209876543209877,
"grad_norm": 0.36774414517262893,
"learning_rate": 1.9529420004271568e-05,
"loss": 0.0465,
"step": 107
},
{
"epoch": 1.3333333333333333,
"grad_norm": 0.4084826218592715,
"learning_rate": 1.9510565162951538e-05,
"loss": 0.0375,
"step": 108
},
{
"epoch": 1.345679012345679,
"grad_norm": 0.3974187863812691,
"learning_rate": 1.9491349440359014e-05,
"loss": 0.0372,
"step": 109
},
{
"epoch": 1.3580246913580247,
"grad_norm": 0.43146975975134916,
"learning_rate": 1.9471773565640405e-05,
"loss": 0.0366,
"step": 110
},
{
"epoch": 1.3703703703703702,
"grad_norm": 0.3812925184125817,
"learning_rate": 1.94518382816082e-05,
"loss": 0.0432,
"step": 111
},
{
"epoch": 1.382716049382716,
"grad_norm": 0.5295352324623606,
"learning_rate": 1.9431544344712776e-05,
"loss": 0.0643,
"step": 112
},
{
"epoch": 1.3950617283950617,
"grad_norm": 0.46291492570157333,
"learning_rate": 1.9410892525013717e-05,
"loss": 0.0419,
"step": 113
},
{
"epoch": 1.4074074074074074,
"grad_norm": 0.3646553984739498,
"learning_rate": 1.938988360615057e-05,
"loss": 0.0376,
"step": 114
},
{
"epoch": 1.4197530864197532,
"grad_norm": 0.4120840867522972,
"learning_rate": 1.9368518385313108e-05,
"loss": 0.0277,
"step": 115
},
{
"epoch": 1.4320987654320987,
"grad_norm": 0.3678320151928319,
"learning_rate": 1.9346797673211108e-05,
"loss": 0.0472,
"step": 116
},
{
"epoch": 1.4444444444444444,
"grad_norm": 0.3509880150630426,
"learning_rate": 1.932472229404356e-05,
"loss": 0.0532,
"step": 117
},
{
"epoch": 1.4567901234567902,
"grad_norm": 0.5013873051103777,
"learning_rate": 1.9302293085467404e-05,
"loss": 0.0468,
"step": 118
},
{
"epoch": 1.4691358024691357,
"grad_norm": 0.37461635613832134,
"learning_rate": 1.927951089856575e-05,
"loss": 0.0393,
"step": 119
},
{
"epoch": 1.4814814814814814,
"grad_norm": 0.6569993579112918,
"learning_rate": 1.9256376597815565e-05,
"loss": 0.0506,
"step": 120
},
{
"epoch": 1.4938271604938271,
"grad_norm": 0.3606538758277819,
"learning_rate": 1.9232891061054896e-05,
"loss": 0.0309,
"step": 121
},
{
"epoch": 1.5061728395061729,
"grad_norm": 0.4489552042946805,
"learning_rate": 1.920905517944954e-05,
"loss": 0.0495,
"step": 122
},
{
"epoch": 1.5185185185185186,
"grad_norm": 0.3640183154186618,
"learning_rate": 1.9184869857459233e-05,
"loss": 0.0294,
"step": 123
},
{
"epoch": 1.5308641975308643,
"grad_norm": 0.4987631065093897,
"learning_rate": 1.916033601280334e-05,
"loss": 0.0533,
"step": 124
},
{
"epoch": 1.5432098765432098,
"grad_norm": 0.40319894875583845,
"learning_rate": 1.913545457642601e-05,
"loss": 0.0386,
"step": 125
},
{
"epoch": 1.5555555555555556,
"grad_norm": 0.3044060536431661,
"learning_rate": 1.9110226492460886e-05,
"loss": 0.0342,
"step": 126
},
{
"epoch": 1.567901234567901,
"grad_norm": 0.36919281340072985,
"learning_rate": 1.9084652718195237e-05,
"loss": 0.034,
"step": 127
},
{
"epoch": 1.5802469135802468,
"grad_norm": 0.4554316645468543,
"learning_rate": 1.9058734224033673e-05,
"loss": 0.0442,
"step": 128
},
{
"epoch": 1.5925925925925926,
"grad_norm": 0.3860637450416904,
"learning_rate": 1.903247199346129e-05,
"loss": 0.0351,
"step": 129
},
{
"epoch": 1.6049382716049383,
"grad_norm": 0.4145904111255904,
"learning_rate": 1.9005867023006374e-05,
"loss": 0.0538,
"step": 130
},
{
"epoch": 1.617283950617284,
"grad_norm": 0.37152108124728694,
"learning_rate": 1.8978920322202582e-05,
"loss": 0.0372,
"step": 131
},
{
"epoch": 1.6296296296296298,
"grad_norm": 0.3533748489120987,
"learning_rate": 1.8951632913550625e-05,
"loss": 0.0347,
"step": 132
},
{
"epoch": 1.6419753086419753,
"grad_norm": 0.41307921206850035,
"learning_rate": 1.892400583247948e-05,
"loss": 0.0467,
"step": 133
},
{
"epoch": 1.654320987654321,
"grad_norm": 0.3623309750124559,
"learning_rate": 1.88960401273071e-05,
"loss": 0.0339,
"step": 134
},
{
"epoch": 1.6666666666666665,
"grad_norm": 0.32342684047692133,
"learning_rate": 1.886773685920062e-05,
"loss": 0.0475,
"step": 135
},
{
"epoch": 1.6790123456790123,
"grad_norm": 0.2751221763038461,
"learning_rate": 1.8839097102136123e-05,
"loss": 0.0411,
"step": 136
},
{
"epoch": 1.691358024691358,
"grad_norm": 0.4309033939961943,
"learning_rate": 1.8810121942857848e-05,
"loss": 0.0499,
"step": 137
},
{
"epoch": 1.7037037037037037,
"grad_norm": 0.40460125045082745,
"learning_rate": 1.878081248083698e-05,
"loss": 0.0704,
"step": 138
},
{
"epoch": 1.7160493827160495,
"grad_norm": 0.33237280139195097,
"learning_rate": 1.8751169828229927e-05,
"loss": 0.0288,
"step": 139
},
{
"epoch": 1.7283950617283952,
"grad_norm": 0.33276164181630363,
"learning_rate": 1.872119510983611e-05,
"loss": 0.0283,
"step": 140
},
{
"epoch": 1.7407407407407407,
"grad_norm": 0.33846042235910045,
"learning_rate": 1.8690889463055285e-05,
"loss": 0.0346,
"step": 141
},
{
"epoch": 1.7530864197530864,
"grad_norm": 0.2983835469497029,
"learning_rate": 1.866025403784439e-05,
"loss": 0.0204,
"step": 142
},
{
"epoch": 1.765432098765432,
"grad_norm": 0.32286554382633115,
"learning_rate": 1.86292899966739e-05,
"loss": 0.0564,
"step": 143
},
{
"epoch": 1.7777777777777777,
"grad_norm": 0.29494797125215033,
"learning_rate": 1.8597998514483724e-05,
"loss": 0.0199,
"step": 144
},
{
"epoch": 1.7901234567901234,
"grad_norm": 0.31485674246853385,
"learning_rate": 1.856638077863863e-05,
"loss": 0.0246,
"step": 145
},
{
"epoch": 1.8024691358024691,
"grad_norm": 0.3553330613625899,
"learning_rate": 1.853443798888316e-05,
"loss": 0.0428,
"step": 146
},
{
"epoch": 1.8148148148148149,
"grad_norm": 0.4094124323752043,
"learning_rate": 1.8502171357296144e-05,
"loss": 0.0325,
"step": 147
},
{
"epoch": 1.8271604938271606,
"grad_norm": 0.4512274400089503,
"learning_rate": 1.8469582108244672e-05,
"loss": 0.0316,
"step": 148
},
{
"epoch": 1.8395061728395061,
"grad_norm": 0.3386207896347789,
"learning_rate": 1.8436671478337666e-05,
"loss": 0.0264,
"step": 149
},
{
"epoch": 1.8518518518518519,
"grad_norm": 0.30377308765711836,
"learning_rate": 1.840344071637893e-05,
"loss": 0.0256,
"step": 150
},
{
"epoch": 1.8641975308641974,
"grad_norm": 0.3047593078910109,
"learning_rate": 1.836989108331978e-05,
"loss": 0.0406,
"step": 151
},
{
"epoch": 1.876543209876543,
"grad_norm": 0.3453918783190626,
"learning_rate": 1.8336023852211197e-05,
"loss": 0.0293,
"step": 152
},
{
"epoch": 1.8888888888888888,
"grad_norm": 0.34020059500334965,
"learning_rate": 1.8301840308155507e-05,
"loss": 0.0376,
"step": 153
},
{
"epoch": 1.9012345679012346,
"grad_norm": 0.3898075678508035,
"learning_rate": 1.8267341748257636e-05,
"loss": 0.0339,
"step": 154
},
{
"epoch": 1.9135802469135803,
"grad_norm": 0.5039729606941286,
"learning_rate": 1.8232529481575874e-05,
"loss": 0.0237,
"step": 155
},
{
"epoch": 1.925925925925926,
"grad_norm": 0.39939423124671763,
"learning_rate": 1.8197404829072214e-05,
"loss": 0.0567,
"step": 156
},
{
"epoch": 1.9382716049382716,
"grad_norm": 0.3110373822433647,
"learning_rate": 1.816196912356222e-05,
"loss": 0.0263,
"step": 157
},
{
"epoch": 1.9506172839506173,
"grad_norm": 0.2730898361055386,
"learning_rate": 1.812622370966446e-05,
"loss": 0.0167,
"step": 158
},
{
"epoch": 1.9629629629629628,
"grad_norm": 0.3740775592122447,
"learning_rate": 1.8090169943749477e-05,
"loss": 0.0366,
"step": 159
},
{
"epoch": 1.9753086419753085,
"grad_norm": 0.3517482972136562,
"learning_rate": 1.8053809193888326e-05,
"loss": 0.0343,
"step": 160
},
{
"epoch": 1.9876543209876543,
"grad_norm": 0.3448201470407539,
"learning_rate": 1.8017142839800667e-05,
"loss": 0.0407,
"step": 161
},
{
"epoch": 2.0,
"grad_norm": 0.31463659127167554,
"learning_rate": 1.7980172272802398e-05,
"loss": 0.0326,
"step": 162
},
{
"epoch": 2.0123456790123457,
"grad_norm": 0.3119319557778762,
"learning_rate": 1.794289889575286e-05,
"loss": 0.0162,
"step": 163
},
{
"epoch": 2.0246913580246915,
"grad_norm": 0.32152041344430515,
"learning_rate": 1.7905324123001634e-05,
"loss": 0.0253,
"step": 164
},
{
"epoch": 2.037037037037037,
"grad_norm": 0.2773957765251134,
"learning_rate": 1.7867449380334834e-05,
"loss": 0.0203,
"step": 165
},
{
"epoch": 2.049382716049383,
"grad_norm": 0.22908870780721655,
"learning_rate": 1.782927610492103e-05,
"loss": 0.0186,
"step": 166
},
{
"epoch": 2.0617283950617282,
"grad_norm": 0.2955956970727338,
"learning_rate": 1.7790805745256703e-05,
"loss": 0.02,
"step": 167
},
{
"epoch": 2.074074074074074,
"grad_norm": 0.34426895222254217,
"learning_rate": 1.77520397611113e-05,
"loss": 0.0206,
"step": 168
},
{
"epoch": 2.0864197530864197,
"grad_norm": 0.3394383274911477,
"learning_rate": 1.771297962347181e-05,
"loss": 0.021,
"step": 169
},
{
"epoch": 2.0987654320987654,
"grad_norm": 0.3174782568269176,
"learning_rate": 1.767362681448697e-05,
"loss": 0.0223,
"step": 170
},
{
"epoch": 2.111111111111111,
"grad_norm": 0.28793132810552485,
"learning_rate": 1.763398282741103e-05,
"loss": 0.015,
"step": 171
},
{
"epoch": 2.123456790123457,
"grad_norm": 0.25904756194914924,
"learning_rate": 1.7594049166547073e-05,
"loss": 0.0152,
"step": 172
},
{
"epoch": 2.1358024691358026,
"grad_norm": 0.28872886550519916,
"learning_rate": 1.7553827347189937e-05,
"loss": 0.0218,
"step": 173
},
{
"epoch": 2.148148148148148,
"grad_norm": 0.3383745922979738,
"learning_rate": 1.7513318895568734e-05,
"loss": 0.0186,
"step": 174
},
{
"epoch": 2.1604938271604937,
"grad_norm": 0.3433669022459947,
"learning_rate": 1.747252534878891e-05,
"loss": 0.0244,
"step": 175
},
{
"epoch": 2.1728395061728394,
"grad_norm": 0.3096900118277167,
"learning_rate": 1.7431448254773943e-05,
"loss": 0.0247,
"step": 176
},
{
"epoch": 2.185185185185185,
"grad_norm": 0.3160661155338934,
"learning_rate": 1.7390089172206594e-05,
"loss": 0.0204,
"step": 177
},
{
"epoch": 2.197530864197531,
"grad_norm": 0.3031506421614271,
"learning_rate": 1.7348449670469758e-05,
"loss": 0.0247,
"step": 178
},
{
"epoch": 2.2098765432098766,
"grad_norm": 0.29699710884206193,
"learning_rate": 1.7306531329586933e-05,
"loss": 0.0212,
"step": 179
},
{
"epoch": 2.2222222222222223,
"grad_norm": 0.27055995416522066,
"learning_rate": 1.7264335740162244e-05,
"loss": 0.0141,
"step": 180
},
{
"epoch": 2.234567901234568,
"grad_norm": 0.2513720939615437,
"learning_rate": 1.7221864503320093e-05,
"loss": 0.0176,
"step": 181
},
{
"epoch": 2.246913580246914,
"grad_norm": 0.21463355334635534,
"learning_rate": 1.717911923064442e-05,
"loss": 0.0195,
"step": 182
},
{
"epoch": 2.259259259259259,
"grad_norm": 0.3057377106103357,
"learning_rate": 1.7136101544117526e-05,
"loss": 0.0216,
"step": 183
},
{
"epoch": 2.271604938271605,
"grad_norm": 0.29775222163099,
"learning_rate": 1.7092813076058536e-05,
"loss": 0.0275,
"step": 184
},
{
"epoch": 2.2839506172839505,
"grad_norm": 0.3055276236869669,
"learning_rate": 1.7049255469061476e-05,
"loss": 0.0344,
"step": 185
},
{
"epoch": 2.2962962962962963,
"grad_norm": 0.21549260314464128,
"learning_rate": 1.700543037593291e-05,
"loss": 0.0168,
"step": 186
},
{
"epoch": 2.308641975308642,
"grad_norm": 0.2617363419016656,
"learning_rate": 1.696133945962927e-05,
"loss": 0.0223,
"step": 187
},
{
"epoch": 2.3209876543209877,
"grad_norm": 0.24161925121125105,
"learning_rate": 1.6916984393193704e-05,
"loss": 0.0149,
"step": 188
},
{
"epoch": 2.3333333333333335,
"grad_norm": 0.3097073679033491,
"learning_rate": 1.687236685969263e-05,
"loss": 0.0278,
"step": 189
},
{
"epoch": 2.3456790123456788,
"grad_norm": 0.25261420518988137,
"learning_rate": 1.6827488552151855e-05,
"loss": 0.0209,
"step": 190
},
{
"epoch": 2.3580246913580245,
"grad_norm": 0.29919385437925194,
"learning_rate": 1.678235117349234e-05,
"loss": 0.021,
"step": 191
},
{
"epoch": 2.3703703703703702,
"grad_norm": 0.26627559608374113,
"learning_rate": 1.6736956436465573e-05,
"loss": 0.0156,
"step": 192
},
{
"epoch": 2.382716049382716,
"grad_norm": 0.315756273819458,
"learning_rate": 1.6691306063588583e-05,
"loss": 0.0119,
"step": 193
},
{
"epoch": 2.3950617283950617,
"grad_norm": 0.3521313813287778,
"learning_rate": 1.664540178707858e-05,
"loss": 0.0197,
"step": 194
},
{
"epoch": 2.4074074074074074,
"grad_norm": 0.23890807245570367,
"learning_rate": 1.659924534878723e-05,
"loss": 0.0161,
"step": 195
},
{
"epoch": 2.419753086419753,
"grad_norm": 0.23229593095773818,
"learning_rate": 1.655283850013454e-05,
"loss": 0.0082,
"step": 196
},
{
"epoch": 2.432098765432099,
"grad_norm": 0.2488497617155846,
"learning_rate": 1.650618300204242e-05,
"loss": 0.0154,
"step": 197
},
{
"epoch": 2.4444444444444446,
"grad_norm": 0.2846644237202055,
"learning_rate": 1.6459280624867876e-05,
"loss": 0.017,
"step": 198
},
{
"epoch": 2.45679012345679,
"grad_norm": 0.3307306653758879,
"learning_rate": 1.6412133148335786e-05,
"loss": 0.0253,
"step": 199
},
{
"epoch": 2.4691358024691357,
"grad_norm": 0.401524089447455,
"learning_rate": 1.6364742361471416e-05,
"loss": 0.0248,
"step": 200
},
{
"epoch": 2.4814814814814814,
"grad_norm": 0.24557760407685852,
"learning_rate": 1.631711006253251e-05,
"loss": 0.0219,
"step": 201
},
{
"epoch": 2.493827160493827,
"grad_norm": 0.2231254084874177,
"learning_rate": 1.626923805894107e-05,
"loss": 0.0185,
"step": 202
},
{
"epoch": 2.506172839506173,
"grad_norm": 0.2746670149754389,
"learning_rate": 1.6221128167214742e-05,
"loss": 0.0198,
"step": 203
},
{
"epoch": 2.5185185185185186,
"grad_norm": 0.22454413205332946,
"learning_rate": 1.617278221289793e-05,
"loss": 0.0131,
"step": 204
},
{
"epoch": 2.5308641975308643,
"grad_norm": 0.23942689316296767,
"learning_rate": 1.61242020304925e-05,
"loss": 0.0216,
"step": 205
},
{
"epoch": 2.5432098765432096,
"grad_norm": 0.3167065348394166,
"learning_rate": 1.607538946338817e-05,
"loss": 0.015,
"step": 206
},
{
"epoch": 2.5555555555555554,
"grad_norm": 0.3522580744129786,
"learning_rate": 1.6026346363792565e-05,
"loss": 0.0284,
"step": 207
},
{
"epoch": 2.567901234567901,
"grad_norm": 0.2988578750438895,
"learning_rate": 1.5977074592660936e-05,
"loss": 0.0166,
"step": 208
},
{
"epoch": 2.580246913580247,
"grad_norm": 0.2119054892726317,
"learning_rate": 1.592757601962555e-05,
"loss": 0.0096,
"step": 209
},
{
"epoch": 2.5925925925925926,
"grad_norm": 0.20741064911092552,
"learning_rate": 1.5877852522924733e-05,
"loss": 0.0191,
"step": 210
},
{
"epoch": 2.6049382716049383,
"grad_norm": 0.26294749753775415,
"learning_rate": 1.582790598933161e-05,
"loss": 0.0265,
"step": 211
},
{
"epoch": 2.617283950617284,
"grad_norm": 0.2521344397345761,
"learning_rate": 1.5777738314082514e-05,
"loss": 0.0174,
"step": 212
},
{
"epoch": 2.6296296296296298,
"grad_norm": 0.2090224535341981,
"learning_rate": 1.5727351400805054e-05,
"loss": 0.0159,
"step": 213
},
{
"epoch": 2.6419753086419755,
"grad_norm": 0.21033486854745503,
"learning_rate": 1.5676747161445903e-05,
"loss": 0.0236,
"step": 214
},
{
"epoch": 2.6543209876543212,
"grad_norm": 0.19657648924979762,
"learning_rate": 1.5625927516198235e-05,
"loss": 0.0178,
"step": 215
},
{
"epoch": 2.6666666666666665,
"grad_norm": 0.2411052605645195,
"learning_rate": 1.5574894393428856e-05,
"loss": 0.0111,
"step": 216
},
{
"epoch": 2.6790123456790123,
"grad_norm": 0.31954942273313647,
"learning_rate": 1.552364972960506e-05,
"loss": 0.026,
"step": 217
},
{
"epoch": 2.691358024691358,
"grad_norm": 0.2838958762793406,
"learning_rate": 1.5472195469221115e-05,
"loss": 0.0309,
"step": 218
},
{
"epoch": 2.7037037037037037,
"grad_norm": 0.20041777320173626,
"learning_rate": 1.5420533564724495e-05,
"loss": 0.0183,
"step": 219
},
{
"epoch": 2.7160493827160495,
"grad_norm": 0.2450116676808879,
"learning_rate": 1.5368665976441802e-05,
"loss": 0.0209,
"step": 220
},
{
"epoch": 2.728395061728395,
"grad_norm": 0.23961047907329863,
"learning_rate": 1.5316594672504362e-05,
"loss": 0.0177,
"step": 221
},
{
"epoch": 2.7407407407407405,
"grad_norm": 0.22107765498822843,
"learning_rate": 1.526432162877356e-05,
"loss": 0.0115,
"step": 222
},
{
"epoch": 2.753086419753086,
"grad_norm": 0.283474952878893,
"learning_rate": 1.5211848828765852e-05,
"loss": 0.034,
"step": 223
},
{
"epoch": 2.765432098765432,
"grad_norm": 0.21699559982290464,
"learning_rate": 1.5159178263577512e-05,
"loss": 0.0101,
"step": 224
},
{
"epoch": 2.7777777777777777,
"grad_norm": 0.22698992910116217,
"learning_rate": 1.510631193180907e-05,
"loss": 0.0251,
"step": 225
},
{
"epoch": 2.7901234567901234,
"grad_norm": 0.21582178770974225,
"learning_rate": 1.5053251839489482e-05,
"loss": 0.0108,
"step": 226
},
{
"epoch": 2.802469135802469,
"grad_norm": 0.26829257899099024,
"learning_rate": 1.5000000000000002e-05,
"loss": 0.0209,
"step": 227
},
{
"epoch": 2.814814814814815,
"grad_norm": 0.3507275921603246,
"learning_rate": 1.4946558433997792e-05,
"loss": 0.0264,
"step": 228
},
{
"epoch": 2.8271604938271606,
"grad_norm": 0.17972895444262435,
"learning_rate": 1.4892929169339237e-05,
"loss": 0.0064,
"step": 229
},
{
"epoch": 2.8395061728395063,
"grad_norm": 0.2617950316333363,
"learning_rate": 1.4839114241003017e-05,
"loss": 0.0205,
"step": 230
},
{
"epoch": 2.851851851851852,
"grad_norm": 0.21854432114775668,
"learning_rate": 1.4785115691012866e-05,
"loss": 0.0133,
"step": 231
},
{
"epoch": 2.8641975308641974,
"grad_norm": 0.3086185413115422,
"learning_rate": 1.4730935568360103e-05,
"loss": 0.0258,
"step": 232
},
{
"epoch": 2.876543209876543,
"grad_norm": 0.26360881813895337,
"learning_rate": 1.4676575928925869e-05,
"loss": 0.0122,
"step": 233
},
{
"epoch": 2.888888888888889,
"grad_norm": 0.19397354302951686,
"learning_rate": 1.4622038835403135e-05,
"loss": 0.0189,
"step": 234
},
{
"epoch": 2.9012345679012346,
"grad_norm": 0.27942768599957285,
"learning_rate": 1.4567326357218408e-05,
"loss": 0.025,
"step": 235
},
{
"epoch": 2.9135802469135803,
"grad_norm": 0.2950242324226194,
"learning_rate": 1.451244057045323e-05,
"loss": 0.0365,
"step": 236
},
{
"epoch": 2.925925925925926,
"grad_norm": 0.17975187594689376,
"learning_rate": 1.4457383557765385e-05,
"loss": 0.0188,
"step": 237
},
{
"epoch": 2.9382716049382713,
"grad_norm": 0.23376665230749175,
"learning_rate": 1.4402157408309876e-05,
"loss": 0.0099,
"step": 238
},
{
"epoch": 2.950617283950617,
"grad_norm": 0.19019995851535298,
"learning_rate": 1.4346764217659652e-05,
"loss": 0.0162,
"step": 239
},
{
"epoch": 2.962962962962963,
"grad_norm": 0.2214940835243357,
"learning_rate": 1.429120608772609e-05,
"loss": 0.0237,
"step": 240
},
{
"epoch": 2.9753086419753085,
"grad_norm": 0.2627103929594842,
"learning_rate": 1.4235485126679244e-05,
"loss": 0.0157,
"step": 241
},
{
"epoch": 2.9876543209876543,
"grad_norm": 0.2682992652202373,
"learning_rate": 1.4179603448867836e-05,
"loss": 0.0298,
"step": 242
},
{
"epoch": 3.0,
"grad_norm": 0.298232489464775,
"learning_rate": 1.4123563174739036e-05,
"loss": 0.0152,
"step": 243
},
{
"epoch": 3.0123456790123457,
"grad_norm": 0.12740632304155414,
"learning_rate": 1.4067366430758004e-05,
"loss": 0.0059,
"step": 244
},
{
"epoch": 3.0246913580246915,
"grad_norm": 0.19794558417137054,
"learning_rate": 1.4011015349327188e-05,
"loss": 0.0088,
"step": 245
},
{
"epoch": 3.037037037037037,
"grad_norm": 0.1922683196961256,
"learning_rate": 1.3954512068705425e-05,
"loss": 0.0081,
"step": 246
},
{
"epoch": 3.049382716049383,
"grad_norm": 0.17829186059186022,
"learning_rate": 1.3897858732926794e-05,
"loss": 0.0071,
"step": 247
},
{
"epoch": 3.0617283950617282,
"grad_norm": 0.14264428973058538,
"learning_rate": 1.3841057491719261e-05,
"loss": 0.0096,
"step": 248
},
{
"epoch": 3.074074074074074,
"grad_norm": 0.2266151450801307,
"learning_rate": 1.3784110500423104e-05,
"loss": 0.0154,
"step": 249
},
{
"epoch": 3.0864197530864197,
"grad_norm": 0.2197194981703044,
"learning_rate": 1.372701991990914e-05,
"loss": 0.0231,
"step": 250
},
{
"epoch": 3.0987654320987654,
"grad_norm": 0.21131738021284832,
"learning_rate": 1.3669787916496722e-05,
"loss": 0.0094,
"step": 251
},
{
"epoch": 3.111111111111111,
"grad_norm": 0.20464969805446995,
"learning_rate": 1.3612416661871532e-05,
"loss": 0.0085,
"step": 252
},
{
"epoch": 3.123456790123457,
"grad_norm": 0.21532674194312212,
"learning_rate": 1.355490833300318e-05,
"loss": 0.0087,
"step": 253
},
{
"epoch": 3.1358024691358026,
"grad_norm": 0.16104981288328868,
"learning_rate": 1.3497265112062613e-05,
"loss": 0.0055,
"step": 254
},
{
"epoch": 3.148148148148148,
"grad_norm": 0.24076058570830933,
"learning_rate": 1.3439489186339283e-05,
"loss": 0.008,
"step": 255
},
{
"epoch": 3.1604938271604937,
"grad_norm": 0.2604821262949657,
"learning_rate": 1.3381582748158173e-05,
"loss": 0.0231,
"step": 256
},
{
"epoch": 3.1728395061728394,
"grad_norm": 0.24580486733045098,
"learning_rate": 1.3323547994796597e-05,
"loss": 0.0174,
"step": 257
},
{
"epoch": 3.185185185185185,
"grad_norm": 0.3154617969549976,
"learning_rate": 1.3265387128400833e-05,
"loss": 0.0092,
"step": 258
},
{
"epoch": 3.197530864197531,
"grad_norm": 0.2224133620360006,
"learning_rate": 1.3207102355902553e-05,
"loss": 0.0129,
"step": 259
},
{
"epoch": 3.2098765432098766,
"grad_norm": 0.281666620283078,
"learning_rate": 1.314869588893508e-05,
"loss": 0.0114,
"step": 260
},
{
"epoch": 3.2222222222222223,
"grad_norm": 0.22582566658451753,
"learning_rate": 1.3090169943749475e-05,
"loss": 0.0125,
"step": 261
},
{
"epoch": 3.234567901234568,
"grad_norm": 0.20743608754259807,
"learning_rate": 1.3031526741130435e-05,
"loss": 0.0133,
"step": 262
},
{
"epoch": 3.246913580246914,
"grad_norm": 0.1823778790146933,
"learning_rate": 1.2972768506312028e-05,
"loss": 0.0097,
"step": 263
},
{
"epoch": 3.259259259259259,
"grad_norm": 0.24794430856710525,
"learning_rate": 1.2913897468893249e-05,
"loss": 0.0081,
"step": 264
},
{
"epoch": 3.271604938271605,
"grad_norm": 0.22571751396689432,
"learning_rate": 1.2854915862753424e-05,
"loss": 0.0114,
"step": 265
},
{
"epoch": 3.2839506172839505,
"grad_norm": 0.19127790036678968,
"learning_rate": 1.279582592596744e-05,
"loss": 0.009,
"step": 266
},
{
"epoch": 3.2962962962962963,
"grad_norm": 0.21071669945822885,
"learning_rate": 1.2736629900720832e-05,
"loss": 0.0155,
"step": 267
},
{
"epoch": 3.308641975308642,
"grad_norm": 0.15143256661237275,
"learning_rate": 1.2677330033224681e-05,
"loss": 0.0065,
"step": 268
},
{
"epoch": 3.3209876543209877,
"grad_norm": 0.23256148298533083,
"learning_rate": 1.2617928573630405e-05,
"loss": 0.0124,
"step": 269
},
{
"epoch": 3.3333333333333335,
"grad_norm": 0.22582573521296015,
"learning_rate": 1.2558427775944357e-05,
"loss": 0.0216,
"step": 270
},
{
"epoch": 3.3456790123456788,
"grad_norm": 0.13193162285788634,
"learning_rate": 1.2498829897942308e-05,
"loss": 0.011,
"step": 271
},
{
"epoch": 3.3580246913580245,
"grad_norm": 0.312976420046679,
"learning_rate": 1.2439137201083772e-05,
"loss": 0.0106,
"step": 272
},
{
"epoch": 3.3703703703703702,
"grad_norm": 0.19625961746503687,
"learning_rate": 1.2379351950426188e-05,
"loss": 0.0089,
"step": 273
},
{
"epoch": 3.382716049382716,
"grad_norm": 0.16447584005359223,
"learning_rate": 1.2319476414538982e-05,
"loss": 0.0063,
"step": 274
},
{
"epoch": 3.3950617283950617,
"grad_norm": 0.1965358769188583,
"learning_rate": 1.2259512865417478e-05,
"loss": 0.0079,
"step": 275
},
{
"epoch": 3.4074074074074074,
"grad_norm": 0.2160518293938132,
"learning_rate": 1.2199463578396688e-05,
"loss": 0.0115,
"step": 276
},
{
"epoch": 3.419753086419753,
"grad_norm": 0.15127756963407038,
"learning_rate": 1.2139330832064975e-05,
"loss": 0.0094,
"step": 277
},
{
"epoch": 3.432098765432099,
"grad_norm": 0.15670354823656912,
"learning_rate": 1.2079116908177592e-05,
"loss": 0.0067,
"step": 278
},
{
"epoch": 3.4444444444444446,
"grad_norm": 0.1832029547236401,
"learning_rate": 1.2018824091570103e-05,
"loss": 0.0076,
"step": 279
},
{
"epoch": 3.45679012345679,
"grad_norm": 0.1847955538396133,
"learning_rate": 1.1958454670071671e-05,
"loss": 0.0068,
"step": 280
},
{
"epoch": 3.4691358024691357,
"grad_norm": 0.16708923166356573,
"learning_rate": 1.1898010934418261e-05,
"loss": 0.0077,
"step": 281
},
{
"epoch": 3.4814814814814814,
"grad_norm": 0.219254384910188,
"learning_rate": 1.1837495178165706e-05,
"loss": 0.0136,
"step": 282
},
{
"epoch": 3.493827160493827,
"grad_norm": 0.3031509215534536,
"learning_rate": 1.177690969760269e-05,
"loss": 0.0078,
"step": 283
},
{
"epoch": 3.506172839506173,
"grad_norm": 0.20587394181029134,
"learning_rate": 1.1716256791663599e-05,
"loss": 0.0107,
"step": 284
},
{
"epoch": 3.5185185185185186,
"grad_norm": 0.18349458503479488,
"learning_rate": 1.16555387618413e-05,
"loss": 0.0114,
"step": 285
},
{
"epoch": 3.5308641975308643,
"grad_norm": 0.21953256031427928,
"learning_rate": 1.159475791209981e-05,
"loss": 0.021,
"step": 286
},
{
"epoch": 3.5432098765432096,
"grad_norm": 0.16802927221598177,
"learning_rate": 1.1533916548786856e-05,
"loss": 0.017,
"step": 287
},
{
"epoch": 3.5555555555555554,
"grad_norm": 0.17437530961491682,
"learning_rate": 1.1473016980546377e-05,
"loss": 0.0082,
"step": 288
},
{
"epoch": 3.567901234567901,
"grad_norm": 0.18753660249190723,
"learning_rate": 1.1412061518230916e-05,
"loss": 0.0104,
"step": 289
},
{
"epoch": 3.580246913580247,
"grad_norm": 0.1961816817103867,
"learning_rate": 1.135105247481393e-05,
"loss": 0.0151,
"step": 290
},
{
"epoch": 3.5925925925925926,
"grad_norm": 0.21880871777689972,
"learning_rate": 1.1289992165302036e-05,
"loss": 0.0114,
"step": 291
},
{
"epoch": 3.6049382716049383,
"grad_norm": 0.1704995800988171,
"learning_rate": 1.1228882906647142e-05,
"loss": 0.0082,
"step": 292
},
{
"epoch": 3.617283950617284,
"grad_norm": 0.2617854020277945,
"learning_rate": 1.1167727017658562e-05,
"loss": 0.0116,
"step": 293
},
{
"epoch": 3.6296296296296298,
"grad_norm": 0.17168277914622568,
"learning_rate": 1.1106526818915008e-05,
"loss": 0.0083,
"step": 294
},
{
"epoch": 3.6419753086419755,
"grad_norm": 0.23889824190420522,
"learning_rate": 1.1045284632676535e-05,
"loss": 0.0147,
"step": 295
},
{
"epoch": 3.6543209876543212,
"grad_norm": 0.1539777031660912,
"learning_rate": 1.0984002782796427e-05,
"loss": 0.0164,
"step": 296
},
{
"epoch": 3.6666666666666665,
"grad_norm": 0.15189002638648533,
"learning_rate": 1.092268359463302e-05,
"loss": 0.0093,
"step": 297
},
{
"epoch": 3.6790123456790123,
"grad_norm": 0.1931624710226531,
"learning_rate": 1.0861329394961461e-05,
"loss": 0.0159,
"step": 298
},
{
"epoch": 3.691358024691358,
"grad_norm": 0.19079183444633152,
"learning_rate": 1.0799942511885417e-05,
"loss": 0.0123,
"step": 299
},
{
"epoch": 3.7037037037037037,
"grad_norm": 0.19983669205081916,
"learning_rate": 1.073852527474874e-05,
"loss": 0.0107,
"step": 300
},
{
"epoch": 3.7160493827160495,
"grad_norm": 0.20550325855050947,
"learning_rate": 1.0677080014047076e-05,
"loss": 0.004,
"step": 301
},
{
"epoch": 3.728395061728395,
"grad_norm": 0.2164226790088601,
"learning_rate": 1.0615609061339431e-05,
"loss": 0.011,
"step": 302
},
{
"epoch": 3.7407407407407405,
"grad_norm": 0.14412057991078386,
"learning_rate": 1.05541147491597e-05,
"loss": 0.0051,
"step": 303
},
{
"epoch": 3.753086419753086,
"grad_norm": 0.2650461598250637,
"learning_rate": 1.049259941092817e-05,
"loss": 0.0101,
"step": 304
},
{
"epoch": 3.765432098765432,
"grad_norm": 0.30085979106051064,
"learning_rate": 1.0431065380862959e-05,
"loss": 0.0132,
"step": 305
},
{
"epoch": 3.7777777777777777,
"grad_norm": 0.24498181798018345,
"learning_rate": 1.0369514993891451e-05,
"loss": 0.0106,
"step": 306
},
{
"epoch": 3.7901234567901234,
"grad_norm": 0.2082184557258639,
"learning_rate": 1.0307950585561705e-05,
"loss": 0.0109,
"step": 307
},
{
"epoch": 3.802469135802469,
"grad_norm": 0.1840429660303778,
"learning_rate": 1.0246374491953823e-05,
"loss": 0.0057,
"step": 308
},
{
"epoch": 3.814814814814815,
"grad_norm": 0.1543192699947808,
"learning_rate": 1.01847890495913e-05,
"loss": 0.0057,
"step": 309
},
{
"epoch": 3.8271604938271606,
"grad_norm": 0.31085221751147407,
"learning_rate": 1.0123196595352385e-05,
"loss": 0.0182,
"step": 310
},
{
"epoch": 3.8395061728395063,
"grad_norm": 0.1871258490697948,
"learning_rate": 1.0061599466381388e-05,
"loss": 0.0145,
"step": 311
},
{
"epoch": 3.851851851851852,
"grad_norm": 0.1551317940376091,
"learning_rate": 1e-05,
"loss": 0.0064,
"step": 312
},
{
"epoch": 3.8641975308641974,
"grad_norm": 0.12067080386439995,
"learning_rate": 9.938400533618615e-06,
"loss": 0.0086,
"step": 313
},
{
"epoch": 3.876543209876543,
"grad_norm": 0.14504074540723555,
"learning_rate": 9.876803404647617e-06,
"loss": 0.0047,
"step": 314
},
{
"epoch": 3.888888888888889,
"grad_norm": 0.2391873275575988,
"learning_rate": 9.815210950408703e-06,
"loss": 0.0075,
"step": 315
},
{
"epoch": 3.9012345679012346,
"grad_norm": 0.20353348393758977,
"learning_rate": 9.753625508046183e-06,
"loss": 0.0227,
"step": 316
},
{
"epoch": 3.9135802469135803,
"grad_norm": 0.20785860276808238,
"learning_rate": 9.692049414438298e-06,
"loss": 0.0191,
"step": 317
},
{
"epoch": 3.925925925925926,
"grad_norm": 0.1771402757741428,
"learning_rate": 9.630485006108554e-06,
"loss": 0.0074,
"step": 318
},
{
"epoch": 3.9382716049382713,
"grad_norm": 0.23082418089890744,
"learning_rate": 9.568934619137048e-06,
"loss": 0.0181,
"step": 319
},
{
"epoch": 3.950617283950617,
"grad_norm": 0.1985690884699008,
"learning_rate": 9.507400589071833e-06,
"loss": 0.0116,
"step": 320
},
{
"epoch": 3.962962962962963,
"grad_norm": 0.18052282236312095,
"learning_rate": 9.445885250840301e-06,
"loss": 0.008,
"step": 321
},
{
"epoch": 3.9753086419753085,
"grad_norm": 0.18304117085309365,
"learning_rate": 9.384390938660572e-06,
"loss": 0.0075,
"step": 322
},
{
"epoch": 3.9876543209876543,
"grad_norm": 0.1352378832885016,
"learning_rate": 9.322919985952926e-06,
"loss": 0.0043,
"step": 323
},
{
"epoch": 4.0,
"grad_norm": 0.1888441520514094,
"learning_rate": 9.261474725251261e-06,
"loss": 0.0066,
"step": 324
},
{
"epoch": 4.012345679012346,
"grad_norm": 0.17576556583410355,
"learning_rate": 9.200057488114585e-06,
"loss": 0.0111,
"step": 325
},
{
"epoch": 4.0246913580246915,
"grad_norm": 0.1786262879981403,
"learning_rate": 9.138670605038542e-06,
"loss": 0.0051,
"step": 326
},
{
"epoch": 4.037037037037037,
"grad_norm": 0.18990489010264094,
"learning_rate": 9.07731640536698e-06,
"loss": 0.0071,
"step": 327
},
{
"epoch": 4.049382716049383,
"grad_norm": 0.12318159558833343,
"learning_rate": 9.015997217203574e-06,
"loss": 0.0034,
"step": 328
},
{
"epoch": 4.061728395061729,
"grad_norm": 0.13751334569727902,
"learning_rate": 8.954715367323468e-06,
"loss": 0.0063,
"step": 329
},
{
"epoch": 4.074074074074074,
"grad_norm": 0.13146054154726655,
"learning_rate": 8.893473181084993e-06,
"loss": 0.0047,
"step": 330
},
{
"epoch": 4.08641975308642,
"grad_norm": 0.15881793905029615,
"learning_rate": 8.83227298234144e-06,
"loss": 0.0086,
"step": 331
},
{
"epoch": 4.098765432098766,
"grad_norm": 0.12142724241545133,
"learning_rate": 8.771117093352861e-06,
"loss": 0.0051,
"step": 332
},
{
"epoch": 4.111111111111111,
"grad_norm": 0.2286294288273942,
"learning_rate": 8.71000783469797e-06,
"loss": 0.0121,
"step": 333
},
{
"epoch": 4.1234567901234565,
"grad_norm": 0.16035361370524753,
"learning_rate": 8.648947525186073e-06,
"loss": 0.005,
"step": 334
},
{
"epoch": 4.135802469135802,
"grad_norm": 0.1828509697608793,
"learning_rate": 8.58793848176909e-06,
"loss": 0.0058,
"step": 335
},
{
"epoch": 4.148148148148148,
"grad_norm": 0.18114371230252813,
"learning_rate": 8.526983019453624e-06,
"loss": 0.0041,
"step": 336
},
{
"epoch": 4.160493827160494,
"grad_norm": 0.13123399087407325,
"learning_rate": 8.466083451213145e-06,
"loss": 0.0055,
"step": 337
},
{
"epoch": 4.172839506172839,
"grad_norm": 0.15545804752592987,
"learning_rate": 8.405242087900192e-06,
"loss": 0.0031,
"step": 338
},
{
"epoch": 4.185185185185185,
"grad_norm": 0.14836867939239134,
"learning_rate": 8.3444612381587e-06,
"loss": 0.0105,
"step": 339
},
{
"epoch": 4.197530864197531,
"grad_norm": 0.205738715941424,
"learning_rate": 8.283743208336403e-06,
"loss": 0.0055,
"step": 340
},
{
"epoch": 4.209876543209877,
"grad_norm": 0.21183835509861776,
"learning_rate": 8.223090302397313e-06,
"loss": 0.006,
"step": 341
},
{
"epoch": 4.222222222222222,
"grad_norm": 0.15257239381928547,
"learning_rate": 8.162504821834296e-06,
"loss": 0.0047,
"step": 342
},
{
"epoch": 4.234567901234568,
"grad_norm": 0.11658733556890352,
"learning_rate": 8.101989065581742e-06,
"loss": 0.0068,
"step": 343
},
{
"epoch": 4.246913580246914,
"grad_norm": 0.13573163174587216,
"learning_rate": 8.041545329928332e-06,
"loss": 0.0123,
"step": 344
},
{
"epoch": 4.2592592592592595,
"grad_norm": 0.13777699068418175,
"learning_rate": 7.9811759084299e-06,
"loss": 0.0097,
"step": 345
},
{
"epoch": 4.271604938271605,
"grad_norm": 0.15763381172079863,
"learning_rate": 7.92088309182241e-06,
"loss": 0.0087,
"step": 346
},
{
"epoch": 4.283950617283951,
"grad_norm": 0.15479605806346516,
"learning_rate": 7.860669167935028e-06,
"loss": 0.0032,
"step": 347
},
{
"epoch": 4.296296296296296,
"grad_norm": 0.1586828874171454,
"learning_rate": 7.800536421603317e-06,
"loss": 0.008,
"step": 348
},
{
"epoch": 4.308641975308642,
"grad_norm": 0.15327141154299848,
"learning_rate": 7.740487134582527e-06,
"loss": 0.0059,
"step": 349
},
{
"epoch": 4.320987654320987,
"grad_norm": 0.140301040005369,
"learning_rate": 7.680523585461021e-06,
"loss": 0.0062,
"step": 350
},
{
"epoch": 4.333333333333333,
"grad_norm": 0.12438760539558087,
"learning_rate": 7.620648049573815e-06,
"loss": 0.0033,
"step": 351
},
{
"epoch": 4.345679012345679,
"grad_norm": 0.15251811244541832,
"learning_rate": 7.560862798916229e-06,
"loss": 0.0066,
"step": 352
},
{
"epoch": 4.3580246913580245,
"grad_norm": 0.12017480297474846,
"learning_rate": 7.501170102057691e-06,
"loss": 0.0033,
"step": 353
},
{
"epoch": 4.37037037037037,
"grad_norm": 0.07857457205022518,
"learning_rate": 7.441572224055644e-06,
"loss": 0.0021,
"step": 354
},
{
"epoch": 4.382716049382716,
"grad_norm": 0.1278711203666051,
"learning_rate": 7.382071426369597e-06,
"loss": 0.0064,
"step": 355
},
{
"epoch": 4.395061728395062,
"grad_norm": 0.18131359376325312,
"learning_rate": 7.322669966775321e-06,
"loss": 0.0095,
"step": 356
},
{
"epoch": 4.407407407407407,
"grad_norm": 0.09723568551494033,
"learning_rate": 7.263370099279173e-06,
"loss": 0.0027,
"step": 357
},
{
"epoch": 4.419753086419753,
"grad_norm": 0.1672781410819625,
"learning_rate": 7.204174074032562e-06,
"loss": 0.0066,
"step": 358
},
{
"epoch": 4.432098765432099,
"grad_norm": 0.1258158648501038,
"learning_rate": 7.1450841372465806e-06,
"loss": 0.0056,
"step": 359
},
{
"epoch": 4.444444444444445,
"grad_norm": 0.11731253934039346,
"learning_rate": 7.086102531106755e-06,
"loss": 0.0031,
"step": 360
},
{
"epoch": 4.45679012345679,
"grad_norm": 0.21138242950668157,
"learning_rate": 7.027231493687974e-06,
"loss": 0.0058,
"step": 361
},
{
"epoch": 4.469135802469136,
"grad_norm": 0.12057312296375491,
"learning_rate": 6.968473258869566e-06,
"loss": 0.0055,
"step": 362
},
{
"epoch": 4.481481481481482,
"grad_norm": 0.18470894070091085,
"learning_rate": 6.909830056250527e-06,
"loss": 0.0061,
"step": 363
},
{
"epoch": 4.493827160493828,
"grad_norm": 0.10365823512721326,
"learning_rate": 6.851304111064923e-06,
"loss": 0.004,
"step": 364
},
{
"epoch": 4.506172839506172,
"grad_norm": 0.14863072835909422,
"learning_rate": 6.7928976440974504e-06,
"loss": 0.0064,
"step": 365
},
{
"epoch": 4.518518518518518,
"grad_norm": 0.11216394972543642,
"learning_rate": 6.734612871599169e-06,
"loss": 0.0047,
"step": 366
},
{
"epoch": 4.530864197530864,
"grad_norm": 0.1769379177433203,
"learning_rate": 6.6764520052034054e-06,
"loss": 0.0071,
"step": 367
},
{
"epoch": 4.54320987654321,
"grad_norm": 0.11523498585679909,
"learning_rate": 6.618417251841829e-06,
"loss": 0.0052,
"step": 368
},
{
"epoch": 4.555555555555555,
"grad_norm": 0.213990728294252,
"learning_rate": 6.560510813660719e-06,
"loss": 0.01,
"step": 369
},
{
"epoch": 4.567901234567901,
"grad_norm": 0.22242402616595255,
"learning_rate": 6.502734887937389e-
| "loss": 0.0091, |
| "step": 370 |
| }, |
| { |
| "epoch": 4.580246913580247, |
| "grad_norm": 0.22736754834624315, |
| "learning_rate": 6.44509166699682e-06, |
| "loss": 0.0098, |
| "step": 371 |
| }, |
| { |
| "epoch": 4.592592592592593, |
| "grad_norm": 0.12867942855007472, |
| "learning_rate": 6.387583338128471e-06, |
| "loss": 0.0035, |
| "step": 372 |
| }, |
| { |
| "epoch": 4.604938271604938, |
| "grad_norm": 0.12725911519226457, |
| "learning_rate": 6.33021208350328e-06, |
| "loss": 0.0064, |
| "step": 373 |
| }, |
| { |
| "epoch": 4.617283950617284, |
| "grad_norm": 0.17702466948892448, |
| "learning_rate": 6.27298008009086e-06, |
| "loss": 0.0054, |
| "step": 374 |
| }, |
| { |
| "epoch": 4.62962962962963, |
| "grad_norm": 0.09649965912500313, |
| "learning_rate": 6.215889499576898e-06, |
| "loss": 0.0074, |
| "step": 375 |
| }, |
| { |
| "epoch": 4.6419753086419755, |
| "grad_norm": 0.1616613279100784, |
| "learning_rate": 6.158942508280743e-06, |
| "loss": 0.0069, |
| "step": 376 |
| }, |
| { |
| "epoch": 4.654320987654321, |
| "grad_norm": 0.18968833143117372, |
| "learning_rate": 6.102141267073207e-06, |
| "loss": 0.0077, |
| "step": 377 |
| }, |
| { |
| "epoch": 4.666666666666667, |
| "grad_norm": 0.16248020648691364, |
| "learning_rate": 6.0454879312945755e-06, |
| "loss": 0.0078, |
| "step": 378 |
| }, |
| { |
| "epoch": 4.679012345679013, |
| "grad_norm": 0.12051760198559061, |
| "learning_rate": 5.988984650672813e-06, |
| "loss": 0.0034, |
| "step": 379 |
| }, |
| { |
| "epoch": 4.6913580246913575, |
| "grad_norm": 0.13200347199182663, |
| "learning_rate": 5.932633569242e-06, |
| "loss": 0.0098, |
| "step": 380 |
| }, |
| { |
| "epoch": 4.703703703703704, |
| "grad_norm": 0.16681661954858018, |
| "learning_rate": 5.876436825260967e-06, |
| "loss": 0.0041, |
| "step": 381 |
| }, |
| { |
| "epoch": 4.716049382716049, |
| "grad_norm": 0.14006551980072618, |
| "learning_rate": 5.82039655113217e-06, |
| "loss": 0.0058, |
| "step": 382 |
| }, |
| { |
| "epoch": 4.728395061728395, |
| "grad_norm": 0.15271237139456825, |
| "learning_rate": 5.764514873320761e-06, |
| "loss": 0.0069, |
| "step": 383 |
| }, |
| { |
| "epoch": 4.7407407407407405, |
| "grad_norm": 0.09090612510274507, |
| "learning_rate": 5.708793912273911e-06, |
| "loss": 0.003, |
| "step": 384 |
| }, |
| { |
| "epoch": 4.753086419753086, |
| "grad_norm": 0.2582667643739463, |
| "learning_rate": 5.653235782340351e-06, |
| "loss": 0.0048, |
| "step": 385 |
| }, |
| { |
| "epoch": 4.765432098765432, |
| "grad_norm": 0.11419791645573754, |
| "learning_rate": 5.597842591690128e-06, |
| "loss": 0.0029, |
| "step": 386 |
| }, |
| { |
| "epoch": 4.777777777777778, |
| "grad_norm": 0.12658265868721003, |
| "learning_rate": 5.542616442234618e-06, |
| "loss": 0.0052, |
| "step": 387 |
| }, |
| { |
| "epoch": 4.790123456790123, |
| "grad_norm": 0.11780861249847288, |
| "learning_rate": 5.487559429546772e-06, |
| "loss": 0.003, |
| "step": 388 |
| }, |
| { |
| "epoch": 4.802469135802469, |
| "grad_norm": 0.10635775400249556, |
| "learning_rate": 5.432673642781595e-06, |
| "loss": 0.0056, |
| "step": 389 |
| }, |
| { |
| "epoch": 4.814814814814815, |
| "grad_norm": 0.1886486359438476, |
| "learning_rate": 5.3779611645968696e-06, |
| "loss": 0.0035, |
| "step": 390 |
| }, |
| { |
| "epoch": 4.827160493827161, |
| "grad_norm": 0.11101625407249367, |
| "learning_rate": 5.3234240710741335e-06, |
| "loss": 0.003, |
| "step": 391 |
| }, |
| { |
| "epoch": 4.839506172839506, |
| "grad_norm": 0.20857170942236092, |
| "learning_rate": 5.269064431639901e-06, |
| "loss": 0.0058, |
| "step": 392 |
| }, |
| { |
| "epoch": 4.851851851851852, |
| "grad_norm": 0.14690806172755955, |
| "learning_rate": 5.214884308987136e-06, |
| "loss": 0.0046, |
| "step": 393 |
| }, |
| { |
| "epoch": 4.864197530864198, |
| "grad_norm": 0.11451036979746576, |
| "learning_rate": 5.160885758996985e-06, |
| "loss": 0.0033, |
| "step": 394 |
| }, |
| { |
| "epoch": 4.8765432098765435, |
| "grad_norm": 0.16454312350788597, |
| "learning_rate": 5.107070830660765e-06, |
| "loss": 0.0033, |
| "step": 395 |
| }, |
| { |
| "epoch": 4.888888888888889, |
| "grad_norm": 0.0788817718822425, |
| "learning_rate": 5.053441566002214e-06, |
| "loss": 0.0027, |
| "step": 396 |
| }, |
| { |
| "epoch": 4.901234567901234, |
| "grad_norm": 0.14669951303785775, |
| "learning_rate": 5.000000000000003e-06, |
| "loss": 0.0081, |
| "step": 397 |
| }, |
| { |
| "epoch": 4.91358024691358, |
| "grad_norm": 0.10678859264163215, |
| "learning_rate": 4.946748160510522e-06, |
| "loss": 0.0026, |
| "step": 398 |
| }, |
| { |
| "epoch": 4.925925925925926, |
| "grad_norm": 0.13706317316845099, |
| "learning_rate": 4.893688068190933e-06, |
| "loss": 0.011, |
| "step": 399 |
| }, |
| { |
| "epoch": 4.938271604938271, |
| "grad_norm": 0.17403711057746862, |
| "learning_rate": 4.8408217364224886e-06, |
| "loss": 0.0135, |
| "step": 400 |
| }, |
| { |
| "epoch": 4.950617283950617, |
| "grad_norm": 0.11398641090194171, |
| "learning_rate": 4.788151171234149e-06, |
| "loss": 0.0027, |
| "step": 401 |
| }, |
| { |
| "epoch": 4.962962962962963, |
| "grad_norm": 0.13576944911004699, |
| "learning_rate": 4.7356783712264405e-06, |
| "loss": 0.0076, |
| "step": 402 |
| }, |
| { |
| "epoch": 4.9753086419753085, |
| "grad_norm": 0.12391596280841162, |
| "learning_rate": 4.683405327495638e-06, |
| "loss": 0.0039, |
| "step": 403 |
| }, |
| { |
| "epoch": 4.987654320987654, |
| "grad_norm": 0.14003588815325335, |
| "learning_rate": 4.631334023558199e-06, |
| "loss": 0.0036, |
| "step": 404 |
| }, |
| { |
| "epoch": 5.0, |
| "grad_norm": 0.13760549101766917, |
| "learning_rate": 4.579466435275506e-06, |
| "loss": 0.0044, |
| "step": 405 |
| }, |
| { |
| "epoch": 5.012345679012346, |
| "grad_norm": 0.10108142455180544, |
| "learning_rate": 4.5278045307788885e-06, |
| "loss": 0.0029, |
| "step": 406 |
| }, |
| { |
| "epoch": 5.0246913580246915, |
| "grad_norm": 0.06135525071329738, |
| "learning_rate": 4.476350270394942e-06, |
| "loss": 0.0017, |
| "step": 407 |
| }, |
| { |
| "epoch": 5.037037037037037, |
| "grad_norm": 0.1357779809507268, |
| "learning_rate": 4.425105606571145e-06, |
| "loss": 0.0035, |
| "step": 408 |
| }, |
| { |
| "epoch": 5.049382716049383, |
| "grad_norm": 0.09775066607270783, |
| "learning_rate": 4.374072483801769e-06, |
| "loss": 0.0026, |
| "step": 409 |
| }, |
| { |
| "epoch": 5.061728395061729, |
| "grad_norm": 0.0990513642880167, |
| "learning_rate": 4.323252838554099e-06, |
| "loss": 0.0024, |
| "step": 410 |
| }, |
| { |
| "epoch": 5.074074074074074, |
| "grad_norm": 0.07747603969017622, |
| "learning_rate": 4.272648599194948e-06, |
| "loss": 0.0016, |
| "step": 411 |
| }, |
| { |
| "epoch": 5.08641975308642, |
| "grad_norm": 0.14776525482891759, |
| "learning_rate": 4.222261685917489e-06, |
| "loss": 0.0061, |
| "step": 412 |
| }, |
| { |
| "epoch": 5.098765432098766, |
| "grad_norm": 0.06839289353699521, |
| "learning_rate": 4.1720940106683915e-06, |
| "loss": 0.0017, |
| "step": 413 |
| }, |
| { |
| "epoch": 5.111111111111111, |
| "grad_norm": 0.08384861829329324, |
| "learning_rate": 4.12214747707527e-06, |
| "loss": 0.0029, |
| "step": 414 |
| }, |
| { |
| "epoch": 5.1234567901234565, |
| "grad_norm": 0.08959984954999654, |
| "learning_rate": 4.0724239803744524e-06, |
| "loss": 0.0058, |
| "step": 415 |
| }, |
| { |
| "epoch": 5.135802469135802, |
| "grad_norm": 0.12415321940213553, |
| "learning_rate": 4.022925407339064e-06, |
| "loss": 0.0046, |
| "step": 416 |
| }, |
| { |
| "epoch": 5.148148148148148, |
| "grad_norm": 0.08785424848498224, |
| "learning_rate": 3.973653636207437e-06, |
| "loss": 0.0028, |
| "step": 417 |
| }, |
| { |
| "epoch": 5.160493827160494, |
| "grad_norm": 0.08347388278874737, |
| "learning_rate": 3.924610536611833e-06, |
| "loss": 0.0035, |
| "step": 418 |
| }, |
| { |
| "epoch": 5.172839506172839, |
| "grad_norm": 0.14158253470926024, |
| "learning_rate": 3.875797969507502e-06, |
| "loss": 0.0029, |
| "step": 419 |
| }, |
| { |
| "epoch": 5.185185185185185, |
| "grad_norm": 0.07618847346132784, |
| "learning_rate": 3.827217787102072e-06, |
| "loss": 0.0021, |
| "step": 420 |
| }, |
| { |
| "epoch": 5.197530864197531, |
| "grad_norm": 0.0799051798869917, |
| "learning_rate": 3.7788718327852625e-06, |
| "loss": 0.0021, |
| "step": 421 |
| }, |
| { |
| "epoch": 5.209876543209877, |
| "grad_norm": 0.12744475006620873, |
| "learning_rate": 3.730761941058938e-06, |
| "loss": 0.0048, |
| "step": 422 |
| }, |
| { |
| "epoch": 5.222222222222222, |
| "grad_norm": 0.1367296903886182, |
| "learning_rate": 3.6828899374674933e-06, |
| "loss": 0.003, |
| "step": 423 |
| }, |
| { |
| "epoch": 5.234567901234568, |
| "grad_norm": 0.12613896676405553, |
| "learning_rate": 3.635257638528589e-06, |
| "loss": 0.0028, |
| "step": 424 |
| }, |
| { |
| "epoch": 5.246913580246914, |
| "grad_norm": 0.08185887424793617, |
| "learning_rate": 3.587866851664219e-06, |
| "loss": 0.0021, |
| "step": 425 |
| }, |
| { |
| "epoch": 5.2592592592592595, |
| "grad_norm": 0.05307305165318999, |
| "learning_rate": 3.540719375132129e-06, |
| "loss": 0.0016, |
| "step": 426 |
| }, |
| { |
| "epoch": 5.271604938271605, |
| "grad_norm": 0.13804574516617765, |
| "learning_rate": 3.493816997957582e-06, |
| "loss": 0.0047, |
| "step": 427 |
| }, |
| { |
| "epoch": 5.283950617283951, |
| "grad_norm": 0.07491487304908827, |
| "learning_rate": 3.447161499865467e-06, |
| "loss": 0.0018, |
| "step": 428 |
| }, |
| { |
| "epoch": 5.296296296296296, |
| "grad_norm": 0.08625813878349398, |
| "learning_rate": 3.4007546512127764e-06, |
| "loss": 0.0019, |
| "step": 429 |
| }, |
| { |
| "epoch": 5.308641975308642, |
| "grad_norm": 0.06179766978879838, |
| "learning_rate": 3.3545982129214227e-06, |
| "loss": 0.0015, |
| "step": 430 |
| }, |
| { |
| "epoch": 5.320987654320987, |
| "grad_norm": 0.15978150579410316, |
| "learning_rate": 3.308693936411421e-06, |
| "loss": 0.0043, |
| "step": 431 |
| }, |
| { |
| "epoch": 5.333333333333333, |
| "grad_norm": 0.1469342970511695, |
| "learning_rate": 3.2630435635344283e-06, |
| "loss": 0.0035, |
| "step": 432 |
| }, |
| { |
| "epoch": 5.345679012345679, |
| "grad_norm": 0.14475265179014074, |
| "learning_rate": 3.21764882650766e-06, |
| "loss": 0.004, |
| "step": 433 |
| }, |
| { |
| "epoch": 5.3580246913580245, |
| "grad_norm": 0.1308223994783395, |
| "learning_rate": 3.1725114478481458e-06, |
| "loss": 0.0026, |
| "step": 434 |
| }, |
| { |
| "epoch": 5.37037037037037, |
| "grad_norm": 0.13456024267263844, |
| "learning_rate": 3.1276331403073733e-06, |
| "loss": 0.0113, |
| "step": 435 |
| }, |
| { |
| "epoch": 5.382716049382716, |
| "grad_norm": 0.08889181174562952, |
| "learning_rate": 3.0830156068063e-06, |
| "loss": 0.0016, |
| "step": 436 |
| }, |
| { |
| "epoch": 5.395061728395062, |
| "grad_norm": 0.10350317432315419, |
| "learning_rate": 3.0386605403707347e-06, |
| "loss": 0.0029, |
| "step": 437 |
| }, |
| { |
| "epoch": 5.407407407407407, |
| "grad_norm": 0.11255331179888899, |
| "learning_rate": 2.9945696240670905e-06, |
| "loss": 0.007, |
| "step": 438 |
| }, |
| { |
| "epoch": 5.419753086419753, |
| "grad_norm": 0.12613776990357034, |
| "learning_rate": 2.95074453093853e-06, |
| "loss": 0.0037, |
| "step": 439 |
| }, |
| { |
| "epoch": 5.432098765432099, |
| "grad_norm": 0.13211120615930835, |
| "learning_rate": 2.907186923941466e-06, |
| "loss": 0.0029, |
| "step": 440 |
| }, |
| { |
| "epoch": 5.444444444444445, |
| "grad_norm": 0.13009133491131789, |
| "learning_rate": 2.8638984558824777e-06, |
| "loss": 0.0056, |
| "step": 441 |
| }, |
| { |
| "epoch": 5.45679012345679, |
| "grad_norm": 0.11165738765343762, |
| "learning_rate": 2.820880769355582e-06, |
| "loss": 0.0023, |
| "step": 442 |
| }, |
| { |
| "epoch": 5.469135802469136, |
| "grad_norm": 0.09955128366405679, |
| "learning_rate": 2.778135496679908e-06, |
| "loss": 0.0043, |
| "step": 443 |
| }, |
| { |
| "epoch": 5.481481481481482, |
| "grad_norm": 0.13140680037758684, |
| "learning_rate": 2.7356642598377604e-06, |
| "loss": 0.005, |
| "step": 444 |
| }, |
| { |
| "epoch": 5.493827160493828, |
| "grad_norm": 0.11819363282576739, |
| "learning_rate": 2.6934686704130698e-06, |
| "loss": 0.0021, |
| "step": 445 |
| }, |
| { |
| "epoch": 5.506172839506172, |
| "grad_norm": 0.08748806809387456, |
| "learning_rate": 2.6515503295302446e-06, |
| "loss": 0.0014, |
| "step": 446 |
| }, |
| { |
| "epoch": 5.518518518518518, |
| "grad_norm": 0.10665634756766484, |
| "learning_rate": 2.6099108277934105e-06, |
| "loss": 0.0038, |
| "step": 447 |
| }, |
| { |
| "epoch": 5.530864197530864, |
| "grad_norm": 0.06465287847372936, |
| "learning_rate": 2.5685517452260566e-06, |
| "loss": 0.0043, |
| "step": 448 |
| }, |
| { |
| "epoch": 5.54320987654321, |
| "grad_norm": 0.06355150302844088, |
| "learning_rate": 2.527474651211089e-06, |
| "loss": 0.0016, |
| "step": 449 |
| }, |
| { |
| "epoch": 5.555555555555555, |
| "grad_norm": 0.14706952848764682, |
| "learning_rate": 2.4866811044312667e-06, |
| "loss": 0.0095, |
| "step": 450 |
| }, |
| { |
| "epoch": 5.567901234567901, |
| "grad_norm": 0.11732857265877357, |
| "learning_rate": 2.4461726528100615e-06, |
| "loss": 0.0062, |
| "step": 451 |
| }, |
| { |
| "epoch": 5.580246913580247, |
| "grad_norm": 0.09196481051355447, |
| "learning_rate": 2.405950833452928e-06, |
| "loss": 0.0043, |
| "step": 452 |
| }, |
| { |
| "epoch": 5.592592592592593, |
| "grad_norm": 0.16089432829098174, |
| "learning_rate": 2.3660171725889703e-06, |
| "loss": 0.005, |
| "step": 453 |
| }, |
| { |
| "epoch": 5.604938271604938, |
| "grad_norm": 0.09752738474918766, |
| "learning_rate": 2.32637318551303e-06, |
| "loss": 0.003, |
| "step": 454 |
| }, |
| { |
| "epoch": 5.617283950617284, |
| "grad_norm": 0.111669117303663, |
| "learning_rate": 2.287020376528193e-06, |
| "loss": 0.0023, |
| "step": 455 |
| }, |
| { |
| "epoch": 5.62962962962963, |
| "grad_norm": 0.06950187494180007, |
| "learning_rate": 2.2479602388887013e-06, |
| "loss": 0.0024, |
| "step": 456 |
| }, |
| { |
| "epoch": 5.6419753086419755, |
| "grad_norm": 0.09005092942064802, |
| "learning_rate": 2.209194254743295e-06, |
| "loss": 0.0041, |
| "step": 457 |
| }, |
| { |
| "epoch": 5.654320987654321, |
| "grad_norm": 0.058271649974103654, |
| "learning_rate": 2.170723895078972e-06, |
| "loss": 0.0013, |
| "step": 458 |
| }, |
| { |
| "epoch": 5.666666666666667, |
| "grad_norm": 0.05801255129790246, |
| "learning_rate": 2.132550619665168e-06, |
| "loss": 0.0015, |
| "step": 459 |
| }, |
| { |
| "epoch": 5.679012345679013, |
| "grad_norm": 0.05173255822131142, |
| "learning_rate": 2.0946758769983666e-06, |
| "loss": 0.0015, |
| "step": 460 |
| }, |
| { |
| "epoch": 5.6913580246913575, |
| "grad_norm": 0.12763685917437947, |
| "learning_rate": 2.05710110424714e-06, |
| "loss": 0.0022, |
| "step": 461 |
| }, |
| { |
| "epoch": 5.703703703703704, |
| "grad_norm": 0.13571202690989434, |
| "learning_rate": 2.019827727197605e-06, |
| "loss": 0.0031, |
| "step": 462 |
| }, |
| { |
| "epoch": 5.716049382716049, |
| "grad_norm": 0.08858787712719375, |
| "learning_rate": 1.982857160199334e-06, |
| "loss": 0.0025, |
| "step": 463 |
| }, |
| { |
| "epoch": 5.728395061728395, |
| "grad_norm": 0.09041339350659952, |
| "learning_rate": 1.946190806111674e-06, |
| "loss": 0.0022, |
| "step": 464 |
| }, |
| { |
| "epoch": 5.7407407407407405, |
| "grad_norm": 0.12427642574112911, |
| "learning_rate": 1.9098300562505266e-06, |
| "loss": 0.0026, |
| "step": 465 |
| }, |
| { |
| "epoch": 5.753086419753086, |
| "grad_norm": 0.07753348116821808, |
| "learning_rate": 1.873776290335545e-06, |
| "loss": 0.0024, |
| "step": 466 |
| }, |
| { |
| "epoch": 5.765432098765432, |
| "grad_norm": 0.10239535590693546, |
| "learning_rate": 1.8380308764377841e-06, |
| "loss": 0.0023, |
| "step": 467 |
| }, |
| { |
| "epoch": 5.777777777777778, |
| "grad_norm": 0.0932480254649128, |
| "learning_rate": 1.80259517092779e-06, |
| "loss": 0.0026, |
| "step": 468 |
| }, |
| { |
| "epoch": 5.790123456790123, |
| "grad_norm": 0.07678126376324187, |
| "learning_rate": 1.767470518424129e-06, |
| "loss": 0.0023, |
| "step": 469 |
| }, |
| { |
| "epoch": 5.802469135802469, |
| "grad_norm": 0.07579256484399811, |
| "learning_rate": 1.7326582517423662e-06, |
| "loss": 0.0014, |
| "step": 470 |
| }, |
| { |
| "epoch": 5.814814814814815, |
| "grad_norm": 0.08198420550789928, |
| "learning_rate": 1.6981596918444953e-06, |
| "loss": 0.0019, |
| "step": 471 |
| }, |
| { |
| "epoch": 5.827160493827161, |
| "grad_norm": 0.06306638866343868, |
| "learning_rate": 1.663976147788806e-06, |
| "loss": 0.0013, |
| "step": 472 |
| }, |
| { |
| "epoch": 5.839506172839506, |
| "grad_norm": 0.07973362090991411, |
| "learning_rate": 1.6301089166802232e-06, |
| "loss": 0.0017, |
| "step": 473 |
| }, |
| { |
| "epoch": 5.851851851851852, |
| "grad_norm": 0.10612340130126985, |
| "learning_rate": 1.596559283621074e-06, |
| "loss": 0.0023, |
| "step": 474 |
| }, |
| { |
| "epoch": 5.864197530864198, |
| "grad_norm": 0.060264533571981124, |
| "learning_rate": 1.5633285216623384e-06, |
| "loss": 0.0016, |
| "step": 475 |
| }, |
| { |
| "epoch": 5.8765432098765435, |
| "grad_norm": 0.06817593452948047, |
| "learning_rate": 1.5304178917553302e-06, |
| "loss": 0.0012, |
| "step": 476 |
| }, |
| { |
| "epoch": 5.888888888888889, |
| "grad_norm": 0.09699227562887566, |
| "learning_rate": 1.4978286427038602e-06, |
| "loss": 0.003, |
| "step": 477 |
| }, |
| { |
| "epoch": 5.901234567901234, |
| "grad_norm": 0.08289137794935784, |
| "learning_rate": 1.4655620111168422e-06, |
| "loss": 0.002, |
| "step": 478 |
| }, |
| { |
| "epoch": 5.91358024691358, |
| "grad_norm": 0.09286549417981105, |
| "learning_rate": 1.4336192213613742e-06, |
| "loss": 0.0059, |
| "step": 479 |
| }, |
| { |
| "epoch": 5.925925925925926, |
| "grad_norm": 0.11098773394965465, |
| "learning_rate": 1.4020014855162755e-06, |
| "loss": 0.0053, |
| "step": 480 |
| }, |
| { |
| "epoch": 5.938271604938271, |
| "grad_norm": 0.09542493224972246, |
| "learning_rate": 1.3707100033261035e-06, |
| "loss": 0.0031, |
| "step": 481 |
| }, |
| { |
| "epoch": 5.950617283950617, |
| "grad_norm": 0.07639864399541678, |
| "learning_rate": 1.339745962155613e-06, |
| "loss": 0.0022, |
| "step": 482 |
| }, |
| { |
| "epoch": 5.962962962962963, |
| "grad_norm": 0.08849988543559398, |
| "learning_rate": 1.3091105369447166e-06, |
| "loss": 0.0017, |
| "step": 483 |
| }, |
| { |
| "epoch": 5.9753086419753085, |
| "grad_norm": 0.0884132771726154, |
| "learning_rate": 1.2788048901638917e-06, |
| "loss": 0.0017, |
| "step": 484 |
| }, |
| { |
| "epoch": 5.987654320987654, |
| "grad_norm": 0.06292245984188637, |
| "learning_rate": 1.2488301717700735e-06, |
| "loss": 0.0016, |
| "step": 485 |
| }, |
| { |
| "epoch": 6.0, |
| "grad_norm": 0.062090787373349644, |
| "learning_rate": 1.2191875191630209e-06, |
| "loss": 0.0017, |
| "step": 486 |
| }, |
| { |
| "epoch": 6.012345679012346, |
| "grad_norm": 0.04437033735955514, |
| "learning_rate": 1.1898780571421554e-06, |
| "loss": 0.0013, |
| "step": 487 |
| }, |
| { |
| "epoch": 6.0246913580246915, |
| "grad_norm": 0.055690581108790035, |
| "learning_rate": 1.1609028978638804e-06, |
| "loss": 0.0015, |
| "step": 488 |
| }, |
| { |
| "epoch": 6.037037037037037, |
| "grad_norm": 0.06322304615189499, |
| "learning_rate": 1.132263140799381e-06, |
| "loss": 0.0016, |
| "step": 489 |
| }, |
| { |
| "epoch": 6.049382716049383, |
| "grad_norm": 0.06141880723579853, |
| "learning_rate": 1.1039598726929046e-06, |
| "loss": 0.0019, |
| "step": 490 |
| }, |
| { |
| "epoch": 6.061728395061729, |
| "grad_norm": 0.0559934719425724, |
| "learning_rate": 1.0759941675205221e-06, |
| "loss": 0.0019, |
| "step": 491 |
| }, |
| { |
| "epoch": 6.074074074074074, |
| "grad_norm": 0.052119641486773, |
| "learning_rate": 1.0483670864493777e-06, |
| "loss": 0.0041, |
| "step": 492 |
| }, |
| { |
| "epoch": 6.08641975308642, |
| "grad_norm": 0.052574867923764465, |
| "learning_rate": 1.0210796777974196e-06, |
| "loss": 0.0016, |
| "step": 493 |
| }, |
| { |
| "epoch": 6.098765432098766, |
| "grad_norm": 0.04551032766348937, |
| "learning_rate": 9.94132976993627e-07, |
| "loss": 0.0015, |
| "step": 494 |
| }, |
| { |
| "epoch": 6.111111111111111, |
| "grad_norm": 0.07300563736192743, |
| "learning_rate": 9.675280065387117e-07, |
| "loss": 0.0026, |
| "step": 495 |
| }, |
| { |
| "epoch": 6.1234567901234565, |
| "grad_norm": 0.0589388985855795, |
| "learning_rate": 9.412657759663279e-07, |
| "loss": 0.002, |
| "step": 496 |
| }, |
| { |
| "epoch": 6.135802469135802, |
| "grad_norm": 0.036832313525668736, |
| "learning_rate": 9.153472818047627e-07, |
| "loss": 0.001, |
| "step": 497 |
| }, |
| { |
| "epoch": 6.148148148148148, |
| "grad_norm": 0.08579156187113851, |
| "learning_rate": 8.897735075391156e-07, |
| "loss": 0.0019, |
| "step": 498 |
| }, |
| { |
| "epoch": 6.160493827160494, |
| "grad_norm": 0.03254562268330724, |
| "learning_rate": 8.645454235739903e-07, |
| "loss": 0.001, |
| "step": 499 |
| }, |
| { |
| "epoch": 6.172839506172839, |
| "grad_norm": 0.06668010013885942, |
| "learning_rate": 8.39663987196665e-07, |
| "loss": 0.0036, |
| "step": 500 |
| }, |
| { |
| "epoch": 6.185185185185185, |
| "grad_norm": 0.05254242210245364, |
| "learning_rate": 8.151301425407699e-07, |
| "loss": 0.0052, |
| "step": 501 |
| }, |
| { |
| "epoch": 6.197530864197531, |
| "grad_norm": 0.06269798774876521, |
| "learning_rate": 7.909448205504633e-07, |
| "loss": 0.0017, |
| "step": 502 |
| }, |
| { |
| "epoch": 6.209876543209877, |
| "grad_norm": 0.05212716006751509, |
| "learning_rate": 7.671089389451059e-07, |
| "loss": 0.0014, |
| "step": 503 |
| }, |
| { |
| "epoch": 6.222222222222222, |
| "grad_norm": 0.06723279411740171, |
| "learning_rate": 7.43623402184438e-07, |
| "loss": 0.0046, |
| "step": 504 |
| }, |
| { |
| "epoch": 6.234567901234568, |
| "grad_norm": 0.053338908065449955, |
| "learning_rate": 7.204891014342552e-07, |
| "loss": 0.0015, |
| "step": 505 |
| }, |
| { |
| "epoch": 6.246913580246914, |
| "grad_norm": 0.06851159604452965, |
| "learning_rate": 6.977069145325987e-07, |
| "loss": 0.0019, |
| "step": 506 |
| }, |
| { |
| "epoch": 6.2592592592592595, |
| "grad_norm": 0.03228477145799883, |
| "learning_rate": 6.752777059564431e-07, |
| "loss": 0.0012, |
| "step": 507 |
| }, |
| { |
| "epoch": 6.271604938271605, |
| "grad_norm": 0.037103749830438106, |
| "learning_rate": 6.532023267888954e-07, |
| "loss": 0.0009, |
| "step": 508 |
| }, |
| { |
| "epoch": 6.283950617283951, |
| "grad_norm": 0.06563663547083265, |
| "learning_rate": 6.314816146868951e-07, |
| "loss": 0.0017, |
| "step": 509 |
| }, |
| { |
| "epoch": 6.296296296296296, |
| "grad_norm": 0.07016302273033481, |
| "learning_rate": 6.101163938494359e-07, |
| "loss": 0.0035, |
| "step": 510 |
| }, |
| { |
| "epoch": 6.308641975308642, |
| "grad_norm": 0.06182335435231687, |
| "learning_rate": 5.891074749862857e-07, |
| "loss": 0.0012, |
| "step": 511 |
| }, |
| { |
| "epoch": 6.320987654320987, |
| "grad_norm": 0.05596013279096978, |
| "learning_rate": 5.684556552872256e-07, |
| "loss": 0.0033, |
| "step": 512 |
| }, |
| { |
| "epoch": 6.333333333333333, |
| "grad_norm": 0.05534288287471249, |
| "learning_rate": 5.481617183918053e-07, |
| "loss": 0.0014, |
| "step": 513 |
| }, |
| { |
| "epoch": 6.345679012345679, |
| "grad_norm": 0.04118593026135544, |
| "learning_rate": 5.282264343595977e-07, |
| "loss": 0.0013, |
| "step": 514 |
| }, |
| { |
| "epoch": 6.3580246913580245, |
| "grad_norm": 0.04719174098780563, |
| "learning_rate": 5.086505596409885e-07, |
| "loss": 0.0012, |
| "step": 515 |
| }, |
| { |
| "epoch": 6.37037037037037, |
| "grad_norm": 0.06236635038173847, |
| "learning_rate": 4.894348370484648e-07, |
| "loss": 0.0019, |
| "step": 516 |
| }, |
| { |
| "epoch": 6.382716049382716, |
| "grad_norm": 0.05738720109912046, |
| "learning_rate": 4.7057999572843516e-07, |
| "loss": 0.0015, |
| "step": 517 |
| }, |
| { |
| "epoch": 6.395061728395062, |
| "grad_norm": 0.06748476210640128, |
| "learning_rate": 4.520867511335569e-07, |
| "loss": 0.0053, |
| "step": 518 |
| }, |
| { |
| "epoch": 6.407407407407407, |
| "grad_norm": 0.05317337463002061, |
| "learning_rate": 4.3395580499559276e-07, |
| "loss": 0.0016, |
| "step": 519 |
| }, |
| { |
| "epoch": 6.419753086419753, |
| "grad_norm": 0.042798403768389934, |
| "learning_rate": 4.161878452987778e-07, |
| "loss": 0.0014, |
| "step": 520 |
| }, |
| { |
| "epoch": 6.432098765432099, |
| "grad_norm": 0.03849912145724003, |
| "learning_rate": 3.9878354625371927e-07, |
| "loss": 0.001, |
| "step": 521 |
| }, |
| { |
| "epoch": 6.444444444444445, |
| "grad_norm": 0.06507943189963111, |
| "learning_rate": 3.817435682718096e-07, |
| "loss": 0.0016, |
| "step": 522 |
| }, |
| { |
| "epoch": 6.45679012345679, |
| "grad_norm": 0.06834437894921451, |
| "learning_rate": 3.650685579401692e-07, |
| "loss": 0.0014, |
| "step": 523 |
| }, |
| { |
| "epoch": 6.469135802469136, |
| "grad_norm": 0.068135936362266, |
| "learning_rate": 3.4875914799710975e-07, |
| "loss": 0.0037, |
| "step": 524 |
| }, |
| { |
| "epoch": 6.481481481481482, |
| "grad_norm": 0.0769225470497784, |
| "learning_rate": 3.328159573081258e-07, |
| "loss": 0.0025, |
| "step": 525 |
| }, |
| { |
| "epoch": 6.493827160493828, |
| "grad_norm": 0.04510279405686483, |
| "learning_rate": 3.1723959084241195e-07, |
| "loss": 0.0013, |
| "step": 526 |
| }, |
| { |
| "epoch": 6.506172839506172, |
| "grad_norm": 0.06253123572490814, |
| "learning_rate": 3.020306396499062e-07, |
| "loss": 0.0018, |
| "step": 527 |
| }, |
| { |
| "epoch": 6.518518518518518, |
| "grad_norm": 0.05607017364278352, |
| "learning_rate": 2.871896808388608e-07, |
| "loss": 0.0015, |
| "step": 528 |
| }, |
| { |
| "epoch": 6.530864197530864, |
| "grad_norm": 0.06565892652904905, |
| "learning_rate": 2.727172775539522e-07, |
| "loss": 0.0015, |
| "step": 529 |
| }, |
| { |
| "epoch": 6.54320987654321, |
| "grad_norm": 0.06779883467508055, |
| "learning_rate": 2.5861397895489914e-07, |
| "loss": 0.0054, |
| "step": 530 |
| }, |
| { |
| "epoch": 6.555555555555555, |
| "grad_norm": 0.0625439341793089, |
| "learning_rate": 2.44880320195634e-07, |
| "loss": 0.0014, |
| "step": 531 |
| }, |
| { |
| "epoch": 6.567901234567901, |
| "grad_norm": 0.046645244461245204, |
| "learning_rate": 2.315168224039932e-07, |
| "loss": 0.0011, |
| "step": 532 |
| }, |
| { |
| "epoch": 6.580246913580247, |
| "grad_norm": 0.05642062644236279, |
| "learning_rate": 2.1852399266194312e-07, |
| "loss": 0.002, |
| "step": 533 |
| }, |
| { |
| "epoch": 6.592592592592593, |
| "grad_norm": 0.053426947443764354, |
| "learning_rate": 2.0590232398634114e-07, |
| "loss": 0.0035, |
| "step": 534 |
| }, |
| { |
| "epoch": 6.604938271604938, |
| "grad_norm": 0.02804527022906818, |
| "learning_rate": 1.9365229531022267e-07, |
| "loss": 0.0009, |
| "step": 535 |
| }, |
| { |
| "epoch": 6.617283950617284, |
| "grad_norm": 0.04988184506603678, |
| "learning_rate": 1.817743714646314e-07, |
| "loss": 0.0012, |
| "step": 536 |
| }, |
| { |
| "epoch": 6.62962962962963, |
| "grad_norm": 0.03572163754383024, |
| "learning_rate": 1.7026900316098217e-07, |
| "loss": 0.0012, |
| "step": 537 |
| }, |
| { |
| "epoch": 6.6419753086419755, |
| "grad_norm": 0.040421555953721704, |
| "learning_rate": 1.5913662697395583e-07, |
| "loss": 0.0011, |
| "step": 538 |
| }, |
| { |
| "epoch": 6.654320987654321, |
| "grad_norm": 0.0854680374793536, |
| "learning_rate": 1.4837766532493469e-07, |
| "loss": 0.0015, |
| "step": 539 |
| }, |
| { |
| "epoch": 6.666666666666667, |
| "grad_norm": 0.0622890200616722, |
| "learning_rate": 1.3799252646597428e-07, |
| "loss": 0.0013, |
| "step": 540 |
| }, |
| { |
| "epoch": 6.679012345679013, |
| "grad_norm": 0.04487325280230035, |
| "learning_rate": 1.2798160446431006e-07, |
| "loss": 0.001, |
| "step": 541 |
| }, |
| { |
| "epoch": 6.6913580246913575, |
| "grad_norm": 0.06827865424951797, |
| "learning_rate": 1.1834527918740624e-07, |
| "loss": 0.0024, |
| "step": 542 |
| }, |
| { |
| "epoch": 6.703703703703704, |
| "grad_norm": 0.05828814401863449, |
| "learning_rate": 1.0908391628854042e-07, |
| "loss": 0.002, |
| "step": 543 |
| }, |
| { |
| "epoch": 6.716049382716049, |
| "grad_norm": 0.04071748268828138, |
| "learning_rate": 1.0019786719293046e-07, |
| "loss": 0.0012, |
| "step": 544 |
| }, |
| { |
| "epoch": 6.728395061728395, |
| "grad_norm": 0.0457370541103018, |
| "learning_rate": 9.168746908439718e-08, |
| "loss": 0.0013, |
| "step": 545 |
| }, |
| { |
| "epoch": 6.7407407407407405, |
| "grad_norm": 0.03663041381389654, |
| "learning_rate": 8.355304489257254e-08, |
| "loss": 0.0011, |
| "step": 546 |
| }, |
| { |
| "epoch": 6.753086419753086, |
| "grad_norm": 0.07030810856213271, |
| "learning_rate": 7.579490328064265e-08, |
| "loss": 0.0013, |
| "step": 547 |
| }, |
| { |
| "epoch": 6.765432098765432, |
| "grad_norm": 0.07476184707674007, |
| "learning_rate": 6.841333863363831e-08, |
| "loss": 0.0013, |
| "step": 548 |
| }, |
| { |
| "epoch": 6.777777777777778, |
| "grad_norm": 0.039877687798977456, |
| "learning_rate": 6.140863104726391e-08, |
| "loss": 0.0008, |
| "step": 549 |
| }, |
| { |
| "epoch": 6.790123456790123, |
| "grad_norm": 0.0692536388183646, |
| "learning_rate": 5.4781046317267103e-08, |
| "loss": 0.0074, |
| "step": 550 |
| }, |
| { |
| "epoch": 6.802469135802469, |
| "grad_norm": 0.05771681097265902, |
| "learning_rate": 4.85308359293557e-08, |
| "loss": 0.0034, |
| "step": 551 |
| }, |
| { |
| "epoch": 6.814814814814815, |
| "grad_norm": 0.07405017087059966, |
| "learning_rate": 4.2658237049655325e-08, |
| "loss": 0.0011, |
| "step": 552 |
| }, |
| { |
| "epoch": 6.827160493827161, |
| "grad_norm": 0.03321213886254247, |
| "learning_rate": 3.716347251570551e-08, |
| "loss": 0.0011, |
| "step": 553 |
| }, |
| { |
| "epoch": 6.839506172839506, |
| "grad_norm": 0.061503207253362814, |
| "learning_rate": 3.204675082800979e-08, |
| "loss": 0.0014, |
| "step": 554 |
| }, |
| { |
| "epoch": 6.851851851851852, |
| "grad_norm": 0.04618914106100694, |
| "learning_rate": 2.7308266142119788e-08, |
| "loss": 0.0012, |
| "step": 555 |
| }, |
| { |
| "epoch": 6.864197530864198, |
| "grad_norm": 0.06693954396383099, |
| "learning_rate": 2.294819826127115e-08, |
| "loss": 0.002, |
| "step": 556 |
| }, |
| { |
| "epoch": 6.8765432098765435, |
| "grad_norm": 0.1037551779412373, |
| "learning_rate": 1.896671262955896e-08, |
| "loss": 0.002, |
| "step": 557 |
| }, |
| { |
| "epoch": 6.888888888888889, |
| "grad_norm": 0.049967199152869485, |
| "learning_rate": 1.5363960325660565e-08, |
| "loss": 0.0023, |
| "step": 558 |
| }, |
| { |
| "epoch": 6.901234567901234, |
| "grad_norm": 0.0840544069774677, |
| "learning_rate": 1.2140078057101269e-08, |
| "loss": 0.0016, |
| "step": 559 |
| }, |
| { |
| "epoch": 6.91358024691358, |
| "grad_norm": 0.06330250298740656, |
| "learning_rate": 9.29518815506847e-09, |
| "loss": 0.0021, |
| "step": 560 |
| }, |
| { |
| "epoch": 6.925925925925926, |
| "grad_norm": 0.05729095992108939, |
| "learning_rate": 6.82939856977094e-09, |
| "loss": 0.0021, |
| "step": 561 |
| }, |
| { |
| "epoch": 6.938271604938271, |
| "grad_norm": 0.053351401675753425, |
| "learning_rate": 4.74280286634099e-09, |
| "loss": 0.0014, |
| "step": 562 |
| }, |
| { |
| "epoch": 6.950617283950617, |
| "grad_norm": 0.08262341994499242, |
| "learning_rate": 3.0354802212839705e-09, |
| "loss": 0.0044, |
| "step": 563 |
| }, |
| { |
| "epoch": 6.962962962962963, |
| "grad_norm": 0.058488381808320514, |
| "learning_rate": 1.7074954194729044e-09, |
| "loss": 0.0012, |
| "step": 564 |
| }, |
| { |
| "epoch": 6.9753086419753085, |
| "grad_norm": 0.08520368149220187, |
| "learning_rate": 7.588988516937789e-10, |
| "loss": 0.0021, |
| "step": 565 |
| }, |
| { |
| "epoch": 6.987654320987654, |
| "grad_norm": 0.05967553760304721, |
| "learning_rate": 1.897265127315251e-10, |
| "loss": 0.0029, |
| "step": 566 |
| }, |
| { |
| "epoch": 7.0, |
| "grad_norm": 0.06556025137721225, |
| "learning_rate": 0.0, |
| "loss": 0.0019, |
| "step": 567 |
| }, |
| { |
| "epoch": 7.0, |
| "step": 567, |
| "total_flos": 1.448974662653444e+18, |
| "train_loss": 0.046748905014567, |
| "train_runtime": 15715.0912, |
| "train_samples_per_second": 3.463, |
| "train_steps_per_second": 0.036 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 567, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 7, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.448974662653444e+18, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |
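
The object above is a complete `trainer_state.json` as written by the Hugging Face `Trainer`: 567 optimizer steps over 7 epochs, per-step logging (`"logging_steps": 1`), and a learning rate annealed to 0.0 at the final step. Below is a minimal sketch for plotting the loss and learning-rate curves from this record; the filename is an assumption (the `Trainer` saves this state as `trainer_state.json` in its output and checkpoint directories), while the keys used (`log_history`, `loss`, `learning_rate`, `step`) come directly from the record itself:

```python
import json

import matplotlib.pyplot as plt

# Path is an assumption: the Trainer writes this file as
# trainer_state.json in the output directory / checkpoint folders.
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step entries carry a "loss" key; the closing summary record
# (train_loss, train_runtime, ...) does not, so it is skipped here.
logs = [entry for entry in state["log_history"] if "loss" in entry]
steps = [entry["step"] for entry in logs]
losses = [entry["loss"] for entry in logs]
lrs = [entry["learning_rate"] for entry in logs]

fig, loss_ax = plt.subplots()
loss_ax.plot(steps, losses, label="train loss")
loss_ax.set_xlabel("step")
loss_ax.set_ylabel("loss")
loss_ax.set_yscale("log")  # late-epoch losses sit near 1e-3

lr_ax = loss_ax.twinx()  # second y-axis for the LR schedule
lr_ax.plot(steps, lrs, color="tab:orange", label="learning rate")
lr_ax.set_ylabel("learning rate")

fig.tight_layout()
fig.savefig("loss_curve.png")
```

Filtering on the `"loss"` key drops the final summary entry, which reports aggregate `train_loss` and runtime figures rather than a per-step value; with per-step losses in this portion of the run sitting in the 0.001–0.01 range, the log scale keeps the late-training plateau readable.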