{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 312,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.009615384615384616,
      "grad_norm": 7.120902061462402,
      "learning_rate": 3.125e-07,
      "loss": 1.0619,
      "step": 1
    },
    {
      "epoch": 0.019230769230769232,
      "grad_norm": 7.164572238922119,
      "learning_rate": 6.25e-07,
      "loss": 1.0454,
      "step": 2
    },
    {
      "epoch": 0.028846153846153848,
      "grad_norm": 7.609449863433838,
      "learning_rate": 9.375000000000001e-07,
      "loss": 1.0873,
      "step": 3
    },
    {
      "epoch": 0.038461538461538464,
      "grad_norm": 6.978214740753174,
      "learning_rate": 1.25e-06,
      "loss": 1.054,
      "step": 4
    },
    {
      "epoch": 0.04807692307692308,
      "grad_norm": 6.851968765258789,
      "learning_rate": 1.5625e-06,
      "loss": 1.0342,
      "step": 5
    },
    {
      "epoch": 0.057692307692307696,
      "grad_norm": 6.298832416534424,
      "learning_rate": 1.8750000000000003e-06,
      "loss": 1.0297,
      "step": 6
    },
    {
      "epoch": 0.0673076923076923,
      "grad_norm": 5.126293659210205,
      "learning_rate": 2.1875000000000002e-06,
      "loss": 0.9876,
      "step": 7
    },
    {
      "epoch": 0.07692307692307693,
      "grad_norm": 4.865529537200928,
      "learning_rate": 2.5e-06,
      "loss": 0.994,
      "step": 8
    },
    {
      "epoch": 0.08653846153846154,
      "grad_norm": 3.1541359424591064,
      "learning_rate": 2.8125e-06,
      "loss": 0.934,
      "step": 9
    },
    {
      "epoch": 0.09615384615384616,
      "grad_norm": 3.0584819316864014,
      "learning_rate": 3.125e-06,
      "loss": 0.9542,
      "step": 10
    },
    {
      "epoch": 0.10576923076923077,
      "grad_norm": 2.6357908248901367,
      "learning_rate": 3.4375e-06,
      "loss": 0.9282,
      "step": 11
    },
    {
      "epoch": 0.11538461538461539,
      "grad_norm": 3.6711061000823975,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.8984,
      "step": 12
    },
    {
      "epoch": 0.125,
      "grad_norm": 4.1532087326049805,
      "learning_rate": 4.0625000000000005e-06,
      "loss": 0.9081,
      "step": 13
    },
    {
      "epoch": 0.1346153846153846,
      "grad_norm": 3.8308780193328857,
      "learning_rate": 4.3750000000000005e-06,
      "loss": 0.8815,
      "step": 14
    },
    {
      "epoch": 0.14423076923076922,
      "grad_norm": 3.487043619155884,
      "learning_rate": 4.6875000000000004e-06,
      "loss": 0.8703,
      "step": 15
    },
    {
      "epoch": 0.15384615384615385,
      "grad_norm": 2.434431314468384,
      "learning_rate": 5e-06,
      "loss": 0.8433,
      "step": 16
    },
    {
      "epoch": 0.16346153846153846,
      "grad_norm": 2.0722246170043945,
      "learning_rate": 5.3125e-06,
      "loss": 0.8102,
      "step": 17
    },
    {
      "epoch": 0.17307692307692307,
      "grad_norm": 1.800662636756897,
      "learning_rate": 5.625e-06,
      "loss": 0.799,
      "step": 18
    },
    {
      "epoch": 0.18269230769230768,
      "grad_norm": 1.525373935699463,
      "learning_rate": 5.9375e-06,
      "loss": 0.7913,
      "step": 19
    },
    {
      "epoch": 0.19230769230769232,
      "grad_norm": 1.42585289478302,
      "learning_rate": 6.25e-06,
      "loss": 0.7863,
      "step": 20
    },
    {
      "epoch": 0.20192307692307693,
      "grad_norm": 1.2194604873657227,
      "learning_rate": 6.5625e-06,
      "loss": 0.7495,
      "step": 21
    },
    {
      "epoch": 0.21153846153846154,
      "grad_norm": 1.333568811416626,
      "learning_rate": 6.875e-06,
      "loss": 0.7596,
      "step": 22
    },
    {
      "epoch": 0.22115384615384615,
      "grad_norm": 1.3491787910461426,
      "learning_rate": 7.1875e-06,
      "loss": 0.7542,
      "step": 23
    },
    {
      "epoch": 0.23076923076923078,
      "grad_norm": 1.1451213359832764,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.7316,
      "step": 24
    },
    {
      "epoch": 0.2403846153846154,
      "grad_norm": 0.946185827255249,
      "learning_rate": 7.8125e-06,
      "loss": 0.7273,
      "step": 25
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.0927560329437256,
      "learning_rate": 8.125000000000001e-06,
      "loss": 0.7273,
      "step": 26
    },
    {
      "epoch": 0.25961538461538464,
      "grad_norm": 1.0100075006484985,
      "learning_rate": 8.4375e-06,
      "loss": 0.713,
      "step": 27
    },
    {
      "epoch": 0.2692307692307692,
      "grad_norm": 0.974131166934967,
      "learning_rate": 8.750000000000001e-06,
      "loss": 0.7092,
      "step": 28
    },
    {
      "epoch": 0.27884615384615385,
      "grad_norm": 0.8509518504142761,
      "learning_rate": 9.0625e-06,
      "loss": 0.6919,
      "step": 29
    },
    {
      "epoch": 0.28846153846153844,
      "grad_norm": 1.0555082559585571,
      "learning_rate": 9.375000000000001e-06,
      "loss": 0.7106,
      "step": 30
    },
    {
      "epoch": 0.2980769230769231,
      "grad_norm": 1.0715065002441406,
      "learning_rate": 9.6875e-06,
      "loss": 0.6971,
      "step": 31
    },
    {
      "epoch": 0.3076923076923077,
      "grad_norm": 0.8515343070030212,
      "learning_rate": 1e-05,
      "loss": 0.6849,
      "step": 32
    },
    {
      "epoch": 0.3173076923076923,
      "grad_norm": 0.8755481839179993,
      "learning_rate": 9.999685283773504e-06,
      "loss": 0.6753,
      "step": 33
    },
    {
      "epoch": 0.3269230769230769,
      "grad_norm": 1.0232303142547607,
      "learning_rate": 9.998741174712534e-06,
      "loss": 0.6717,
      "step": 34
    },
    {
      "epoch": 0.33653846153846156,
      "grad_norm": 0.8989684581756592,
      "learning_rate": 9.997167791667668e-06,
      "loss": 0.6735,
      "step": 35
    },
    {
      "epoch": 0.34615384615384615,
      "grad_norm": 1.0147842168807983,
      "learning_rate": 9.994965332706574e-06,
      "loss": 0.6842,
      "step": 36
    },
    {
      "epoch": 0.3557692307692308,
      "grad_norm": 0.8389028310775757,
      "learning_rate": 9.992134075089085e-06,
      "loss": 0.6787,
      "step": 37
    },
    {
      "epoch": 0.36538461538461536,
      "grad_norm": 0.7984700202941895,
      "learning_rate": 9.98867437523228e-06,
      "loss": 0.6834,
      "step": 38
    },
    {
      "epoch": 0.375,
      "grad_norm": 1.041527509689331,
      "learning_rate": 9.984586668665641e-06,
      "loss": 0.6566,
      "step": 39
    },
    {
      "epoch": 0.38461538461538464,
      "grad_norm": 0.7439405918121338,
      "learning_rate": 9.979871469976197e-06,
      "loss": 0.6797,
      "step": 40
    },
    {
      "epoch": 0.3942307692307692,
      "grad_norm": 0.743121325969696,
      "learning_rate": 9.974529372743762e-06,
      "loss": 0.6739,
      "step": 41
    },
    {
      "epoch": 0.40384615384615385,
      "grad_norm": 0.7846362590789795,
      "learning_rate": 9.968561049466214e-06,
      "loss": 0.6335,
      "step": 42
    },
    {
      "epoch": 0.41346153846153844,
      "grad_norm": 0.7163788676261902,
      "learning_rate": 9.961967251474823e-06,
      "loss": 0.6466,
      "step": 43
    },
    {
      "epoch": 0.4230769230769231,
      "grad_norm": 0.6745060086250305,
      "learning_rate": 9.954748808839675e-06,
      "loss": 0.6509,
      "step": 44
    },
    {
      "epoch": 0.4326923076923077,
      "grad_norm": 0.6187793612480164,
      "learning_rate": 9.946906630265184e-06,
      "loss": 0.6494,
      "step": 45
    },
    {
      "epoch": 0.4423076923076923,
      "grad_norm": 0.6987949013710022,
      "learning_rate": 9.938441702975689e-06,
      "loss": 0.6479,
      "step": 46
    },
    {
      "epoch": 0.4519230769230769,
      "grad_norm": 0.7417098879814148,
      "learning_rate": 9.92935509259118e-06,
      "loss": 0.6611,
      "step": 47
    },
    {
      "epoch": 0.46153846153846156,
      "grad_norm": 0.7351352572441101,
      "learning_rate": 9.91964794299315e-06,
      "loss": 0.654,
      "step": 48
    },
    {
      "epoch": 0.47115384615384615,
      "grad_norm": 0.6541791558265686,
      "learning_rate": 9.909321476180594e-06,
      "loss": 0.6473,
      "step": 49
    },
    {
      "epoch": 0.4807692307692308,
      "grad_norm": 0.8029395937919617,
      "learning_rate": 9.898376992116179e-06,
      "loss": 0.649,
      "step": 50
    },
    {
      "epoch": 0.49038461538461536,
      "grad_norm": 0.7245095372200012,
      "learning_rate": 9.886815868562596e-06,
      "loss": 0.6413,
      "step": 51
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.7668218612670898,
      "learning_rate": 9.874639560909118e-06,
      "loss": 0.6328,
      "step": 52
    },
    {
      "epoch": 0.5096153846153846,
      "grad_norm": 0.9320131540298462,
      "learning_rate": 9.861849601988384e-06,
      "loss": 0.6433,
      "step": 53
    },
    {
      "epoch": 0.5192307692307693,
      "grad_norm": 0.6532019376754761,
      "learning_rate": 9.848447601883436e-06,
      "loss": 0.6201,
      "step": 54
    },
    {
      "epoch": 0.5288461538461539,
      "grad_norm": 0.8737635612487793,
      "learning_rate": 9.834435247725032e-06,
      "loss": 0.6382,
      "step": 55
    },
    {
      "epoch": 0.5384615384615384,
      "grad_norm": 0.7610065937042236,
      "learning_rate": 9.819814303479268e-06,
      "loss": 0.627,
      "step": 56
    },
    {
      "epoch": 0.5480769230769231,
      "grad_norm": 0.606206476688385,
      "learning_rate": 9.804586609725499e-06,
      "loss": 0.6139,
      "step": 57
    },
    {
      "epoch": 0.5576923076923077,
      "grad_norm": 0.7713996171951294,
      "learning_rate": 9.788754083424654e-06,
      "loss": 0.6335,
      "step": 58
    },
    {
      "epoch": 0.5673076923076923,
      "grad_norm": 0.7641143202781677,
      "learning_rate": 9.772318717677905e-06,
      "loss": 0.6308,
      "step": 59
    },
    {
      "epoch": 0.5769230769230769,
      "grad_norm": 0.687099814414978,
      "learning_rate": 9.755282581475769e-06,
      "loss": 0.6124,
      "step": 60
    },
    {
      "epoch": 0.5865384615384616,
      "grad_norm": 0.8211157321929932,
      "learning_rate": 9.737647819437645e-06,
      "loss": 0.641,
      "step": 61
    },
    {
      "epoch": 0.5961538461538461,
      "grad_norm": 0.6129670739173889,
      "learning_rate": 9.719416651541839e-06,
      "loss": 0.6286,
      "step": 62
    },
    {
      "epoch": 0.6057692307692307,
      "grad_norm": 0.7806887626647949,
      "learning_rate": 9.700591372846096e-06,
      "loss": 0.6267,
      "step": 63
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 0.7048196792602539,
      "learning_rate": 9.681174353198687e-06,
      "loss": 0.6215,
      "step": 64
    },
    {
      "epoch": 0.625,
      "grad_norm": 0.6329473853111267,
      "learning_rate": 9.661168036940071e-06,
      "loss": 0.6195,
      "step": 65
    },
    {
      "epoch": 0.6346153846153846,
      "grad_norm": 0.6297646760940552,
      "learning_rate": 9.640574942595195e-06,
      "loss": 0.618,
      "step": 66
    },
    {
      "epoch": 0.6442307692307693,
      "grad_norm": 0.5605977773666382,
      "learning_rate": 9.619397662556434e-06,
      "loss": 0.6229,
      "step": 67
    },
    {
      "epoch": 0.6538461538461539,
      "grad_norm": 0.6335819959640503,
      "learning_rate": 9.597638862757255e-06,
      "loss": 0.6329,
      "step": 68
    },
    {
      "epoch": 0.6634615384615384,
      "grad_norm": 0.6819554567337036,
      "learning_rate": 9.5753012823366e-06,
      "loss": 0.6304,
      "step": 69
    },
    {
      "epoch": 0.6730769230769231,
      "grad_norm": 0.6619697213172913,
      "learning_rate": 9.552387733294081e-06,
      "loss": 0.6304,
      "step": 70
    },
    {
      "epoch": 0.6826923076923077,
      "grad_norm": 0.5414342284202576,
      "learning_rate": 9.528901100135971e-06,
      "loss": 0.6207,
      "step": 71
    },
    {
      "epoch": 0.6923076923076923,
      "grad_norm": 0.6086860299110413,
      "learning_rate": 9.504844339512096e-06,
      "loss": 0.6081,
      "step": 72
    },
    {
      "epoch": 0.7019230769230769,
      "grad_norm": 0.6108078956604004,
      "learning_rate": 9.480220479843627e-06,
      "loss": 0.615,
      "step": 73
    },
    {
      "epoch": 0.7115384615384616,
      "grad_norm": 0.5954450368881226,
      "learning_rate": 9.45503262094184e-06,
      "loss": 0.6149,
      "step": 74
    },
    {
      "epoch": 0.7211538461538461,
      "grad_norm": 0.7406807541847229,
      "learning_rate": 9.4292839336179e-06,
      "loss": 0.6255,
      "step": 75
    },
    {
      "epoch": 0.7307692307692307,
      "grad_norm": 0.6158877015113831,
      "learning_rate": 9.40297765928369e-06,
      "loss": 0.6112,
      "step": 76
    },
    {
      "epoch": 0.7403846153846154,
      "grad_norm": 0.6299143433570862,
      "learning_rate": 9.376117109543769e-06,
      "loss": 0.6422,
      "step": 77
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.5767729878425598,
      "learning_rate": 9.348705665778479e-06,
      "loss": 0.62,
      "step": 78
    },
    {
      "epoch": 0.7596153846153846,
      "grad_norm": 0.5848022699356079,
      "learning_rate": 9.320746778718274e-06,
      "loss": 0.6203,
      "step": 79
    },
    {
      "epoch": 0.7692307692307693,
      "grad_norm": 0.6434866786003113,
      "learning_rate": 9.292243968009332e-06,
      "loss": 0.6171,
      "step": 80
    },
    {
      "epoch": 0.7788461538461539,
      "grad_norm": 0.6166576147079468,
      "learning_rate": 9.263200821770462e-06,
      "loss": 0.6134,
      "step": 81
    },
    {
      "epoch": 0.7884615384615384,
      "grad_norm": 0.6476818919181824,
      "learning_rate": 9.233620996141421e-06,
      "loss": 0.6129,
      "step": 82
    },
    {
      "epoch": 0.7980769230769231,
      "grad_norm": 0.5329578518867493,
      "learning_rate": 9.203508214822652e-06,
      "loss": 0.618,
      "step": 83
    },
    {
      "epoch": 0.8076923076923077,
      "grad_norm": 0.6584686636924744,
      "learning_rate": 9.172866268606514e-06,
      "loss": 0.6082,
      "step": 84
    },
    {
      "epoch": 0.8173076923076923,
      "grad_norm": 0.5995557308197021,
      "learning_rate": 9.141699014900084e-06,
      "loss": 0.5974,
      "step": 85
    },
    {
      "epoch": 0.8269230769230769,
      "grad_norm": 0.6057348847389221,
      "learning_rate": 9.110010377239552e-06,
      "loss": 0.6157,
      "step": 86
    },
    {
      "epoch": 0.8365384615384616,
      "grad_norm": 0.5697214007377625,
      "learning_rate": 9.077804344796302e-06,
      "loss": 0.6301,
      "step": 87
    },
    {
      "epoch": 0.8461538461538461,
      "grad_norm": 0.6759421229362488,
      "learning_rate": 9.045084971874738e-06,
      "loss": 0.6226,
      "step": 88
    },
    {
      "epoch": 0.8557692307692307,
      "grad_norm": 0.6390875577926636,
      "learning_rate": 9.011856377401891e-06,
      "loss": 0.6,
      "step": 89
    },
    {
      "epoch": 0.8653846153846154,
      "grad_norm": 0.6852766871452332,
      "learning_rate": 8.978122744408905e-06,
      "loss": 0.6245,
      "step": 90
    },
    {
      "epoch": 0.875,
      "grad_norm": 0.7117474675178528,
      "learning_rate": 8.943888319504456e-06,
      "loss": 0.6095,
      "step": 91
    },
    {
      "epoch": 0.8846153846153846,
      "grad_norm": 0.5599138140678406,
      "learning_rate": 8.90915741234015e-06,
      "loss": 0.6093,
      "step": 92
    },
    {
      "epoch": 0.8942307692307693,
      "grad_norm": 0.7238919138908386,
      "learning_rate": 8.873934395068006e-06,
      "loss": 0.6067,
      "step": 93
    },
    {
      "epoch": 0.9038461538461539,
      "grad_norm": 0.6798980236053467,
      "learning_rate": 8.838223701790057e-06,
      "loss": 0.6219,
      "step": 94
    },
    {
      "epoch": 0.9134615384615384,
      "grad_norm": 0.6069248914718628,
      "learning_rate": 8.802029828000157e-06,
      "loss": 0.5999,
      "step": 95
    },
    {
      "epoch": 0.9230769230769231,
      "grad_norm": 0.6400412917137146,
      "learning_rate": 8.765357330018056e-06,
      "loss": 0.6132,
      "step": 96
    },
    {
      "epoch": 0.9326923076923077,
      "grad_norm": 0.681963324546814,
      "learning_rate": 8.728210824415829e-06,
      "loss": 0.6014,
      "step": 97
    },
    {
      "epoch": 0.9423076923076923,
      "grad_norm": 0.692444384098053,
      "learning_rate": 8.690594987436705e-06,
      "loss": 0.6062,
      "step": 98
    },
    {
      "epoch": 0.9519230769230769,
      "grad_norm": 0.7312707901000977,
      "learning_rate": 8.652514554406388e-06,
      "loss": 0.6101,
      "step": 99
    },
    {
      "epoch": 0.9615384615384616,
      "grad_norm": 0.6820095181465149,
      "learning_rate": 8.613974319136959e-06,
      "loss": 0.6134,
      "step": 100
    },
    {
      "epoch": 0.9711538461538461,
      "grad_norm": 0.7500239014625549,
      "learning_rate": 8.574979133323378e-06,
      "loss": 0.599,
      "step": 101
    },
    {
      "epoch": 0.9807692307692307,
      "grad_norm": 0.6154950261116028,
      "learning_rate": 8.535533905932739e-06,
      "loss": 0.6111,
      "step": 102
    },
    {
      "epoch": 0.9903846153846154,
      "grad_norm": 0.6534649133682251,
      "learning_rate": 8.495643602586287e-06,
      "loss": 0.5939,
      "step": 103
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.6529111862182617,
      "learning_rate": 8.455313244934324e-06,
      "loss": 0.6095,
      "step": 104
    },
    {
      "epoch": 1.0096153846153846,
      "grad_norm": 0.7005321383476257,
      "learning_rate": 8.414547910024035e-06,
      "loss": 0.5796,
      "step": 105
    },
    {
      "epoch": 1.0192307692307692,
      "grad_norm": 0.5535159111022949,
      "learning_rate": 8.373352729660373e-06,
      "loss": 0.5544,
      "step": 106
    },
    {
      "epoch": 1.0288461538461537,
      "grad_norm": 0.6195758581161499,
      "learning_rate": 8.331732889760021e-06,
      "loss": 0.5746,
      "step": 107
    },
    {
      "epoch": 1.0384615384615385,
      "grad_norm": 0.5932073593139648,
      "learning_rate": 8.289693629698564e-06,
      "loss": 0.552,
      "step": 108
    },
    {
      "epoch": 1.0480769230769231,
      "grad_norm": 0.6266739964485168,
      "learning_rate": 8.247240241650918e-06,
      "loss": 0.5587,
      "step": 109
    },
    {
      "epoch": 1.0576923076923077,
      "grad_norm": 0.6072287559509277,
      "learning_rate": 8.204378069925121e-06,
      "loss": 0.5603,
      "step": 110
    },
    {
      "epoch": 1.0673076923076923,
      "grad_norm": 0.5796938538551331,
      "learning_rate": 8.16111251028955e-06,
      "loss": 0.5683,
      "step": 111
    },
    {
      "epoch": 1.0769230769230769,
      "grad_norm": 0.5911115407943726,
      "learning_rate": 8.117449009293668e-06,
      "loss": 0.5687,
      "step": 112
    },
    {
      "epoch": 1.0865384615384615,
      "grad_norm": 0.5715712308883667,
      "learning_rate": 8.073393063582386e-06,
      "loss": 0.5493,
      "step": 113
    },
    {
      "epoch": 1.0961538461538463,
      "grad_norm": 0.5958821773529053,
      "learning_rate": 8.0289502192041e-06,
      "loss": 0.5628,
      "step": 114
    },
    {
      "epoch": 1.1057692307692308,
      "grad_norm": 0.5040147304534912,
      "learning_rate": 7.984126070912519e-06,
      "loss": 0.5859,
      "step": 115
    },
    {
      "epoch": 1.1153846153846154,
      "grad_norm": 0.6042008996009827,
      "learning_rate": 7.938926261462366e-06,
      "loss": 0.55,
      "step": 116
    },
    {
      "epoch": 1.125,
      "grad_norm": 0.562238872051239,
      "learning_rate": 7.89335648089903e-06,
      "loss": 0.5868,
      "step": 117
    },
    {
      "epoch": 1.1346153846153846,
      "grad_norm": 0.5125684142112732,
      "learning_rate": 7.84742246584226e-06,
      "loss": 0.5477,
      "step": 118
    },
    {
      "epoch": 1.1442307692307692,
      "grad_norm": 0.5927157998085022,
      "learning_rate": 7.801129998764014e-06,
      "loss": 0.5579,
      "step": 119
    },
    {
      "epoch": 1.1538461538461537,
      "grad_norm": 0.6060910224914551,
      "learning_rate": 7.754484907260513e-06,
      "loss": 0.5689,
      "step": 120
    },
    {
      "epoch": 1.1634615384615385,
      "grad_norm": 0.5392307043075562,
      "learning_rate": 7.70749306331863e-06,
      "loss": 0.5714,
      "step": 121
    },
    {
      "epoch": 1.1730769230769231,
      "grad_norm": 0.5618191957473755,
      "learning_rate": 7.660160382576683e-06,
      "loss": 0.5655,
      "step": 122
    },
    {
      "epoch": 1.1826923076923077,
      "grad_norm": 0.4931347370147705,
      "learning_rate": 7.612492823579744e-06,
      "loss": 0.566,
      "step": 123
    },
    {
      "epoch": 1.1923076923076923,
      "grad_norm": 0.576999843120575,
      "learning_rate": 7.564496387029532e-06,
      "loss": 0.5569,
      "step": 124
    },
    {
      "epoch": 1.2019230769230769,
      "grad_norm": 0.5783012509346008,
      "learning_rate": 7.516177115029002e-06,
      "loss": 0.5745,
      "step": 125
    },
    {
      "epoch": 1.2115384615384615,
      "grad_norm": 0.49014097452163696,
      "learning_rate": 7.467541090321735e-06,
      "loss": 0.5762,
      "step": 126
    },
    {
      "epoch": 1.2211538461538463,
      "grad_norm": 0.572468101978302,
      "learning_rate": 7.4185944355261996e-06,
      "loss": 0.5908,
      "step": 127
    },
    {
      "epoch": 1.2307692307692308,
      "grad_norm": 0.5665020942687988,
      "learning_rate": 7.369343312364994e-06,
      "loss": 0.5605,
      "step": 128
    },
    {
      "epoch": 1.2403846153846154,
      "grad_norm": 0.505368709564209,
      "learning_rate": 7.319793920889171e-06,
      "loss": 0.5707,
      "step": 129
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.5062214136123657,
      "learning_rate": 7.269952498697734e-06,
      "loss": 0.5698,
      "step": 130
    },
    {
      "epoch": 1.2596153846153846,
      "grad_norm": 0.5319931507110596,
      "learning_rate": 7.219825320152411e-06,
      "loss": 0.5591,
      "step": 131
    },
    {
      "epoch": 1.2692307692307692,
      "grad_norm": 0.5520954132080078,
      "learning_rate": 7.169418695587791e-06,
      "loss": 0.5639,
      "step": 132
    },
    {
      "epoch": 1.2788461538461537,
      "grad_norm": 0.5157988667488098,
      "learning_rate": 7.118738970516944e-06,
      "loss": 0.5547,
      "step": 133
    },
    {
      "epoch": 1.2884615384615383,
      "grad_norm": 0.5376039743423462,
      "learning_rate": 7.067792524832604e-06,
      "loss": 0.5705,
      "step": 134
    },
    {
      "epoch": 1.2980769230769231,
      "grad_norm": 0.5486722588539124,
      "learning_rate": 7.016585772004026e-06,
      "loss": 0.5538,
      "step": 135
    },
    {
      "epoch": 1.3076923076923077,
      "grad_norm": 0.549052357673645,
      "learning_rate": 6.965125158269619e-06,
      "loss": 0.5609,
      "step": 136
    },
    {
      "epoch": 1.3173076923076923,
      "grad_norm": 0.5674235820770264,
      "learning_rate": 6.913417161825449e-06,
      "loss": 0.5609,
      "step": 137
    },
    {
      "epoch": 1.3269230769230769,
      "grad_norm": 0.5145401954650879,
      "learning_rate": 6.8614682920097265e-06,
      "loss": 0.5614,
      "step": 138
    },
    {
      "epoch": 1.3365384615384617,
      "grad_norm": 0.49938759207725525,
      "learning_rate": 6.809285088483361e-06,
      "loss": 0.5687,
      "step": 139
    },
    {
      "epoch": 1.3461538461538463,
      "grad_norm": 0.5386798977851868,
      "learning_rate": 6.7568741204067145e-06,
      "loss": 0.5595,
      "step": 140
    },
    {
      "epoch": 1.3557692307692308,
      "grad_norm": 0.5430698990821838,
      "learning_rate": 6.704241985612625e-06,
      "loss": 0.5651,
      "step": 141
    },
    {
      "epoch": 1.3653846153846154,
      "grad_norm": 0.5340512990951538,
      "learning_rate": 6.651395309775837e-06,
      "loss": 0.5651,
      "step": 142
    },
    {
      "epoch": 1.375,
      "grad_norm": 0.540773332118988,
      "learning_rate": 6.598340745578908e-06,
      "loss": 0.5513,
      "step": 143
    },
    {
      "epoch": 1.3846153846153846,
      "grad_norm": 0.5508561134338379,
      "learning_rate": 6.545084971874738e-06,
      "loss": 0.5445,
      "step": 144
    },
    {
      "epoch": 1.3942307692307692,
      "grad_norm": 0.5029264092445374,
      "learning_rate": 6.491634692845781e-06,
      "loss": 0.5613,
      "step": 145
    },
    {
      "epoch": 1.4038461538461537,
      "grad_norm": 0.5184585452079773,
      "learning_rate": 6.437996637160086e-06,
      "loss": 0.5872,
      "step": 146
    },
    {
      "epoch": 1.4134615384615383,
      "grad_norm": 0.4843497574329376,
      "learning_rate": 6.384177557124247e-06,
      "loss": 0.5705,
      "step": 147
    },
    {
      "epoch": 1.4230769230769231,
      "grad_norm": 0.5138295888900757,
      "learning_rate": 6.330184227833376e-06,
      "loss": 0.5652,
      "step": 148
    },
    {
      "epoch": 1.4326923076923077,
      "grad_norm": 0.46365150809288025,
      "learning_rate": 6.276023446318214e-06,
      "loss": 0.5534,
      "step": 149
    },
    {
      "epoch": 1.4423076923076923,
      "grad_norm": 0.5488097667694092,
      "learning_rate": 6.2217020306894705e-06,
      "loss": 0.5449,
      "step": 150
    },
    {
      "epoch": 1.4519230769230769,
      "grad_norm": 0.5190027952194214,
      "learning_rate": 6.1672268192795285e-06,
      "loss": 0.5382,
      "step": 151
    },
    {
      "epoch": 1.4615384615384617,
      "grad_norm": 0.5642440915107727,
      "learning_rate": 6.112604669781572e-06,
      "loss": 0.5447,
      "step": 152
    },
    {
      "epoch": 1.4711538461538463,
      "grad_norm": 0.5329025983810425,
      "learning_rate": 6.057842458386315e-06,
      "loss": 0.5586,
      "step": 153
    },
    {
      "epoch": 1.4807692307692308,
      "grad_norm": 0.45776233077049255,
      "learning_rate": 6.002947078916365e-06,
      "loss": 0.5621,
      "step": 154
    },
    {
      "epoch": 1.4903846153846154,
      "grad_norm": 0.5593656897544861,
      "learning_rate": 5.947925441958393e-06,
      "loss": 0.5517,
      "step": 155
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.49384182691574097,
      "learning_rate": 5.892784473993184e-06,
      "loss": 0.5713,
      "step": 156
    },
    {
      "epoch": 1.5096153846153846,
      "grad_norm": 0.4902636706829071,
      "learning_rate": 5.837531116523683e-06,
      "loss": 0.5507,
      "step": 157
    },
    {
      "epoch": 1.5192307692307692,
      "grad_norm": 0.46203383803367615,
      "learning_rate": 5.782172325201155e-06,
      "loss": 0.5472,
      "step": 158
    },
    {
      "epoch": 1.5288461538461537,
      "grad_norm": 0.5147026181221008,
      "learning_rate": 5.726715068949564e-06,
      "loss": 0.5566,
      "step": 159
    },
    {
      "epoch": 1.5384615384615383,
      "grad_norm": 0.5166094303131104,
      "learning_rate": 5.671166329088278e-06,
      "loss": 0.5324,
      "step": 160
    },
    {
      "epoch": 1.5480769230769231,
      "grad_norm": 0.552338182926178,
      "learning_rate": 5.615533098453215e-06,
      "loss": 0.5681,
      "step": 161
    },
    {
      "epoch": 1.5576923076923077,
      "grad_norm": 0.4641529619693756,
      "learning_rate": 5.559822380516539e-06,
      "loss": 0.5689,
      "step": 162
    },
    {
      "epoch": 1.5673076923076923,
      "grad_norm": 0.4713779389858246,
      "learning_rate": 5.504041188505022e-06,
      "loss": 0.559,
      "step": 163
    },
    {
      "epoch": 1.5769230769230769,
      "grad_norm": 0.5102840662002563,
      "learning_rate": 5.448196544517168e-06,
      "loss": 0.5607,
      "step": 164
    },
    {
      "epoch": 1.5865384615384617,
      "grad_norm": 0.4957525432109833,
      "learning_rate": 5.392295478639226e-06,
      "loss": 0.558,
      "step": 165
    },
    {
      "epoch": 1.5961538461538463,
      "grad_norm": 0.497121125459671,
      "learning_rate": 5.336345028060199e-06,
      "loss": 0.5576,
      "step": 166
    },
    {
      "epoch": 1.6057692307692308,
      "grad_norm": 0.48833945393562317,
      "learning_rate": 5.2803522361859596e-06,
      "loss": 0.536,
      "step": 167
    },
    {
      "epoch": 1.6153846153846154,
      "grad_norm": 0.5146234631538391,
      "learning_rate": 5.224324151752575e-06,
      "loss": 0.5529,
      "step": 168
    },
    {
      "epoch": 1.625,
      "grad_norm": 0.47272878885269165,
      "learning_rate": 5.168267827938971e-06,
      "loss": 0.5571,
      "step": 169
    },
    {
      "epoch": 1.6346153846153846,
      "grad_norm": 0.4539564251899719,
      "learning_rate": 5.112190321479026e-06,
      "loss": 0.547,
      "step": 170
    },
    {
      "epoch": 1.6442307692307692,
      "grad_norm": 0.4633065164089203,
      "learning_rate": 5.05609869177323e-06,
      "loss": 0.5339,
      "step": 171
    },
    {
      "epoch": 1.6538461538461537,
      "grad_norm": 0.5187256932258606,
      "learning_rate": 5e-06,
      "loss": 0.5534,
      "step": 172
    },
    {
      "epoch": 1.6634615384615383,
      "grad_norm": 0.46274110674858093,
      "learning_rate": 4.943901308226771e-06,
      "loss": 0.5417,
      "step": 173
    },
    {
      "epoch": 1.6730769230769231,
      "grad_norm": 0.5410740375518799,
      "learning_rate": 4.887809678520976e-06,
      "loss": 0.5713,
      "step": 174
    },
    {
      "epoch": 1.6826923076923077,
      "grad_norm": 0.43668749928474426,
      "learning_rate": 4.831732172061032e-06,
      "loss": 0.5618,
      "step": 175
    },
    {
      "epoch": 1.6923076923076923,
      "grad_norm": 0.4682793617248535,
      "learning_rate": 4.775675848247427e-06,
      "loss": 0.5711,
      "step": 176
    },
    {
      "epoch": 1.7019230769230769,
      "grad_norm": 0.5237061381340027,
      "learning_rate": 4.719647763814041e-06,
      "loss": 0.5585,
      "step": 177
    },
    {
      "epoch": 1.7115384615384617,
      "grad_norm": 0.49530908465385437,
      "learning_rate": 4.663654971939802e-06,
      "loss": 0.5547,
      "step": 178
    },
    {
      "epoch": 1.7211538461538463,
      "grad_norm": 0.47508153319358826,
      "learning_rate": 4.6077045213607765e-06,
      "loss": 0.5493,
      "step": 179
    },
    {
      "epoch": 1.7307692307692308,
      "grad_norm": 0.5441533923149109,
      "learning_rate": 4.551803455482833e-06,
      "loss": 0.5551,
      "step": 180
    },
    {
      "epoch": 1.7403846153846154,
      "grad_norm": 0.4742332994937897,
      "learning_rate": 4.4959588114949785e-06,
      "loss": 0.5573,
      "step": 181
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.44225630164146423,
      "learning_rate": 4.4401776194834615e-06,
      "loss": 0.553,
      "step": 182
    },
    {
      "epoch": 1.7596153846153846,
      "grad_norm": 0.4576217234134674,
      "learning_rate": 4.384466901546786e-06,
      "loss": 0.5784,
      "step": 183
    },
    {
      "epoch": 1.7692307692307692,
      "grad_norm": 0.4958183467388153,
      "learning_rate": 4.3288336709117246e-06,
      "loss": 0.5558,
      "step": 184
    },
    {
      "epoch": 1.7788461538461537,
      "grad_norm": 0.48444077372550964,
      "learning_rate": 4.273284931050438e-06,
      "loss": 0.5512,
      "step": 185
    },
    {
      "epoch": 1.7884615384615383,
      "grad_norm": 0.4511057138442993,
      "learning_rate": 4.217827674798845e-06,
      "loss": 0.5564,
      "step": 186
    },
    {
      "epoch": 1.7980769230769231,
      "grad_norm": 0.42453262209892273,
      "learning_rate": 4.162468883476319e-06,
      "loss": 0.5574,
      "step": 187
    },
    {
      "epoch": 1.8076923076923077,
      "grad_norm": 0.43282291293144226,
      "learning_rate": 4.107215526006818e-06,
      "loss": 0.5426,
      "step": 188
    },
    {
      "epoch": 1.8173076923076923,
      "grad_norm": 0.5064551830291748,
      "learning_rate": 4.052074558041608e-06,
      "loss": 0.5633,
      "step": 189
    },
    {
      "epoch": 1.8269230769230769,
      "grad_norm": 0.4565158188343048,
      "learning_rate": 3.997052921083637e-06,
      "loss": 0.5825,
      "step": 190
    },
    {
      "epoch": 1.8365384615384617,
      "grad_norm": 0.4503186047077179,
      "learning_rate": 3.9421575416136866e-06,
      "loss": 0.5554,
      "step": 191
    },
    {
      "epoch": 1.8461538461538463,
      "grad_norm": 0.4373958110809326,
      "learning_rate": 3.887395330218429e-06,
      "loss": 0.5454,
      "step": 192
    },
    {
      "epoch": 1.8557692307692308,
      "grad_norm": 0.46611514687538147,
      "learning_rate": 3.832773180720475e-06,
      "loss": 0.5568,
      "step": 193
    },
    {
      "epoch": 1.8653846153846154,
      "grad_norm": 0.42608413100242615,
      "learning_rate": 3.778297969310529e-06,
      "loss": 0.5574,
      "step": 194
    },
    {
      "epoch": 1.875,
      "grad_norm": 0.4223165214061737,
      "learning_rate": 3.723976553681787e-06,
      "loss": 0.5534,
      "step": 195
    },
    {
      "epoch": 1.8846153846153846,
      "grad_norm": 0.4573526084423065,
      "learning_rate": 3.669815772166625e-06,
      "loss": 0.5635,
      "step": 196
    },
    {
      "epoch": 1.8942307692307692,
      "grad_norm": 0.4243670701980591,
      "learning_rate": 3.6158224428757538e-06,
      "loss": 0.5561,
      "step": 197
    },
    {
      "epoch": 1.9038461538461537,
      "grad_norm": 0.41425010561943054,
      "learning_rate": 3.562003362839914e-06,
      "loss": 0.5563,
      "step": 198
    },
    {
      "epoch": 1.9134615384615383,
      "grad_norm": 0.4524295926094055,
      "learning_rate": 3.50836530715422e-06,
      "loss": 0.5522,
      "step": 199
    },
    {
      "epoch": 1.9230769230769231,
      "grad_norm": 0.48653334379196167,
      "learning_rate": 3.4549150281252635e-06,
      "loss": 0.5593,
      "step": 200
    },
    {
      "epoch": 1.9326923076923077,
      "grad_norm": 0.4305264949798584,
      "learning_rate": 3.4016592544210937e-06,
      "loss": 0.5606,
      "step": 201
    },
    {
      "epoch": 1.9423076923076923,
      "grad_norm": 0.4900025725364685,
      "learning_rate": 3.3486046902241663e-06,
      "loss": 0.5724,
      "step": 202
    },
    {
      "epoch": 1.9519230769230769,
      "grad_norm": 0.573262095451355,
      "learning_rate": 3.295758014387375e-06,
      "loss": 0.5296,
      "step": 203
    },
    {
      "epoch": 1.9615384615384617,
      "grad_norm": 0.46434977650642395,
      "learning_rate": 3.2431258795932863e-06,
      "loss": 0.5358,
      "step": 204
    },
    {
      "epoch": 1.9711538461538463,
      "grad_norm": 0.4425696134567261,
      "learning_rate": 3.1907149115166403e-06,
      "loss": 0.5427,
      "step": 205
    },
    {
      "epoch": 1.9807692307692308,
      "grad_norm": 0.49864649772644043,
      "learning_rate": 3.1385317079902743e-06,
      "loss": 0.5494,
      "step": 206
    },
    {
      "epoch": 1.9903846153846154,
      "grad_norm": 0.4962822496891022,
      "learning_rate": 3.0865828381745515e-06,
      "loss": 0.557,
      "step": 207
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.49204355478286743,
      "learning_rate": 3.0348748417303826e-06,
      "loss": 0.5498,
      "step": 208
    },
    {
      "epoch": 2.0096153846153846,
      "grad_norm": 0.4751799404621124,
      "learning_rate": 2.9834142279959754e-06,
      "loss": 0.5271,
      "step": 209
    },
    {
      "epoch": 2.019230769230769,
      "grad_norm": 0.43008437752723694,
      "learning_rate": 2.932207475167398e-06,
      "loss": 0.5306,
      "step": 210
    },
    {
      "epoch": 2.0288461538461537,
      "grad_norm": 0.48447534441947937,
      "learning_rate": 2.8812610294830568e-06,
      "loss": 0.5352,
      "step": 211
    },
    {
      "epoch": 2.0384615384615383,
      "grad_norm": 0.4532342851161957,
      "learning_rate": 2.83058130441221e-06,
      "loss": 0.5146,
      "step": 212
    },
    {
      "epoch": 2.048076923076923,
      "grad_norm": 0.446346253156662,
      "learning_rate": 2.7801746798475905e-06,
      "loss": 0.5203,
      "step": 213
    },
    {
      "epoch": 2.0576923076923075,
      "grad_norm": 0.4176790714263916,
      "learning_rate": 2.7300475013022666e-06,
      "loss": 0.519,
      "step": 214
    },
    {
      "epoch": 2.0673076923076925,
      "grad_norm": 0.42237284779548645,
      "learning_rate": 2.6802060791108304e-06,
      "loss": 0.5243,
      "step": 215
    },
    {
      "epoch": 2.076923076923077,
      "grad_norm": 0.43146151304244995,
      "learning_rate": 2.6306566876350072e-06,
      "loss": 0.5097,
      "step": 216
    },
    {
      "epoch": 2.0865384615384617,
      "grad_norm": 0.44543007016181946,
      "learning_rate": 2.5814055644738013e-06,
      "loss": 0.5133,
      "step": 217
    },
    {
      "epoch": 2.0961538461538463,
      "grad_norm": 0.4214424192905426,
      "learning_rate": 2.532458909678266e-06,
      "loss": 0.5249,
      "step": 218
    },
    {
      "epoch": 2.105769230769231,
      "grad_norm": 0.4157821238040924,
      "learning_rate": 2.483822884971e-06,
      "loss": 0.5082,
      "step": 219
    },
    {
      "epoch": 2.1153846153846154,
      "grad_norm": 0.4027000367641449,
      "learning_rate": 2.43550361297047e-06,
      "loss": 0.509,
      "step": 220
    },
    {
      "epoch": 2.125,
      "grad_norm": 0.40891340374946594,
      "learning_rate": 2.387507176420256e-06,
      "loss": 0.5439,
      "step": 221
    },
    {
      "epoch": 2.1346153846153846,
      "grad_norm": 0.42513319849967957,
      "learning_rate": 2.339839617423318e-06,
      "loss": 0.5229,
      "step": 222
    },
    {
      "epoch": 2.144230769230769,
      "grad_norm": 0.4521211087703705,
      "learning_rate": 2.2925069366813718e-06,
      "loss": 0.518,
      "step": 223
    },
    {
      "epoch": 2.1538461538461537,
      "grad_norm": 0.4449746012687683,
      "learning_rate": 2.245515092739488e-06,
      "loss": 0.5353,
      "step": 224
    },
    {
      "epoch": 2.1634615384615383,
      "grad_norm": 0.45328670740127563,
      "learning_rate": 2.1988700012359865e-06,
      "loss": 0.5129,
      "step": 225
    },
    {
      "epoch": 2.173076923076923,
      "grad_norm": 0.46686699986457825,
      "learning_rate": 2.1525775341577404e-06,
      "loss": 0.5032,
      "step": 226
    },
    {
      "epoch": 2.1826923076923075,
      "grad_norm": 0.3987773060798645,
      "learning_rate": 2.1066435191009717e-06,
      "loss": 0.5279,
      "step": 227
    },
    {
      "epoch": 2.1923076923076925,
      "grad_norm": 0.47221437096595764,
      "learning_rate": 2.061073738537635e-06,
      "loss": 0.4976,
      "step": 228
    },
    {
      "epoch": 2.201923076923077,
      "grad_norm": 0.45039865374565125,
      "learning_rate": 2.0158739290874822e-06,
      "loss": 0.5322,
      "step": 229
    },
    {
      "epoch": 2.2115384615384617,
      "grad_norm": 0.42744478583335876,
      "learning_rate": 1.971049780795901e-06,
      "loss": 0.5343,
      "step": 230
    },
    {
      "epoch": 2.2211538461538463,
      "grad_norm": 0.37364616990089417,
      "learning_rate": 1.9266069364176144e-06,
      "loss": 0.5191,
      "step": 231
    },
    {
      "epoch": 2.230769230769231,
      "grad_norm": 0.4516580104827881,
      "learning_rate": 1.8825509907063328e-06,
      "loss": 0.5067,
      "step": 232
    },
    {
      "epoch": 2.2403846153846154,
      "grad_norm": 0.41889938712120056,
      "learning_rate": 1.838887489710452e-06,
      "loss": 0.5168,
      "step": 233
    },
    {
      "epoch": 2.25,
      "grad_norm": 0.4030407965183258,
      "learning_rate": 1.7956219300748796e-06,
      "loss": 0.512,
      "step": 234
    },
    {
      "epoch": 2.2596153846153846,
      "grad_norm": 0.4093243181705475,
      "learning_rate": 1.7527597583490825e-06,
      "loss": 0.5021,
      "step": 235
    },
    {
      "epoch": 2.269230769230769,
      "grad_norm": 0.40421709418296814,
      "learning_rate": 1.7103063703014372e-06,
      "loss": 0.5182,
      "step": 236
    },
    {
      "epoch": 2.2788461538461537,
      "grad_norm": 0.40137532353401184,
      "learning_rate": 1.6682671102399806e-06,
      "loss": 0.5173,
      "step": 237
    },
    {
      "epoch": 2.2884615384615383,
      "grad_norm": 0.4341326653957367,
      "learning_rate": 1.6266472703396286e-06,
      "loss": 0.4935,
      "step": 238
    },
    {
      "epoch": 2.298076923076923,
      "grad_norm": 0.3878624141216278,
      "learning_rate": 1.5854520899759656e-06,
      "loss": 0.526,
      "step": 239
    },
    {
      "epoch": 2.3076923076923075,
      "grad_norm": 0.38479188084602356,
      "learning_rate": 1.544686755065677e-06,
      "loss": 0.5167,
      "step": 240
    },
    {
      "epoch": 2.3173076923076925,
      "grad_norm": 0.3775184154510498,
      "learning_rate": 1.5043563974137132e-06,
      "loss": 0.508,
      "step": 241
    },
    {
      "epoch": 2.326923076923077,
      "grad_norm": 0.40884026885032654,
      "learning_rate": 1.4644660940672628e-06,
      "loss": 0.5225,
      "step": 242
    },
    {
      "epoch": 2.3365384615384617,
      "grad_norm": 0.3940774202346802,
      "learning_rate": 1.4250208666766235e-06,
      "loss": 0.5046,
      "step": 243
    },
    {
      "epoch": 2.3461538461538463,
      "grad_norm": 0.3791804015636444,
      "learning_rate": 1.3860256808630429e-06,
      "loss": 0.5249,
      "step": 244
    },
    {
      "epoch": 2.355769230769231,
      "grad_norm": 0.41370531916618347,
      "learning_rate": 1.3474854455936126e-06,
      "loss": 0.5211,
      "step": 245
    },
    {
      "epoch": 2.3653846153846154,
      "grad_norm": 0.38043004274368286,
      "learning_rate": 1.3094050125632973e-06,
      "loss": 0.527,
      "step": 246
    },
    {
      "epoch": 2.375,
      "grad_norm": 0.3982435464859009,
      "learning_rate": 1.2717891755841722e-06,
      "loss": 0.513,
      "step": 247
    },
    {
      "epoch": 2.3846153846153846,
      "grad_norm": 0.36393943428993225,
      "learning_rate": 1.234642669981946e-06,
      "loss": 0.5166,
      "step": 248
    },
    {
      "epoch": 2.394230769230769,
      "grad_norm": 0.3941881060600281,
      "learning_rate": 1.1979701719998454e-06,
      "loss": 0.5247,
      "step": 249
    },
    {
      "epoch": 2.4038461538461537,
      "grad_norm": 0.3887317180633545,
      "learning_rate": 1.1617762982099446e-06,
      "loss": 0.5176,
      "step": 250
    },
    {
      "epoch": 2.4134615384615383,
      "grad_norm": 0.38710084557533264,
      "learning_rate": 1.1260656049319957e-06,
      "loss": 0.5139,
      "step": 251
    },
    {
      "epoch": 2.423076923076923,
      "grad_norm": 0.4177461564540863,
      "learning_rate": 1.0908425876598512e-06,
      "loss": 0.5071,
      "step": 252
    },
    {
      "epoch": 2.4326923076923075,
      "grad_norm": 0.41076067090034485,
      "learning_rate": 1.0561116804955451e-06,
      "loss": 0.5284,
      "step": 253
    },
    {
      "epoch": 2.4423076923076925,
      "grad_norm": 0.3909740149974823,
      "learning_rate": 1.0218772555910955e-06,
      "loss": 0.5352,
      "step": 254
    },
    {
      "epoch": 2.451923076923077,
      "grad_norm": 0.38169676065444946,
      "learning_rate": 9.881436225981107e-07,
      "loss": 0.5265,
      "step": 255
    },
    {
      "epoch": 2.4615384615384617,
      "grad_norm": 0.37473157048225403,
      "learning_rate": 9.549150281252633e-07,
      "loss": 0.5255,
      "step": 256
    },
    {
      "epoch": 2.4711538461538463,
      "grad_norm": 0.3663051724433899,
      "learning_rate": 9.221956552036992e-07,
      "loss": 0.5072,
      "step": 257
    },
    {
      "epoch": 2.480769230769231,
      "grad_norm": 0.360029011964798,
      "learning_rate": 8.899896227604509e-07,
      "loss": 0.5319,
      "step": 258
    },
    {
      "epoch": 2.4903846153846154,
      "grad_norm": 0.40786483883857727,
      "learning_rate": 8.58300985099918e-07,
      "loss": 0.5309,
      "step": 259
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.4109921455383301,
      "learning_rate": 8.271337313934869e-07,
      "loss": 0.5007,
      "step": 260
    },
    {
      "epoch": 2.5096153846153846,
      "grad_norm": 0.4578860402107239,
      "learning_rate": 7.964917851773496e-07,
      "loss": 0.5209,
      "step": 261
    },
    {
      "epoch": 2.519230769230769,
      "grad_norm": 0.3682916760444641,
      "learning_rate": 7.663790038585794e-07,
      "loss": 0.4987,
      "step": 262
    },
    {
      "epoch": 2.5288461538461537,
      "grad_norm": 0.34660568833351135,
      "learning_rate": 7.367991782295392e-07,
      "loss": 0.5446,
      "step": 263
    },
    {
      "epoch": 2.5384615384615383,
      "grad_norm": 0.366674542427063,
      "learning_rate": 7.077560319906696e-07,
      "loss": 0.513,
      "step": 264
    },
    {
      "epoch": 2.5480769230769234,
      "grad_norm": 0.3877011239528656,
      "learning_rate": 6.792532212817271e-07,
      "loss": 0.5108,
      "step": 265
    },
    {
      "epoch": 2.5576923076923075,
      "grad_norm": 0.37091997265815735,
      "learning_rate": 6.512943342215234e-07,
      "loss": 0.5043,
      "step": 266
    },
    {
      "epoch": 2.5673076923076925,
      "grad_norm": 0.35695940256118774,
      "learning_rate": 6.238828904562316e-07,
      "loss": 0.5117,
      "step": 267
    },
    {
      "epoch": 2.5769230769230766,
      "grad_norm": 0.391635000705719,
      "learning_rate": 5.9702234071631e-07,
      "loss": 0.4988,
      "step": 268
    },
    {
      "epoch": 2.5865384615384617,
      "grad_norm": 0.3883398473262787,
      "learning_rate": 5.707160663821009e-07,
      "loss": 0.5036,
      "step": 269
    },
    {
      "epoch": 2.5961538461538463,
      "grad_norm": 0.36026155948638916,
      "learning_rate": 5.449673790581611e-07,
      "loss": 0.5149,
      "step": 270
    },
    {
      "epoch": 2.605769230769231,
      "grad_norm": 0.41427090764045715,
      "learning_rate": 5.197795201563744e-07,
      "loss": 0.5032,
      "step": 271
    },
    {
      "epoch": 2.6153846153846154,
      "grad_norm": 0.3335127830505371,
      "learning_rate": 4.951556604879049e-07,
      "loss": 0.5278,
      "step": 272
    },
    {
      "epoch": 2.625,
      "grad_norm": 0.3611414134502411,
      "learning_rate": 4.710988998640298e-07,
      "loss": 0.5148,
      "step": 273
    },
    {
      "epoch": 2.6346153846153846,
      "grad_norm": 0.37885385751724243,
      "learning_rate": 4.4761226670592074e-07,
      "loss": 0.5019,
      "step": 274
    },
    {
      "epoch": 2.644230769230769,
      "grad_norm": 0.3800183832645416,
      "learning_rate": 4.2469871766340096e-07,
      "loss": 0.5278,
      "step": 275
    },
    {
      "epoch": 2.6538461538461537,
      "grad_norm": 0.3709475100040436,
      "learning_rate": 4.0236113724274716e-07,
      "loss": 0.5235,
      "step": 276
    },
    {
      "epoch": 2.6634615384615383,
      "grad_norm": 0.3726697564125061,
      "learning_rate": 3.8060233744356634e-07,
      "loss": 0.5084,
      "step": 277
    },
    {
      "epoch": 2.6730769230769234,
      "grad_norm": 0.4129122793674469,
      "learning_rate": 3.5942505740480583e-07,
      "loss": 0.5213,
      "step": 278
    },
    {
      "epoch": 2.6826923076923075,
      "grad_norm": 0.36773934960365295,
      "learning_rate": 3.3883196305992906e-07,
      "loss": 0.5282,
      "step": 279
    },
    {
      "epoch": 2.6923076923076925,
      "grad_norm": 0.3664220869541168,
      "learning_rate": 3.18825646801314e-07,
      "loss": 0.5129,
      "step": 280
    },
    {
      "epoch": 2.7019230769230766,
      "grad_norm": 0.37792733311653137,
      "learning_rate": 2.9940862715390483e-07,
      "loss": 0.4986,
      "step": 281
    },
    {
      "epoch": 2.7115384615384617,
      "grad_norm": 0.3562301993370056,
      "learning_rate": 2.8058334845816214e-07,
      "loss": 0.518,
      "step": 282
    },
    {
      "epoch": 2.7211538461538463,
      "grad_norm": 0.3533957302570343,
      "learning_rate": 2.6235218056235633e-07,
      "loss": 0.5168,
      "step": 283
    },
    {
      "epoch": 2.730769230769231,
      "grad_norm": 0.35810333490371704,
      "learning_rate": 2.447174185242324e-07,
      "loss": 0.5167,
      "step": 284
    },
    {
      "epoch": 2.7403846153846154,
      "grad_norm": 0.37440261244773865,
      "learning_rate": 2.276812823220964e-07,
      "loss": 0.5098,
      "step": 285
    },
    {
      "epoch": 2.75,
      "grad_norm": 0.3287203311920166,
      "learning_rate": 2.1124591657534776e-07,
      "loss": 0.5397,
      "step": 286
    },
    {
      "epoch": 2.7596153846153846,
      "grad_norm": 0.34640252590179443,
      "learning_rate": 1.9541339027450256e-07,
      "loss": 0.5334,
      "step": 287
    },
    {
      "epoch": 2.769230769230769,
      "grad_norm": 0.3535205125808716,
      "learning_rate": 1.801856965207338e-07,
      "loss": 0.5207,
      "step": 288
    },
    {
      "epoch": 2.7788461538461537,
      "grad_norm": 0.35911622643470764,
      "learning_rate": 1.6556475227496816e-07,
      "loss": 0.528,
      "step": 289
    },
    {
      "epoch": 2.7884615384615383,
      "grad_norm": 0.37213701009750366,
      "learning_rate": 1.5155239811656562e-07,
      "loss": 0.4973,
      "step": 290
    },
    {
      "epoch": 2.7980769230769234,
      "grad_norm": 0.3440645933151245,
      "learning_rate": 1.3815039801161723e-07,
      "loss": 0.5276,
      "step": 291
    },
    {
      "epoch": 2.8076923076923075,
      "grad_norm": 0.3481317162513733,
      "learning_rate": 1.253604390908819e-07,
      "loss": 0.505,
      "step": 292
    },
    {
      "epoch": 2.8173076923076925,
      "grad_norm": 0.35088083148002625,
      "learning_rate": 1.1318413143740436e-07,
      "loss": 0.525,
      "step": 293
    },
    {
      "epoch": 2.8269230769230766,
      "grad_norm": 0.3465360105037689,
      "learning_rate": 1.0162300788382263e-07,
      "loss": 0.5264,
      "step": 294
    },
    {
      "epoch": 2.8365384615384617,
      "grad_norm": 0.3661733865737915,
      "learning_rate": 9.0678523819408e-08,
      "loss": 0.5295,
      "step": 295
    },
    {
      "epoch": 2.8461538461538463,
      "grad_norm": 0.3409578204154968,
      "learning_rate": 8.035205700685167e-08,
      "loss": 0.5167,
      "step": 296
    },
    {
      "epoch": 2.855769230769231,
      "grad_norm": 0.36257433891296387,
      "learning_rate": 7.064490740882057e-08,
      "loss": 0.4976,
      "step": 297
    },
    {
      "epoch": 2.8653846153846154,
      "grad_norm": 0.3648744523525238,
      "learning_rate": 6.15582970243117e-08,
      "loss": 0.5158,
      "step": 298
    },
    {
      "epoch": 2.875,
      "grad_norm": 0.3494463860988617,
      "learning_rate": 5.3093369734816824e-08,
      "loss": 0.5027,
      "step": 299
    },
    {
      "epoch": 2.8846153846153846,
      "grad_norm": 0.39388546347618103,
      "learning_rate": 4.52511911603265e-08,
      "loss": 0.5145,
      "step": 300
    },
    {
      "epoch": 2.894230769230769,
      "grad_norm": 0.3406028151512146,
      "learning_rate": 3.8032748525179684e-08,
      "loss": 0.5336,
      "step": 301
    },
    {
      "epoch": 2.9038461538461537,
      "grad_norm": 0.34781765937805176,
      "learning_rate": 3.143895053378698e-08,
      "loss": 0.5152,
      "step": 302
    },
    {
      "epoch": 2.9134615384615383,
      "grad_norm": 0.35940438508987427,
      "learning_rate": 2.547062725623828e-08,
      "loss": 0.5023,
      "step": 303
    },
    {
      "epoch": 2.9230769230769234,
      "grad_norm": 0.3313487768173218,
      "learning_rate": 2.012853002380466e-08,
      "loss": 0.5332,
      "step": 304
    },
    {
      "epoch": 2.9326923076923075,
      "grad_norm": 0.3401786983013153,
      "learning_rate": 1.541333133436018e-08,
      "loss": 0.5028,
      "step": 305
    },
    {
      "epoch": 2.9423076923076925,
      "grad_norm": 0.36166515946388245,
      "learning_rate": 1.132562476771959e-08,
      "loss": 0.5202,
      "step": 306
    },
    {
      "epoch": 2.9519230769230766,
      "grad_norm": 0.37917229533195496,
      "learning_rate": 7.865924910916977e-09,
      "loss": 0.5138,
      "step": 307
    },
    {
      "epoch": 2.9615384615384617,
      "grad_norm": 0.36633941531181335,
      "learning_rate": 5.034667293427053e-09,
      "loss": 0.5177,
      "step": 308
    },
    {
      "epoch": 2.9711538461538463,
      "grad_norm": 0.3561273217201233,
      "learning_rate": 2.8322083323334417e-09,
      "loss": 0.512,
      "step": 309
    },
    {
      "epoch": 2.980769230769231,
      "grad_norm": 0.33336392045021057,
      "learning_rate": 1.2588252874673469e-09,
      "loss": 0.5214,
      "step": 310
    },
    {
      "epoch": 2.9903846153846154,
      "grad_norm": 0.3431507647037506,
      "learning_rate": 3.147162264971471e-10,
      "loss": 0.5239,
      "step": 311
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.3575623333454132,
      "learning_rate": 0.0,
      "loss": 0.5068,
      "step": 312
    },
    {
      "epoch": 3.0,
      "step": 312,
      "total_flos": 489026734391296.0,
      "train_loss": 0.5917462170697175,
      "train_runtime": 8001.2912,
      "train_samples_per_second": 3.731,
      "train_steps_per_second": 0.039
    }
  ],
  "logging_steps": 1,
  "max_steps": 312,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 489026734391296.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}