{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 96,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03125,
      "grad_norm": 5.212899684906006,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.5701,
      "step": 1
    },
    {
      "epoch": 0.0625,
      "grad_norm": 5.509668827056885,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.6002,
      "step": 2
    },
    {
      "epoch": 0.09375,
      "grad_norm": 5.713456630706787,
      "learning_rate": 3e-06,
      "loss": 0.578,
      "step": 3
    },
    {
      "epoch": 0.125,
      "grad_norm": 4.650047779083252,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.5471,
      "step": 4
    },
    {
      "epoch": 0.15625,
      "grad_norm": 4.2114577293396,
      "learning_rate": 5e-06,
      "loss": 0.6588,
      "step": 5
    },
    {
      "epoch": 0.1875,
      "grad_norm": 3.0272881984710693,
      "learning_rate": 6e-06,
      "loss": 0.5074,
      "step": 6
    },
    {
      "epoch": 0.21875,
      "grad_norm": 2.395081043243408,
      "learning_rate": 7e-06,
      "loss": 0.484,
      "step": 7
    },
    {
      "epoch": 0.25,
      "grad_norm": 5.17173957824707,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.6328,
      "step": 8
    },
    {
      "epoch": 0.28125,
      "grad_norm": 2.2946617603302,
      "learning_rate": 9e-06,
      "loss": 0.47,
      "step": 9
    },
    {
      "epoch": 0.3125,
      "grad_norm": 3.2116971015930176,
      "learning_rate": 1e-05,
      "loss": 0.4675,
      "step": 10
    },
    {
      "epoch": 0.34375,
      "grad_norm": 7.591786861419678,
      "learning_rate": 9.996664241851197e-06,
      "loss": 0.6794,
      "step": 11
    },
    {
      "epoch": 0.375,
      "grad_norm": 2.43316912651062,
      "learning_rate": 9.986661418317759e-06,
      "loss": 0.4242,
      "step": 12
    },
    {
      "epoch": 0.40625,
      "grad_norm": 2.3921120166778564,
      "learning_rate": 9.970004876199731e-06,
      "loss": 0.4481,
      "step": 13
    },
    {
      "epoch": 0.4375,
      "grad_norm": 6.6794328689575195,
      "learning_rate": 9.946716840375552e-06,
      "loss": 0.5388,
      "step": 14
    },
    {
      "epoch": 0.46875,
      "grad_norm": 1.9656387567520142,
      "learning_rate": 9.91682838414733e-06,
      "loss": 0.4292,
      "step": 15
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.9465067386627197,
      "learning_rate": 9.880379387779637e-06,
      "loss": 0.3977,
      "step": 16
    },
    {
      "epoch": 0.53125,
      "grad_norm": 1.9357396364212036,
      "learning_rate": 9.837418485287126e-06,
      "loss": 0.418,
      "step": 17
    },
    {
      "epoch": 0.5625,
      "grad_norm": 1.4802380800247192,
      "learning_rate": 9.78800299954203e-06,
      "loss": 0.382,
      "step": 18
    },
    {
      "epoch": 0.59375,
      "grad_norm": 1.3597159385681152,
      "learning_rate": 9.732198865788047e-06,
      "loss": 0.3844,
      "step": 19
    },
    {
      "epoch": 0.625,
      "grad_norm": 1.699665904045105,
      "learning_rate": 9.670080543662742e-06,
      "loss": 0.3707,
      "step": 20
    },
    {
      "epoch": 0.65625,
      "grad_norm": 2.6564078330993652,
      "learning_rate": 9.601730917845798e-06,
      "loss": 0.5049,
      "step": 21
    },
    {
      "epoch": 0.6875,
      "grad_norm": 1.8858695030212402,
      "learning_rate": 9.527241187465735e-06,
      "loss": 0.3874,
      "step": 22
    },
    {
      "epoch": 0.71875,
      "grad_norm": 1.5440151691436768,
      "learning_rate": 9.446710744412595e-06,
      "loss": 0.3295,
      "step": 23
    },
    {
      "epoch": 0.75,
      "grad_norm": 2.438760280609131,
      "learning_rate": 9.36024704071904e-06,
      "loss": 0.3313,
      "step": 24
    },
    {
      "epoch": 0.78125,
      "grad_norm": 1.462939739227295,
      "learning_rate": 9.267965445186733e-06,
      "loss": 0.3649,
      "step": 25
    },
    {
      "epoch": 0.8125,
      "grad_norm": 3.8231046199798584,
      "learning_rate": 9.16998908944939e-06,
      "loss": 0.4084,
      "step": 26
    },
    {
      "epoch": 0.84375,
      "grad_norm": 1.501857876777649,
      "learning_rate": 9.066448703677828e-06,
      "loss": 0.3463,
      "step": 27
    },
    {
      "epoch": 0.875,
      "grad_norm": 4.742476463317871,
      "learning_rate": 8.957482442146271e-06,
      "loss": 0.4789,
      "step": 28
    },
    {
      "epoch": 0.90625,
      "grad_norm": 2.2912559509277344,
      "learning_rate": 8.843235698892661e-06,
      "loss": 0.4739,
      "step": 29
    },
    {
      "epoch": 0.9375,
      "grad_norm": 1.3147192001342773,
      "learning_rate": 8.72386091371891e-06,
      "loss": 0.3864,
      "step": 30
    },
    {
      "epoch": 0.96875,
      "grad_norm": 1.2147444486618042,
      "learning_rate": 8.599517368789981e-06,
      "loss": 0.3181,
      "step": 31
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.0684988498687744,
      "learning_rate": 8.470370976103171e-06,
      "loss": 0.2821,
      "step": 32
    },
    {
      "epoch": 1.03125,
      "grad_norm": 2.0786163806915283,
      "learning_rate": 8.336594056111197e-06,
      "loss": 0.288,
      "step": 33
    },
    {
      "epoch": 1.0625,
      "grad_norm": 1.4894177913665771,
      "learning_rate": 8.198365107794457e-06,
      "loss": 0.275,
      "step": 34
    },
    {
      "epoch": 1.09375,
      "grad_norm": 1.039533019065857,
      "learning_rate": 8.055868570489247e-06,
      "loss": 0.2725,
      "step": 35
    },
    {
      "epoch": 1.125,
      "grad_norm": 1.105832815170288,
      "learning_rate": 7.909294577789765e-06,
      "loss": 0.2298,
      "step": 36
    },
    {
      "epoch": 1.15625,
      "grad_norm": 0.9874384999275208,
      "learning_rate": 7.75883870385223e-06,
      "loss": 0.2389,
      "step": 37
    },
    {
      "epoch": 1.1875,
      "grad_norm": 1.0676255226135254,
      "learning_rate": 7.604701702439652e-06,
      "loss": 0.2394,
      "step": 38
    },
    {
      "epoch": 1.21875,
      "grad_norm": 1.0422248840332031,
      "learning_rate": 7.447089239055428e-06,
      "loss": 0.2535,
      "step": 39
    },
    {
      "epoch": 1.25,
      "grad_norm": 1.4323464632034302,
      "learning_rate": 7.286211616523193e-06,
      "loss": 0.2704,
      "step": 40
    },
    {
      "epoch": 1.28125,
      "grad_norm": 1.0653886795043945,
      "learning_rate": 7.122283494379076e-06,
      "loss": 0.2416,
      "step": 41
    },
    {
      "epoch": 1.3125,
      "grad_norm": 3.6688082218170166,
      "learning_rate": 6.95552360245078e-06,
      "loss": 0.2793,
      "step": 42
    },
    {
      "epoch": 1.34375,
      "grad_norm": 2.467761993408203,
      "learning_rate": 6.786154449005664e-06,
      "loss": 0.2504,
      "step": 43
    },
    {
      "epoch": 1.375,
      "grad_norm": 1.1717958450317383,
      "learning_rate": 6.614402023857231e-06,
      "loss": 0.2549,
      "step": 44
    },
    {
      "epoch": 1.40625,
      "grad_norm": 2.00671648979187,
      "learning_rate": 6.440495496826189e-06,
      "loss": 0.2179,
      "step": 45
    },
    {
      "epoch": 1.4375,
      "grad_norm": 1.0691853761672974,
      "learning_rate": 6.264666911958404e-06,
      "loss": 0.1993,
      "step": 46
    },
    {
      "epoch": 1.46875,
      "grad_norm": 2.143359661102295,
      "learning_rate": 6.087150877907786e-06,
      "loss": 0.2553,
      "step": 47
    },
    {
      "epoch": 1.5,
      "grad_norm": 3.1730265617370605,
      "learning_rate": 5.908184254897183e-06,
      "loss": 0.2949,
      "step": 48
    },
    {
      "epoch": 1.53125,
      "grad_norm": 1.2100728750228882,
      "learning_rate": 5.728005838675026e-06,
      "loss": 0.2204,
      "step": 49
    },
    {
      "epoch": 1.5625,
      "grad_norm": 1.1710706949234009,
      "learning_rate": 5.546856041889374e-06,
      "loss": 0.2082,
      "step": 50
    },
    {
      "epoch": 1.59375,
      "grad_norm": 1.1728243827819824,
      "learning_rate": 5.364976573304538e-06,
      "loss": 0.2052,
      "step": 51
    },
    {
      "epoch": 1.625,
      "grad_norm": 1.1219596862792969,
      "learning_rate": 5.182610115288296e-06,
      "loss": 0.2308,
      "step": 52
    },
    {
      "epoch": 1.65625,
      "grad_norm": 1.3272408246994019,
      "learning_rate": 5e-06,
      "loss": 0.2448,
      "step": 53
    },
    {
      "epoch": 1.6875,
      "grad_norm": 1.3118982315063477,
      "learning_rate": 4.817389884711706e-06,
      "loss": 0.2229,
      "step": 54
    },
    {
      "epoch": 1.71875,
      "grad_norm": 1.0828900337219238,
      "learning_rate": 4.635023426695462e-06,
      "loss": 0.216,
      "step": 55
    },
    {
      "epoch": 1.75,
      "grad_norm": 1.1662155389785767,
      "learning_rate": 4.4531439581106295e-06,
      "loss": 0.1926,
      "step": 56
    },
    {
      "epoch": 1.78125,
      "grad_norm": 1.1041769981384277,
      "learning_rate": 4.271994161324977e-06,
      "loss": 0.2045,
      "step": 57
    },
    {
      "epoch": 1.8125,
      "grad_norm": 1.1366336345672607,
      "learning_rate": 4.091815745102818e-06,
      "loss": 0.2125,
      "step": 58
    },
    {
      "epoch": 1.84375,
      "grad_norm": 1.0897059440612793,
      "learning_rate": 3.912849122092216e-06,
      "loss": 0.2282,
      "step": 59
    },
    {
      "epoch": 1.875,
      "grad_norm": 0.9965717196464539,
      "learning_rate": 3.7353330880415963e-06,
      "loss": 0.2317,
      "step": 60
    },
    {
      "epoch": 1.90625,
      "grad_norm": 1.1308876276016235,
      "learning_rate": 3.5595045031738123e-06,
      "loss": 0.2459,
      "step": 61
    },
    {
      "epoch": 1.9375,
      "grad_norm": 1.449512243270874,
      "learning_rate": 3.3855979761427705e-06,
      "loss": 0.2836,
      "step": 62
    },
    {
      "epoch": 1.96875,
      "grad_norm": 1.0659527778625488,
      "learning_rate": 3.2138455509943365e-06,
      "loss": 0.2508,
      "step": 63
    },
    {
      "epoch": 2.0,
      "grad_norm": 1.1291404962539673,
      "learning_rate": 3.044476397549221e-06,
      "loss": 0.1681,
      "step": 64
    },
    {
      "epoch": 2.03125,
      "grad_norm": 0.9167552590370178,
      "learning_rate": 2.8777165056209256e-06,
      "loss": 0.1305,
      "step": 65
    },
    {
      "epoch": 2.0625,
      "grad_norm": 0.9367494583129883,
      "learning_rate": 2.7137883834768076e-06,
      "loss": 0.1462,
      "step": 66
    },
    {
      "epoch": 2.09375,
      "grad_norm": 0.7913087010383606,
      "learning_rate": 2.5529107609445737e-06,
      "loss": 0.1384,
      "step": 67
    },
    {
      "epoch": 2.125,
      "grad_norm": 0.8486475944519043,
      "learning_rate": 2.3952982975603494e-06,
      "loss": 0.1262,
      "step": 68
    },
    {
      "epoch": 2.15625,
      "grad_norm": 0.8827183246612549,
      "learning_rate": 2.2411612961477704e-06,
      "loss": 0.1297,
      "step": 69
    },
    {
      "epoch": 2.1875,
      "grad_norm": 0.9331244230270386,
      "learning_rate": 2.0907054222102367e-06,
      "loss": 0.127,
      "step": 70
    },
    {
      "epoch": 2.21875,
      "grad_norm": 0.8335605263710022,
      "learning_rate": 1.944131429510754e-06,
      "loss": 0.131,
      "step": 71
    },
    {
      "epoch": 2.25,
      "grad_norm": 0.7966263890266418,
      "learning_rate": 1.8016348922055448e-06,
      "loss": 0.1119,
      "step": 72
    },
    {
      "epoch": 2.28125,
      "grad_norm": 0.9054017066955566,
      "learning_rate": 1.6634059438888034e-06,
      "loss": 0.1131,
      "step": 73
    },
    {
      "epoch": 2.3125,
      "grad_norm": 1.0144445896148682,
      "learning_rate": 1.5296290238968303e-06,
      "loss": 0.1147,
      "step": 74
    },
    {
      "epoch": 2.34375,
      "grad_norm": 0.9999849796295166,
      "learning_rate": 1.4004826312100218e-06,
      "loss": 0.1219,
      "step": 75
    },
    {
      "epoch": 2.375,
      "grad_norm": 1.117247462272644,
      "learning_rate": 1.2761390862810907e-06,
      "loss": 0.1178,
      "step": 76
    },
    {
      "epoch": 2.40625,
      "grad_norm": 1.0464344024658203,
      "learning_rate": 1.1567643011073393e-06,
      "loss": 0.1102,
      "step": 77
    },
    {
      "epoch": 2.4375,
      "grad_norm": 1.0447325706481934,
      "learning_rate": 1.04251755785373e-06,
      "loss": 0.1077,
      "step": 78
    },
    {
      "epoch": 2.46875,
      "grad_norm": 1.0977072715759277,
      "learning_rate": 9.335512963221732e-07,
      "loss": 0.1045,
      "step": 79
    },
    {
      "epoch": 2.5,
      "grad_norm": 1.6026099920272827,
      "learning_rate": 8.30010910550611e-07,
      "loss": 0.1298,
      "step": 80
    },
    {
      "epoch": 2.53125,
      "grad_norm": 0.9658783674240112,
      "learning_rate": 7.320345548132679e-07,
      "loss": 0.1336,
      "step": 81
    },
    {
      "epoch": 2.5625,
      "grad_norm": 1.047508716583252,
      "learning_rate": 6.397529592809615e-07,
      "loss": 0.1017,
      "step": 82
    },
    {
      "epoch": 2.59375,
      "grad_norm": 1.0574610233306885,
      "learning_rate": 5.532892555874059e-07,
      "loss": 0.1058,
      "step": 83
    },
    {
      "epoch": 2.625,
      "grad_norm": 1.155909776687622,
      "learning_rate": 4.727588125342669e-07,
      "loss": 0.1174,
      "step": 84
    },
    {
      "epoch": 2.65625,
      "grad_norm": 1.0230557918548584,
      "learning_rate": 3.9826908215420344e-07,
      "loss": 0.1076,
      "step": 85
    },
    {
      "epoch": 2.6875,
      "grad_norm": 0.9394001960754395,
      "learning_rate": 3.299194563372604e-07,
      "loss": 0.1411,
      "step": 86
    },
    {
      "epoch": 2.71875,
      "grad_norm": 0.9740204811096191,
      "learning_rate": 2.67801134211953e-07,
      "loss": 0.1101,
      "step": 87
    },
    {
      "epoch": 2.75,
      "grad_norm": 1.1990844011306763,
      "learning_rate": 2.1199700045797077e-07,
      "loss": 0.1397,
      "step": 88
    },
    {
      "epoch": 2.78125,
      "grad_norm": 0.9166547060012817,
      "learning_rate": 1.6258151471287397e-07,
      "loss": 0.1053,
      "step": 89
    },
    {
      "epoch": 2.8125,
      "grad_norm": 1.7290699481964111,
      "learning_rate": 1.196206122203647e-07,
      "loss": 0.1197,
      "step": 90
    },
    {
      "epoch": 2.84375,
      "grad_norm": 0.9263373017311096,
      "learning_rate": 8.317161585266964e-08,
      "loss": 0.1004,
      "step": 91
    },
    {
      "epoch": 2.875,
      "grad_norm": 0.9613826870918274,
      "learning_rate": 5.3283159624448745e-08,
      "loss": 0.1161,
      "step": 92
    },
    {
      "epoch": 2.90625,
      "grad_norm": 0.9571884870529175,
      "learning_rate": 2.9995123800270476e-08,
      "loss": 0.1109,
      "step": 93
    },
    {
      "epoch": 2.9375,
      "grad_norm": 0.9712323546409607,
      "learning_rate": 1.333858168224178e-08,
      "loss": 0.109,
      "step": 94
    },
    {
      "epoch": 2.96875,
      "grad_norm": 0.9421584606170654,
      "learning_rate": 3.3357581488030476e-09,
      "loss": 0.1002,
      "step": 95
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.7927206158638,
      "learning_rate": 0.0,
      "loss": 0.0853,
      "step": 96
    },
    {
      "epoch": 3.0,
      "step": 96,
      "total_flos": 5914053476352.0,
      "train_loss": 0.2707547560178985,
      "train_runtime": 421.6069,
      "train_samples_per_second": 7.116,
      "train_steps_per_second": 0.228
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 96,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5914053476352.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}