{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.976,
  "eval_steps": 500,
  "global_step": 93,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.032,
      "grad_norm": 5.84940669389189,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.8186,
      "step": 1
    },
    {
      "epoch": 0.064,
      "grad_norm": 5.896903453045604,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.8458,
      "step": 2
    },
    {
      "epoch": 0.096,
      "grad_norm": 5.37344284173292,
      "learning_rate": 6e-06,
      "loss": 0.8188,
      "step": 3
    },
    {
      "epoch": 0.128,
      "grad_norm": 4.074873898614128,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.8172,
      "step": 4
    },
    {
      "epoch": 0.16,
      "grad_norm": 2.5041237379672117,
      "learning_rate": 1e-05,
      "loss": 0.7248,
      "step": 5
    },
    {
      "epoch": 0.192,
      "grad_norm": 4.734108824557778,
      "learning_rate": 1.2e-05,
      "loss": 0.8667,
      "step": 6
    },
    {
      "epoch": 0.224,
      "grad_norm": 4.9352242442739644,
      "learning_rate": 1.4e-05,
      "loss": 0.8164,
      "step": 7
    },
    {
      "epoch": 0.256,
      "grad_norm": 5.9000233595966245,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 0.7703,
      "step": 8
    },
    {
      "epoch": 0.288,
      "grad_norm": 4.734616460225017,
      "learning_rate": 1.8e-05,
      "loss": 0.7283,
      "step": 9
    },
    {
      "epoch": 0.32,
      "grad_norm": 2.69112201268641,
      "learning_rate": 2e-05,
      "loss": 0.7136,
      "step": 10
    },
    {
      "epoch": 0.352,
      "grad_norm": 2.8584464139967802,
      "learning_rate": 1.9992837548163315e-05,
      "loss": 0.7436,
      "step": 11
    },
    {
      "epoch": 0.384,
      "grad_norm": 2.2914048959495643,
      "learning_rate": 1.9971360452796523e-05,
      "loss": 0.6954,
      "step": 12
    },
    {
      "epoch": 0.416,
      "grad_norm": 1.5463017334976934,
      "learning_rate": 1.993559947963185e-05,
      "loss": 0.6304,
      "step": 13
    },
    {
      "epoch": 0.448,
      "grad_norm": 1.6244483288113523,
      "learning_rate": 1.9885605855918887e-05,
      "loss": 0.661,
      "step": 14
    },
    {
      "epoch": 0.48,
      "grad_norm": 1.3804731125445038,
      "learning_rate": 1.9821451197042028e-05,
      "loss": 0.621,
      "step": 15
    },
    {
      "epoch": 0.512,
      "grad_norm": 1.2049483348062489,
      "learning_rate": 1.9743227403932135e-05,
      "loss": 0.6491,
      "step": 16
    },
    {
      "epoch": 0.544,
      "grad_norm": 1.0219653970996074,
      "learning_rate": 1.9651046531419335e-05,
      "loss": 0.6351,
      "step": 17
    },
    {
      "epoch": 0.576,
      "grad_norm": 0.9718497121562397,
      "learning_rate": 1.9545040627715554e-05,
      "loss": 0.6125,
      "step": 18
    },
    {
      "epoch": 0.608,
      "grad_norm": 1.1088690421377412,
      "learning_rate": 1.942536154525673e-05,
      "loss": 0.6397,
      "step": 19
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.9455104536203861,
      "learning_rate": 1.9292180723175656e-05,
      "loss": 0.5623,
      "step": 20
    },
    {
      "epoch": 0.672,
      "grad_norm": 1.1136726884833483,
      "learning_rate": 1.9145688941717074e-05,
      "loss": 0.5951,
      "step": 21
    },
    {
      "epoch": 0.704,
      "grad_norm": 0.9962571929358678,
      "learning_rate": 1.8986096048946826e-05,
      "loss": 0.6159,
      "step": 22
    },
    {
      "epoch": 0.736,
      "grad_norm": 1.0516488446346473,
      "learning_rate": 1.881363066014649e-05,
      "loss": 0.5522,
      "step": 23
    },
    {
      "epoch": 0.768,
      "grad_norm": 1.2789484698437357,
      "learning_rate": 1.862853983032423e-05,
      "loss": 0.6563,
      "step": 24
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.9726925447037389,
      "learning_rate": 1.8431088700310846e-05,
      "loss": 0.5965,
      "step": 25
    },
    {
      "epoch": 0.832,
      "grad_norm": 1.3114557015981112,
      "learning_rate": 1.8221560116948103e-05,
      "loss": 0.6577,
      "step": 26
    },
    {
      "epoch": 0.864,
      "grad_norm": 1.2317571230112236,
      "learning_rate": 1.8000254227913346e-05,
      "loss": 0.5977,
      "step": 27
    },
    {
      "epoch": 0.896,
      "grad_norm": 0.9966337703489971,
      "learning_rate": 1.7767488051760858e-05,
      "loss": 0.5929,
      "step": 28
    },
    {
      "epoch": 0.928,
      "grad_norm": 0.9244122578843538,
      "learning_rate": 1.7523595023795814e-05,
      "loss": 0.6165,
      "step": 29
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.9902344039161873,
      "learning_rate": 1.7268924518431437e-05,
      "loss": 0.6351,
      "step": 30
    },
    {
      "epoch": 0.992,
      "grad_norm": 1.1385173043161212,
      "learning_rate": 1.700384134871351e-05,
      "loss": 0.5994,
      "step": 31
    },
    {
      "epoch": 1.024,
      "grad_norm": 1.7131457418457308,
      "learning_rate": 1.672872524372919e-05,
      "loss": 0.8536,
      "step": 32
    },
    {
      "epoch": 1.056,
      "grad_norm": 0.9887680712926848,
      "learning_rate": 1.644397030464877e-05,
      "loss": 0.4804,
      "step": 33
    },
    {
      "epoch": 1.088,
      "grad_norm": 0.7381068345404209,
      "learning_rate": 1.614998444017954e-05,
      "loss": 0.4955,
      "step": 34
    },
    {
      "epoch": 1.12,
      "grad_norm": 0.9618580890399832,
      "learning_rate": 1.5847188782240473e-05,
      "loss": 0.5212,
      "step": 35
    },
    {
      "epoch": 1.152,
      "grad_norm": 0.8787637974871335,
      "learning_rate": 1.5536017082694846e-05,
      "loss": 0.4318,
      "step": 36
    },
    {
      "epoch": 1.184,
      "grad_norm": 0.9577670072498041,
      "learning_rate": 1.5216915092004847e-05,
      "loss": 0.5298,
      "step": 37
    },
    {
      "epoch": 1.216,
      "grad_norm": 0.9473329613695382,
      "learning_rate": 1.4890339920698334e-05,
      "loss": 0.5354,
      "step": 38
    },
    {
      "epoch": 1.248,
      "grad_norm": 0.8919615677703958,
      "learning_rate": 1.4556759384562418e-05,
      "loss": 0.4242,
      "step": 39
    },
    {
      "epoch": 1.28,
      "grad_norm": 0.8567752824441381,
      "learning_rate": 1.421665133450184e-05,
      "loss": 0.4599,
      "step": 40
    },
    {
      "epoch": 1.312,
      "grad_norm": 0.7949595792187416,
      "learning_rate": 1.3870502972022175e-05,
      "loss": 0.4125,
      "step": 41
    },
    {
      "epoch": 1.3439999999999999,
      "grad_norm": 1.3123308076615146,
      "learning_rate": 1.351881015131833e-05,
      "loss": 0.5169,
      "step": 42
    },
    {
      "epoch": 1.376,
      "grad_norm": 0.7318049114768788,
      "learning_rate": 1.316207666896824e-05,
      "loss": 0.4106,
      "step": 43
    },
    {
      "epoch": 1.408,
      "grad_norm": 0.9425576795296489,
      "learning_rate": 1.2800813542249073e-05,
      "loss": 0.5101,
      "step": 44
    },
    {
      "epoch": 1.44,
      "grad_norm": 1.021797619309871,
      "learning_rate": 1.2435538277109919e-05,
      "loss": 0.5469,
      "step": 45
    },
    {
      "epoch": 1.472,
      "grad_norm": 0.7986312330336192,
      "learning_rate": 1.206677412684953e-05,
      "loss": 0.4318,
      "step": 46
    },
    {
      "epoch": 1.504,
      "grad_norm": 0.9215393491076125,
      "learning_rate": 1.1695049342560969e-05,
      "loss": 0.4563,
      "step": 47
    },
    {
      "epoch": 1.536,
      "grad_norm": 0.9830101157364164,
      "learning_rate": 1.1320896416417026e-05,
      "loss": 0.5076,
      "step": 48
    },
    {
      "epoch": 1.568,
      "grad_norm": 0.8438722321960386,
      "learning_rate": 1.0944851318880314e-05,
      "loss": 0.516,
      "step": 49
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.8787060713492011,
      "learning_rate": 1.0567452730930743e-05,
      "loss": 0.462,
      "step": 50
    },
    {
      "epoch": 1.6320000000000001,
      "grad_norm": 0.9033596868361128,
      "learning_rate": 1.0189241272410191e-05,
      "loss": 0.4514,
      "step": 51
    },
    {
      "epoch": 1.6640000000000001,
      "grad_norm": 0.8693144222274771,
      "learning_rate": 9.810758727589814e-06,
      "loss": 0.5001,
      "step": 52
    },
    {
      "epoch": 1.696,
      "grad_norm": 1.1029488992826821,
      "learning_rate": 9.43254726906926e-06,
      "loss": 0.4482,
      "step": 53
    },
    {
      "epoch": 1.728,
      "grad_norm": 0.9000678022323099,
      "learning_rate": 9.055148681119688e-06,
      "loss": 0.4322,
      "step": 54
    },
    {
      "epoch": 1.76,
      "grad_norm": 0.7216561884978123,
      "learning_rate": 8.67910358358298e-06,
      "loss": 0.4316,
      "step": 55
    },
    {
      "epoch": 1.792,
      "grad_norm": 0.8319129824124409,
      "learning_rate": 8.304950657439034e-06,
      "loss": 0.4805,
      "step": 56
    },
    {
      "epoch": 1.8239999999999998,
      "grad_norm": 0.7932990650658038,
      "learning_rate": 7.93322587315047e-06,
      "loss": 0.433,
      "step": 57
    },
    {
      "epoch": 1.8559999999999999,
      "grad_norm": 0.7262577838163997,
      "learning_rate": 7.564461722890082e-06,
      "loss": 0.4647,
      "step": 58
    },
    {
      "epoch": 1.888,
      "grad_norm": 0.6953985526658658,
      "learning_rate": 7.199186457750931e-06,
      "loss": 0.3784,
      "step": 59
    },
    {
      "epoch": 1.92,
      "grad_norm": 0.6944361567845883,
      "learning_rate": 6.837923331031761e-06,
      "loss": 0.4661,
      "step": 60
    },
    {
      "epoch": 1.952,
      "grad_norm": 0.8214460648196689,
      "learning_rate": 6.48118984868167e-06,
      "loss": 0.4882,
      "step": 61
    },
    {
      "epoch": 1.984,
      "grad_norm": 0.6777654072216326,
      "learning_rate": 6.129497027977829e-06,
      "loss": 0.4669,
      "step": 62
    },
    {
      "epoch": 2.016,
      "grad_norm": 1.2880141378220575,
      "learning_rate": 5.78334866549816e-06,
      "loss": 0.6206,
      "step": 63
    },
    {
      "epoch": 2.048,
      "grad_norm": 0.7107669765897068,
      "learning_rate": 5.443240615437586e-06,
      "loss": 0.3816,
      "step": 64
    },
    {
      "epoch": 2.08,
      "grad_norm": 0.801586001212043,
      "learning_rate": 5.109660079301668e-06,
      "loss": 0.402,
      "step": 65
    },
    {
      "epoch": 2.112,
      "grad_norm": 0.564818291485468,
      "learning_rate": 4.783084907995156e-06,
      "loss": 0.3663,
      "step": 66
    },
    {
      "epoch": 2.144,
      "grad_norm": 0.6168891844281424,
      "learning_rate": 4.463982917305155e-06,
      "loss": 0.3979,
      "step": 67
    },
    {
      "epoch": 2.176,
      "grad_norm": 0.5270099669215162,
      "learning_rate": 4.152811217759529e-06,
      "loss": 0.2757,
      "step": 68
    },
    {
      "epoch": 2.208,
      "grad_norm": 0.5965579042575382,
      "learning_rate": 3.850015559820465e-06,
      "loss": 0.3429,
      "step": 69
    },
    {
      "epoch": 2.24,
      "grad_norm": 0.7256008814807648,
      "learning_rate": 3.5560296953512296e-06,
      "loss": 0.3981,
      "step": 70
    },
    {
      "epoch": 2.2720000000000002,
      "grad_norm": 0.7161869101911601,
      "learning_rate": 3.2712747562708115e-06,
      "loss": 0.37,
      "step": 71
    },
    {
      "epoch": 2.304,
      "grad_norm": 0.7454025067253681,
      "learning_rate": 2.9961586512864947e-06,
      "loss": 0.4165,
      "step": 72
    },
    {
      "epoch": 2.336,
      "grad_norm": 0.5449062983518381,
      "learning_rate": 2.7310754815685627e-06,
      "loss": 0.3389,
      "step": 73
    },
    {
      "epoch": 2.368,
      "grad_norm": 0.5651340234901367,
      "learning_rate": 2.4764049762041874e-06,
      "loss": 0.3643,
      "step": 74
    },
    {
      "epoch": 2.4,
      "grad_norm": 0.5223439740041005,
      "learning_rate": 2.2325119482391466e-06,
      "loss": 0.2996,
      "step": 75
    },
    {
      "epoch": 2.432,
      "grad_norm": 0.6691922082645556,
      "learning_rate": 1.9997457720866554e-06,
      "loss": 0.4327,
      "step": 76
    },
    {
      "epoch": 2.464,
      "grad_norm": 0.5466794083662679,
      "learning_rate": 1.7784398830519002e-06,
      "loss": 0.315,
      "step": 77
    },
    {
      "epoch": 2.496,
      "grad_norm": 0.513132554481501,
      "learning_rate": 1.5689112996891576e-06,
      "loss": 0.343,
      "step": 78
    },
    {
      "epoch": 2.528,
      "grad_norm": 0.5748198370777368,
      "learning_rate": 1.3714601696757713e-06,
      "loss": 0.3484,
      "step": 79
    },
    {
      "epoch": 2.56,
      "grad_norm": 0.6254281609397252,
      "learning_rate": 1.1863693398535115e-06,
      "loss": 0.4159,
      "step": 80
    },
    {
      "epoch": 2.592,
      "grad_norm": 0.6161841782227268,
      "learning_rate": 1.01390395105318e-06,
      "loss": 0.384,
      "step": 81
    },
    {
      "epoch": 2.624,
      "grad_norm": 0.5502099035422106,
      "learning_rate": 8.543110582829272e-07,
      "loss": 0.3486,
      "step": 82
    },
    {
      "epoch": 2.656,
      "grad_norm": 0.5545865136491874,
      "learning_rate": 7.078192768243486e-07,
      "loss": 0.3445,
      "step": 83
    },
    {
      "epoch": 2.6879999999999997,
      "grad_norm": 0.5443978320460648,
      "learning_rate": 5.746384547432738e-07,
      "loss": 0.3915,
      "step": 84
    },
    {
      "epoch": 2.7199999999999998,
      "grad_norm": 0.5314622355578005,
      "learning_rate": 4.549593722844492e-07,
      "loss": 0.4302,
      "step": 85
    },
    {
      "epoch": 2.752,
      "grad_norm": 0.4457783159087401,
      "learning_rate": 3.4895346858066723e-07,
      "loss": 0.2589,
      "step": 86
    },
    {
      "epoch": 2.784,
      "grad_norm": 0.5941803419468483,
      "learning_rate": 2.5677259606786686e-07,
      "loss": 0.3742,
      "step": 87
    },
    {
      "epoch": 2.816,
      "grad_norm": 0.5084378111189841,
      "learning_rate": 1.7854880295797406e-07,
      "loss": 0.3535,
      "step": 88
    },
    {
      "epoch": 2.848,
      "grad_norm": 0.4740275071409764,
      "learning_rate": 1.1439414408111471e-07,
      "loss": 0.3458,
      "step": 89
    },
    {
      "epoch": 2.88,
      "grad_norm": 0.6664191454899007,
      "learning_rate": 6.440052036815081e-08,
      "loss": 0.4665,
      "step": 90
    },
    {
      "epoch": 2.912,
      "grad_norm": 0.4715443059942868,
      "learning_rate": 2.86395472034795e-08,
      "loss": 0.3022,
      "step": 91
    },
    {
      "epoch": 2.944,
      "grad_norm": 0.5808333713792009,
      "learning_rate": 7.162451836685291e-09,
      "loss": 0.3701,
      "step": 92
    },
    {
      "epoch": 2.976,
      "grad_norm": 0.5008683379834068,
      "learning_rate": 0.0,
      "loss": 0.295,
      "step": 93
    },
    {
      "epoch": 2.976,
      "step": 93,
      "total_flos": 5.107931021941146e+16,
      "train_loss": 0.5110146377676277,
      "train_runtime": 3237.991,
      "train_samples_per_second": 0.923,
      "train_steps_per_second": 0.029
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 93,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5.107931021941146e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}