| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 6.850632911392405, |
| "eval_steps": 500, |
| "global_step": 224, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.030379746835443037, |
| "grad_norm": 6.048936306987692, |
| "learning_rate": 8.695652173913044e-07, |
| "loss": 0.8419, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.060759493670886074, |
| "grad_norm": 6.359706790120916, |
| "learning_rate": 1.7391304347826088e-06, |
| "loss": 0.8759, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.09113924050632911, |
| "grad_norm": 6.131019897142869, |
| "learning_rate": 2.6086956521739132e-06, |
| "loss": 0.8305, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.12151898734177215, |
| "grad_norm": 5.574055055372666, |
| "learning_rate": 3.4782608695652175e-06, |
| "loss": 0.8242, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.1518987341772152, |
| "grad_norm": 4.265951668457725, |
| "learning_rate": 4.347826086956522e-06, |
| "loss": 0.7634, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.18227848101265823, |
| "grad_norm": 2.464962480859038, |
| "learning_rate": 5.2173913043478265e-06, |
| "loss": 0.7492, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.21265822784810126, |
| "grad_norm": 2.077337266343512, |
| "learning_rate": 6.086956521739132e-06, |
| "loss": 0.7265, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.2430379746835443, |
| "grad_norm": 4.14036522612642, |
| "learning_rate": 6.956521739130435e-06, |
| "loss": 0.7563, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.27341772151898736, |
| "grad_norm": 4.45973658282638, |
| "learning_rate": 7.82608695652174e-06, |
| "loss": 0.7747, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.3037974683544304, |
| "grad_norm": 4.521818388157711, |
| "learning_rate": 8.695652173913044e-06, |
| "loss": 0.7341, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.3341772151898734, |
| "grad_norm": 4.422444866803943, |
| "learning_rate": 9.565217391304349e-06, |
| "loss": 0.6844, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.36455696202531646, |
| "grad_norm": 3.9402544164657844, |
| "learning_rate": 1.0434782608695653e-05, |
| "loss": 0.692, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.3949367088607595, |
| "grad_norm": 2.6928462031464337, |
| "learning_rate": 1.1304347826086957e-05, |
| "loss": 0.6945, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.4253164556962025, |
| "grad_norm": 1.8595232870527427, |
| "learning_rate": 1.2173913043478263e-05, |
| "loss": 0.654, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.45569620253164556, |
| "grad_norm": 2.613185640118858, |
| "learning_rate": 1.3043478260869566e-05, |
| "loss": 0.671, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.4860759493670886, |
| "grad_norm": 2.110958057017735, |
| "learning_rate": 1.391304347826087e-05, |
| "loss": 0.6169, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.5164556962025316, |
| "grad_norm": 1.737170053034775, |
| "learning_rate": 1.4782608695652174e-05, |
| "loss": 0.6203, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.5468354430379747, |
| "grad_norm": 1.5084674691399222, |
| "learning_rate": 1.565217391304348e-05, |
| "loss": 0.6149, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.5772151898734177, |
| "grad_norm": 1.3494948400477569, |
| "learning_rate": 1.6521739130434785e-05, |
| "loss": 0.6517, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.6075949367088608, |
| "grad_norm": 1.1751438322676817, |
| "learning_rate": 1.739130434782609e-05, |
| "loss": 0.6033, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.6379746835443038, |
| "grad_norm": 1.0526414875931804, |
| "learning_rate": 1.8260869565217393e-05, |
| "loss": 0.64, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.6683544303797468, |
| "grad_norm": 0.9715611441669768, |
| "learning_rate": 1.9130434782608697e-05, |
| "loss": 0.5927, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.6987341772151898, |
| "grad_norm": 0.8125624310216422, |
| "learning_rate": 2e-05, |
| "loss": 0.607, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.7291139240506329, |
| "grad_norm": 0.9223345735056782, |
| "learning_rate": 1.999877856940653e-05, |
| "loss": 0.561, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.759493670886076, |
| "grad_norm": 0.8243964896311464, |
| "learning_rate": 1.999511457600466e-05, |
| "loss": 0.5872, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.789873417721519, |
| "grad_norm": 0.7512429692602848, |
| "learning_rate": 1.9989008914857115e-05, |
| "loss": 0.5711, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.8202531645569621, |
| "grad_norm": 0.7459000930728743, |
| "learning_rate": 1.998046307749216e-05, |
| "loss": 0.5801, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.850632911392405, |
| "grad_norm": 0.6995303209474417, |
| "learning_rate": 1.9969479151539238e-05, |
| "loss": 0.5549, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.8810126582278481, |
| "grad_norm": 0.7124104382437041, |
| "learning_rate": 1.9956059820218982e-05, |
| "loss": 0.5468, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.9113924050632911, |
| "grad_norm": 0.7059343154396499, |
| "learning_rate": 1.9940208361687762e-05, |
| "loss": 0.5723, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.9417721518987342, |
| "grad_norm": 0.7009167310881045, |
| "learning_rate": 1.9921928648236855e-05, |
| "loss": 0.548, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.9721518987341772, |
| "grad_norm": 0.8724240967159428, |
| "learning_rate": 1.990122514534651e-05, |
| "loss": 0.5918, |
| "step": 32 |
| }, |
| { |
| "epoch": 1.010126582278481, |
| "grad_norm": 0.8468019563645511, |
| "learning_rate": 1.9878102910595097e-05, |
| "loss": 0.7317, |
| "step": 33 |
| }, |
| { |
| "epoch": 1.040506329113924, |
| "grad_norm": 0.5559186721993208, |
| "learning_rate": 1.985256759242359e-05, |
| "loss": 0.4849, |
| "step": 34 |
| }, |
| { |
| "epoch": 1.070886075949367, |
| "grad_norm": 0.5507474685198273, |
| "learning_rate": 1.982462542875576e-05, |
| "loss": 0.5334, |
| "step": 35 |
| }, |
| { |
| "epoch": 1.1012658227848102, |
| "grad_norm": 0.543870017484278, |
| "learning_rate": 1.979428324547432e-05, |
| "loss": 0.5487, |
| "step": 36 |
| }, |
| { |
| "epoch": 1.1316455696202532, |
| "grad_norm": 0.6225002865248337, |
| "learning_rate": 1.9761548454753455e-05, |
| "loss": 0.5186, |
| "step": 37 |
| }, |
| { |
| "epoch": 1.1620253164556962, |
| "grad_norm": 0.5335951367889052, |
| "learning_rate": 1.972642905324813e-05, |
| "loss": 0.5121, |
| "step": 38 |
| }, |
| { |
| "epoch": 1.1924050632911392, |
| "grad_norm": 0.569943007419904, |
| "learning_rate": 1.9688933620140638e-05, |
| "loss": 0.4835, |
| "step": 39 |
| }, |
| { |
| "epoch": 1.2227848101265824, |
| "grad_norm": 0.5616599542859847, |
| "learning_rate": 1.96490713150448e-05, |
| "loss": 0.5062, |
| "step": 40 |
| }, |
| { |
| "epoch": 1.2531645569620253, |
| "grad_norm": 0.5176704614796857, |
| "learning_rate": 1.9606851875768404e-05, |
| "loss": 0.4821, |
| "step": 41 |
| }, |
| { |
| "epoch": 1.2835443037974683, |
| "grad_norm": 0.6013616477281641, |
| "learning_rate": 1.956228561593441e-05, |
| "loss": 0.4862, |
| "step": 42 |
| }, |
| { |
| "epoch": 1.3139240506329113, |
| "grad_norm": 0.5799218399260377, |
| "learning_rate": 1.9515383422461457e-05, |
| "loss": 0.5455, |
| "step": 43 |
| }, |
| { |
| "epoch": 1.3443037974683545, |
| "grad_norm": 0.48791349493694947, |
| "learning_rate": 1.9466156752904344e-05, |
| "loss": 0.4558, |
| "step": 44 |
| }, |
| { |
| "epoch": 1.3746835443037975, |
| "grad_norm": 0.5460867482558168, |
| "learning_rate": 1.9414617632655114e-05, |
| "loss": 0.4949, |
| "step": 45 |
| }, |
| { |
| "epoch": 1.4050632911392404, |
| "grad_norm": 0.5200300630053336, |
| "learning_rate": 1.9360778652005416e-05, |
| "loss": 0.5226, |
| "step": 46 |
| }, |
| { |
| "epoch": 1.4354430379746836, |
| "grad_norm": 0.49835905260454255, |
| "learning_rate": 1.9304652963070868e-05, |
| "loss": 0.4988, |
| "step": 47 |
| }, |
| { |
| "epoch": 1.4658227848101266, |
| "grad_norm": 0.555402108007746, |
| "learning_rate": 1.9246254276578175e-05, |
| "loss": 0.4811, |
| "step": 48 |
| }, |
| { |
| "epoch": 1.4962025316455696, |
| "grad_norm": 0.5043821831864128, |
| "learning_rate": 1.9185596858515797e-05, |
| "loss": 0.5409, |
| "step": 49 |
| }, |
| { |
| "epoch": 1.5265822784810128, |
| "grad_norm": 0.5095173005875876, |
| "learning_rate": 1.9122695526648968e-05, |
| "loss": 0.4574, |
| "step": 50 |
| }, |
| { |
| "epoch": 1.5569620253164556, |
| "grad_norm": 0.5297774307671909, |
| "learning_rate": 1.905756564689991e-05, |
| "loss": 0.4825, |
| "step": 51 |
| }, |
| { |
| "epoch": 1.5873417721518988, |
| "grad_norm": 0.4497569232214527, |
| "learning_rate": 1.8990223129594146e-05, |
| "loss": 0.4634, |
| "step": 52 |
| }, |
| { |
| "epoch": 1.6177215189873417, |
| "grad_norm": 0.5108891340546695, |
| "learning_rate": 1.8920684425573865e-05, |
| "loss": 0.469, |
| "step": 53 |
| }, |
| { |
| "epoch": 1.6481012658227847, |
| "grad_norm": 0.5389740099999035, |
| "learning_rate": 1.884896652217917e-05, |
| "loss": 0.5486, |
| "step": 54 |
| }, |
| { |
| "epoch": 1.678481012658228, |
| "grad_norm": 0.5137420177298216, |
| "learning_rate": 1.877508693909831e-05, |
| "loss": 0.4983, |
| "step": 55 |
| }, |
| { |
| "epoch": 1.7088607594936709, |
| "grad_norm": 0.49925713696145874, |
| "learning_rate": 1.8699063724087905e-05, |
| "loss": 0.5272, |
| "step": 56 |
| }, |
| { |
| "epoch": 1.7392405063291139, |
| "grad_norm": 0.5914481358656738, |
| "learning_rate": 1.862091544856407e-05, |
| "loss": 0.4924, |
| "step": 57 |
| }, |
| { |
| "epoch": 1.769620253164557, |
| "grad_norm": 0.4664469979995995, |
| "learning_rate": 1.854066120306571e-05, |
| "loss": 0.4759, |
| "step": 58 |
| }, |
| { |
| "epoch": 1.8, |
| "grad_norm": 0.47080488802947457, |
| "learning_rate": 1.8458320592590976e-05, |
| "loss": 0.4812, |
| "step": 59 |
| }, |
| { |
| "epoch": 1.830379746835443, |
| "grad_norm": 0.5587905613183901, |
| "learning_rate": 1.837391373180801e-05, |
| "loss": 0.4888, |
| "step": 60 |
| }, |
| { |
| "epoch": 1.8607594936708862, |
| "grad_norm": 0.5050554675255995, |
| "learning_rate": 1.8287461240141217e-05, |
| "loss": 0.5033, |
| "step": 61 |
| }, |
| { |
| "epoch": 1.891139240506329, |
| "grad_norm": 0.47145338803039966, |
| "learning_rate": 1.8198984236734246e-05, |
| "loss": 0.4764, |
| "step": 62 |
| }, |
| { |
| "epoch": 1.9215189873417722, |
| "grad_norm": 0.5018576539215808, |
| "learning_rate": 1.8108504335290852e-05, |
| "loss": 0.4561, |
| "step": 63 |
| }, |
| { |
| "epoch": 1.9518987341772152, |
| "grad_norm": 0.45811612746325875, |
| "learning_rate": 1.8016043638794975e-05, |
| "loss": 0.5324, |
| "step": 64 |
| }, |
| { |
| "epoch": 1.9822784810126581, |
| "grad_norm": 0.6144932201086315, |
| "learning_rate": 1.7921624734111292e-05, |
| "loss": 0.578, |
| "step": 65 |
| }, |
| { |
| "epoch": 2.020253164556962, |
| "grad_norm": 0.5726068435884388, |
| "learning_rate": 1.7825270686467567e-05, |
| "loss": 0.544, |
| "step": 66 |
| }, |
| { |
| "epoch": 2.050632911392405, |
| "grad_norm": 0.4648598668226982, |
| "learning_rate": 1.7727005033820117e-05, |
| "loss": 0.4308, |
| "step": 67 |
| }, |
| { |
| "epoch": 2.081012658227848, |
| "grad_norm": 0.4721375558424858, |
| "learning_rate": 1.762685178110382e-05, |
| "loss": 0.4194, |
| "step": 68 |
| }, |
| { |
| "epoch": 2.1113924050632913, |
| "grad_norm": 0.4771552931363247, |
| "learning_rate": 1.752483539436807e-05, |
| "loss": 0.4197, |
| "step": 69 |
| }, |
| { |
| "epoch": 2.141772151898734, |
| "grad_norm": 0.4797766115828031, |
| "learning_rate": 1.7420980794800013e-05, |
| "loss": 0.434, |
| "step": 70 |
| }, |
| { |
| "epoch": 2.1721518987341772, |
| "grad_norm": 0.4847501246260971, |
| "learning_rate": 1.731531335263669e-05, |
| "loss": 0.4094, |
| "step": 71 |
| }, |
| { |
| "epoch": 2.2025316455696204, |
| "grad_norm": 0.5715606812513879, |
| "learning_rate": 1.720785888096743e-05, |
| "loss": 0.4107, |
| "step": 72 |
| }, |
| { |
| "epoch": 2.232911392405063, |
| "grad_norm": 0.4651167769261771, |
| "learning_rate": 1.7098643629428035e-05, |
| "loss": 0.4111, |
| "step": 73 |
| }, |
| { |
| "epoch": 2.2632911392405064, |
| "grad_norm": 0.45587437771999223, |
| "learning_rate": 1.698769427778842e-05, |
| "loss": 0.413, |
| "step": 74 |
| }, |
| { |
| "epoch": 2.293670886075949, |
| "grad_norm": 0.5442546749379886, |
| "learning_rate": 1.687503792943506e-05, |
| "loss": 0.4373, |
| "step": 75 |
| }, |
| { |
| "epoch": 2.3240506329113924, |
| "grad_norm": 0.4824118869055176, |
| "learning_rate": 1.6760702104750046e-05, |
| "loss": 0.4257, |
| "step": 76 |
| }, |
| { |
| "epoch": 2.3544303797468356, |
| "grad_norm": 0.4781817061972643, |
| "learning_rate": 1.664471473438822e-05, |
| "loss": 0.4014, |
| "step": 77 |
| }, |
| { |
| "epoch": 2.3848101265822783, |
| "grad_norm": 0.4088771271312156, |
| "learning_rate": 1.6527104152454096e-05, |
| "loss": 0.3802, |
| "step": 78 |
| }, |
| { |
| "epoch": 2.4151898734177215, |
| "grad_norm": 0.5516321100838191, |
| "learning_rate": 1.6407899089580263e-05, |
| "loss": 0.449, |
| "step": 79 |
| }, |
| { |
| "epoch": 2.4455696202531647, |
| "grad_norm": 0.4716858671975667, |
| "learning_rate": 1.628712866590885e-05, |
| "loss": 0.3853, |
| "step": 80 |
| }, |
| { |
| "epoch": 2.4759493670886075, |
| "grad_norm": 0.45334870047529424, |
| "learning_rate": 1.6164822383977912e-05, |
| "loss": 0.4197, |
| "step": 81 |
| }, |
| { |
| "epoch": 2.5063291139240507, |
| "grad_norm": 0.4650898860452062, |
| "learning_rate": 1.604101012151436e-05, |
| "loss": 0.4325, |
| "step": 82 |
| }, |
| { |
| "epoch": 2.536708860759494, |
| "grad_norm": 0.6733951603850473, |
| "learning_rate": 1.5915722124135227e-05, |
| "loss": 0.4346, |
| "step": 83 |
| }, |
| { |
| "epoch": 2.5670886075949366, |
| "grad_norm": 0.3741787332470752, |
| "learning_rate": 1.5788988997959115e-05, |
| "loss": 0.4168, |
| "step": 84 |
| }, |
| { |
| "epoch": 2.59746835443038, |
| "grad_norm": 0.4673023466091711, |
| "learning_rate": 1.5660841702129533e-05, |
| "loss": 0.4387, |
| "step": 85 |
| }, |
| { |
| "epoch": 2.6278481012658226, |
| "grad_norm": 0.5113903636282513, |
| "learning_rate": 1.5531311541251995e-05, |
| "loss": 0.3882, |
| "step": 86 |
| }, |
| { |
| "epoch": 2.6582278481012658, |
| "grad_norm": 0.3987776866070054, |
| "learning_rate": 1.540043015774676e-05, |
| "loss": 0.4384, |
| "step": 87 |
| }, |
| { |
| "epoch": 2.688607594936709, |
| "grad_norm": 0.5000328088996094, |
| "learning_rate": 1.5268229524119007e-05, |
| "loss": 0.4427, |
| "step": 88 |
| }, |
| { |
| "epoch": 2.7189873417721517, |
| "grad_norm": 0.42774760061205236, |
| "learning_rate": 1.513474193514842e-05, |
| "loss": 0.4242, |
| "step": 89 |
| }, |
| { |
| "epoch": 2.749367088607595, |
| "grad_norm": 0.4770182422848908, |
| "learning_rate": 1.5000000000000002e-05, |
| "loss": 0.4079, |
| "step": 90 |
| }, |
| { |
| "epoch": 2.779746835443038, |
| "grad_norm": 0.5326476152699332, |
| "learning_rate": 1.4864036634258112e-05, |
| "loss": 0.4264, |
| "step": 91 |
| }, |
| { |
| "epoch": 2.810126582278481, |
| "grad_norm": 0.4142295571778557, |
| "learning_rate": 1.4726885051885654e-05, |
| "loss": 0.4049, |
| "step": 92 |
| }, |
| { |
| "epoch": 2.840506329113924, |
| "grad_norm": 0.5296388184052468, |
| "learning_rate": 1.4588578757110359e-05, |
| "loss": 0.4073, |
| "step": 93 |
| }, |
| { |
| "epoch": 2.8708860759493673, |
| "grad_norm": 0.4359077641154996, |
| "learning_rate": 1.4449151536240167e-05, |
| "loss": 0.4054, |
| "step": 94 |
| }, |
| { |
| "epoch": 2.90126582278481, |
| "grad_norm": 0.53763398646354, |
| "learning_rate": 1.4308637449409705e-05, |
| "loss": 0.3915, |
| "step": 95 |
| }, |
| { |
| "epoch": 2.9316455696202532, |
| "grad_norm": 0.5156253519333039, |
| "learning_rate": 1.4167070822259868e-05, |
| "loss": 0.4718, |
| "step": 96 |
| }, |
| { |
| "epoch": 2.962025316455696, |
| "grad_norm": 0.4317302365758521, |
| "learning_rate": 1.402448623755254e-05, |
| "loss": 0.3884, |
| "step": 97 |
| }, |
| { |
| "epoch": 2.992405063291139, |
| "grad_norm": 0.5536609198782718, |
| "learning_rate": 1.3880918526722497e-05, |
| "loss": 0.5473, |
| "step": 98 |
| }, |
| { |
| "epoch": 3.030379746835443, |
| "grad_norm": 0.5545528571506598, |
| "learning_rate": 1.3736402761368597e-05, |
| "loss": 0.3687, |
| "step": 99 |
| }, |
| { |
| "epoch": 3.060759493670886, |
| "grad_norm": 0.47540586403474266, |
| "learning_rate": 1.3590974244686248e-05, |
| "loss": 0.3472, |
| "step": 100 |
| }, |
| { |
| "epoch": 3.091139240506329, |
| "grad_norm": 0.515690188434661, |
| "learning_rate": 1.344466850284333e-05, |
| "loss": 0.3524, |
| "step": 101 |
| }, |
| { |
| "epoch": 3.1215189873417724, |
| "grad_norm": 0.5494008492815136, |
| "learning_rate": 1.3297521276301666e-05, |
| "loss": 0.3492, |
| "step": 102 |
| }, |
| { |
| "epoch": 3.151898734177215, |
| "grad_norm": 0.5613723693617616, |
| "learning_rate": 1.3149568511086104e-05, |
| "loss": 0.3475, |
| "step": 103 |
| }, |
| { |
| "epoch": 3.1822784810126583, |
| "grad_norm": 0.5459731136472022, |
| "learning_rate": 1.300084635000341e-05, |
| "loss": 0.3516, |
| "step": 104 |
| }, |
| { |
| "epoch": 3.212658227848101, |
| "grad_norm": 0.4928481785761879, |
| "learning_rate": 1.2851391123813075e-05, |
| "loss": 0.3367, |
| "step": 105 |
| }, |
| { |
| "epoch": 3.2430379746835443, |
| "grad_norm": 0.5784610134826562, |
| "learning_rate": 1.2701239342352223e-05, |
| "loss": 0.3711, |
| "step": 106 |
| }, |
| { |
| "epoch": 3.2734177215189875, |
| "grad_norm": 0.5334142496880356, |
| "learning_rate": 1.2550427685616767e-05, |
| "loss": 0.3632, |
| "step": 107 |
| }, |
| { |
| "epoch": 3.3037974683544302, |
| "grad_norm": 0.45364002557641886, |
| "learning_rate": 1.239899299480098e-05, |
| "loss": 0.3456, |
| "step": 108 |
| }, |
| { |
| "epoch": 3.3341772151898734, |
| "grad_norm": 0.5342722692936496, |
| "learning_rate": 1.2246972263297718e-05, |
| "loss": 0.3521, |
| "step": 109 |
| }, |
| { |
| "epoch": 3.3645569620253166, |
| "grad_norm": 0.45373690200799116, |
| "learning_rate": 1.2094402627661447e-05, |
| "loss": 0.3482, |
| "step": 110 |
| }, |
| { |
| "epoch": 3.3949367088607594, |
| "grad_norm": 0.5348735784600784, |
| "learning_rate": 1.1941321358536278e-05, |
| "loss": 0.347, |
| "step": 111 |
| }, |
| { |
| "epoch": 3.4253164556962026, |
| "grad_norm": 0.42289751752473753, |
| "learning_rate": 1.1787765851551296e-05, |
| "loss": 0.3523, |
| "step": 112 |
| }, |
| { |
| "epoch": 3.4556962025316453, |
| "grad_norm": 0.4787683178291163, |
| "learning_rate": 1.1633773618185302e-05, |
| "loss": 0.3406, |
| "step": 113 |
| }, |
| { |
| "epoch": 3.4860759493670885, |
| "grad_norm": 0.5114943466720573, |
| "learning_rate": 1.14793822766033e-05, |
| "loss": 0.3488, |
| "step": 114 |
| }, |
| { |
| "epoch": 3.5164556962025317, |
| "grad_norm": 0.4725452864287685, |
| "learning_rate": 1.132462954246688e-05, |
| "loss": 0.3417, |
| "step": 115 |
| }, |
| { |
| "epoch": 3.546835443037975, |
| "grad_norm": 0.42049657665542955, |
| "learning_rate": 1.1169553219720828e-05, |
| "loss": 0.3619, |
| "step": 116 |
| }, |
| { |
| "epoch": 3.5772151898734177, |
| "grad_norm": 0.4686354978703038, |
| "learning_rate": 1.1014191191358118e-05, |
| "loss": 0.357, |
| "step": 117 |
| }, |
| { |
| "epoch": 3.607594936708861, |
| "grad_norm": 0.4205375090893493, |
| "learning_rate": 1.085858141016566e-05, |
| "loss": 0.353, |
| "step": 118 |
| }, |
| { |
| "epoch": 3.6379746835443036, |
| "grad_norm": 0.48202907492458946, |
| "learning_rate": 1.070276188945293e-05, |
| "loss": 0.3616, |
| "step": 119 |
| }, |
| { |
| "epoch": 3.668354430379747, |
| "grad_norm": 0.43220350995269835, |
| "learning_rate": 1.0546770693765859e-05, |
| "loss": 0.3573, |
| "step": 120 |
| }, |
| { |
| "epoch": 3.69873417721519, |
| "grad_norm": 0.4056113080170557, |
| "learning_rate": 1.0390645929588197e-05, |
| "loss": 0.3527, |
| "step": 121 |
| }, |
| { |
| "epoch": 3.729113924050633, |
| "grad_norm": 0.38542910276260156, |
| "learning_rate": 1.0234425736032607e-05, |
| "loss": 0.3546, |
| "step": 122 |
| }, |
| { |
| "epoch": 3.759493670886076, |
| "grad_norm": 0.41172976094134517, |
| "learning_rate": 1.007814827552384e-05, |
| "loss": 0.3383, |
| "step": 123 |
| }, |
| { |
| "epoch": 3.7898734177215188, |
| "grad_norm": 0.3422368161340203, |
| "learning_rate": 9.92185172447616e-06, |
| "loss": 0.3325, |
| "step": 124 |
| }, |
| { |
| "epoch": 3.820253164556962, |
| "grad_norm": 0.4074688839173423, |
| "learning_rate": 9.765574263967397e-06, |
| "loss": 0.3437, |
| "step": 125 |
| }, |
| { |
| "epoch": 3.850632911392405, |
| "grad_norm": 0.34849002000782986, |
| "learning_rate": 9.609354070411807e-06, |
| "loss": 0.3607, |
| "step": 126 |
| }, |
| { |
| "epoch": 3.8810126582278484, |
| "grad_norm": 0.4029722649627817, |
| "learning_rate": 9.453229306234143e-06, |
| "loss": 0.3485, |
| "step": 127 |
| }, |
| { |
| "epoch": 3.911392405063291, |
| "grad_norm": 0.36141335416240483, |
| "learning_rate": 9.297238110547075e-06, |
| "loss": 0.3584, |
| "step": 128 |
| }, |
| { |
| "epoch": 3.9417721518987343, |
| "grad_norm": 0.37260661041502396, |
| "learning_rate": 9.14141858983434e-06, |
| "loss": 0.3416, |
| "step": 129 |
| }, |
| { |
| "epoch": 3.972151898734177, |
| "grad_norm": 0.32216912148540366, |
| "learning_rate": 8.985808808641883e-06, |
| "loss": 0.3286, |
| "step": 130 |
| }, |
| { |
| "epoch": 4.010126582278481, |
| "grad_norm": 0.5313463942187743, |
| "learning_rate": 8.830446780279175e-06, |
| "loss": 0.4363, |
| "step": 131 |
| }, |
| { |
| "epoch": 4.040506329113924, |
| "grad_norm": 0.4707890932151999, |
| "learning_rate": 8.675370457533122e-06, |
| "loss": 0.2624, |
| "step": 132 |
| }, |
| { |
| "epoch": 4.0708860759493675, |
| "grad_norm": 0.4041133834341088, |
| "learning_rate": 8.520617723396702e-06, |
| "loss": 0.2915, |
| "step": 133 |
| }, |
| { |
| "epoch": 4.10126582278481, |
| "grad_norm": 0.6431830303938959, |
| "learning_rate": 8.366226381814698e-06, |
| "loss": 0.2931, |
| "step": 134 |
| }, |
| { |
| "epoch": 4.131645569620253, |
| "grad_norm": 0.453099306620913, |
| "learning_rate": 8.212234148448708e-06, |
| "loss": 0.2703, |
| "step": 135 |
| }, |
| { |
| "epoch": 4.162025316455696, |
| "grad_norm": 0.41936391731434275, |
| "learning_rate": 8.058678641463724e-06, |
| "loss": 0.2771, |
| "step": 136 |
| }, |
| { |
| "epoch": 4.192405063291139, |
| "grad_norm": 0.4505732726130199, |
| "learning_rate": 7.905597372338558e-06, |
| "loss": 0.2985, |
| "step": 137 |
| }, |
| { |
| "epoch": 4.222784810126583, |
| "grad_norm": 0.4103967350890258, |
| "learning_rate": 7.753027736702283e-06, |
| "loss": 0.2758, |
| "step": 138 |
| }, |
| { |
| "epoch": 4.253164556962025, |
| "grad_norm": 0.3973605581799497, |
| "learning_rate": 7.601007005199022e-06, |
| "loss": 0.2813, |
| "step": 139 |
| }, |
| { |
| "epoch": 4.283544303797468, |
| "grad_norm": 0.4589582439764549, |
| "learning_rate": 7.449572314383237e-06, |
| "loss": 0.3221, |
| "step": 140 |
| }, |
| { |
| "epoch": 4.313924050632911, |
| "grad_norm": 0.4274255668686267, |
| "learning_rate": 7.298760657647779e-06, |
| "loss": 0.3047, |
| "step": 141 |
| }, |
| { |
| "epoch": 4.3443037974683545, |
| "grad_norm": 0.35657773449481966, |
| "learning_rate": 7.148608876186931e-06, |
| "loss": 0.2554, |
| "step": 142 |
| }, |
| { |
| "epoch": 4.374683544303798, |
| "grad_norm": 0.3620039303083875, |
| "learning_rate": 6.999153649996595e-06, |
| "loss": 0.2675, |
| "step": 143 |
| }, |
| { |
| "epoch": 4.405063291139241, |
| "grad_norm": 0.3989026378463725, |
| "learning_rate": 6.8504314889138956e-06, |
| "loss": 0.2979, |
| "step": 144 |
| }, |
| { |
| "epoch": 4.435443037974683, |
| "grad_norm": 0.3873971390047792, |
| "learning_rate": 6.702478723698336e-06, |
| "loss": 0.3001, |
| "step": 145 |
| }, |
| { |
| "epoch": 4.465822784810126, |
| "grad_norm": 0.3638331605928393, |
| "learning_rate": 6.555331497156671e-06, |
| "loss": 0.2889, |
| "step": 146 |
| }, |
| { |
| "epoch": 4.49620253164557, |
| "grad_norm": 0.3838325042159884, |
| "learning_rate": 6.4090257553137566e-06, |
| "loss": 0.2867, |
| "step": 147 |
| }, |
| { |
| "epoch": 4.526582278481013, |
| "grad_norm": 0.36671976978412335, |
| "learning_rate": 6.263597238631405e-06, |
| "loss": 0.28, |
| "step": 148 |
| }, |
| { |
| "epoch": 4.556962025316456, |
| "grad_norm": 0.3529133852651092, |
| "learning_rate": 6.119081473277502e-06, |
| "loss": 0.2734, |
| "step": 149 |
| }, |
| { |
| "epoch": 4.587341772151898, |
| "grad_norm": 0.36640770826259494, |
| "learning_rate": 5.975513762447465e-06, |
| "loss": 0.3092, |
| "step": 150 |
| }, |
| { |
| "epoch": 4.6177215189873415, |
| "grad_norm": 0.3100594224902787, |
| "learning_rate": 5.832929177740134e-06, |
| "loss": 0.2669, |
| "step": 151 |
| }, |
| { |
| "epoch": 4.648101265822785, |
| "grad_norm": 0.377115982323163, |
| "learning_rate": 5.6913625505902966e-06, |
| "loss": 0.3186, |
| "step": 152 |
| }, |
| { |
| "epoch": 4.678481012658228, |
| "grad_norm": 0.33216684017323544, |
| "learning_rate": 5.550848463759835e-06, |
| "loss": 0.286, |
| "step": 153 |
| }, |
| { |
| "epoch": 4.708860759493671, |
| "grad_norm": 0.40784371682031095, |
| "learning_rate": 5.411421242889643e-06, |
| "loss": 0.2838, |
| "step": 154 |
| }, |
| { |
| "epoch": 4.739240506329114, |
| "grad_norm": 0.3120609511166079, |
| "learning_rate": 5.273114948114346e-06, |
| "loss": 0.2676, |
| "step": 155 |
| }, |
| { |
| "epoch": 4.769620253164557, |
| "grad_norm": 0.35298019160098854, |
| "learning_rate": 5.135963365741892e-06, |
| "loss": 0.3151, |
| "step": 156 |
| }, |
| { |
| "epoch": 4.8, |
| "grad_norm": 0.3237915668775047, |
| "learning_rate": 5.000000000000003e-06, |
| "loss": 0.2875, |
| "step": 157 |
| }, |
| { |
| "epoch": 4.830379746835443, |
| "grad_norm": 0.3341363814264824, |
| "learning_rate": 4.865258064851579e-06, |
| "loss": 0.2942, |
| "step": 158 |
| }, |
| { |
| "epoch": 4.860759493670886, |
| "grad_norm": 0.36919767508786167, |
| "learning_rate": 4.731770475880995e-06, |
| "loss": 0.3048, |
| "step": 159 |
| }, |
| { |
| "epoch": 4.891139240506329, |
| "grad_norm": 0.3091966055573231, |
| "learning_rate": 4.599569842253244e-06, |
| "loss": 0.2449, |
| "step": 160 |
| }, |
| { |
| "epoch": 4.921518987341772, |
| "grad_norm": 0.30134927381043464, |
| "learning_rate": 4.468688458748006e-06, |
| "loss": 0.2842, |
| "step": 161 |
| }, |
| { |
| "epoch": 4.951898734177215, |
| "grad_norm": 0.33724978042903153, |
| "learning_rate": 4.339158297870469e-06, |
| "loss": 0.2991, |
| "step": 162 |
| }, |
| { |
| "epoch": 4.982278481012658, |
| "grad_norm": 0.37910349336098464, |
| "learning_rate": 4.211011002040885e-06, |
| "loss": 0.3041, |
| "step": 163 |
| }, |
| { |
| "epoch": 5.020253164556962, |
| "grad_norm": 0.4523545633061669, |
| "learning_rate": 4.084277875864776e-06, |
| "loss": 0.309, |
| "step": 164 |
| }, |
| { |
| "epoch": 5.050632911392405, |
| "grad_norm": 0.4730924017069626, |
| "learning_rate": 3.958989878485644e-06, |
| "loss": 0.2502, |
| "step": 165 |
| }, |
| { |
| "epoch": 5.0810126582278485, |
| "grad_norm": 0.32201262114708323, |
| "learning_rate": 3.83517761602209e-06, |
| "loss": 0.2423, |
| "step": 166 |
| }, |
| { |
| "epoch": 5.111392405063291, |
| "grad_norm": 0.3821460864301905, |
| "learning_rate": 3.712871334091154e-06, |
| "loss": 0.2684, |
| "step": 167 |
| }, |
| { |
| "epoch": 5.141772151898734, |
| "grad_norm": 0.5256405500408324, |
| "learning_rate": 3.592100910419738e-06, |
| "loss": 0.2356, |
| "step": 168 |
| }, |
| { |
| "epoch": 5.172151898734177, |
| "grad_norm": 0.4491399278571054, |
| "learning_rate": 3.4728958475459052e-06, |
| "loss": 0.2432, |
| "step": 169 |
| }, |
| { |
| "epoch": 5.2025316455696204, |
| "grad_norm": 0.3335828076072433, |
| "learning_rate": 3.355285265611784e-06, |
| "loss": 0.2306, |
| "step": 170 |
| }, |
| { |
| "epoch": 5.232911392405064, |
| "grad_norm": 0.35927027153636726, |
| "learning_rate": 3.2392978952499553e-06, |
| "loss": 0.2499, |
| "step": 171 |
| }, |
| { |
| "epoch": 5.263291139240506, |
| "grad_norm": 0.37950495707252624, |
| "learning_rate": 3.1249620705649417e-06, |
| "loss": 0.268, |
| "step": 172 |
| }, |
| { |
| "epoch": 5.293670886075949, |
| "grad_norm": 0.38081595421286196, |
| "learning_rate": 3.0123057222115835e-06, |
| "loss": 0.2262, |
| "step": 173 |
| }, |
| { |
| "epoch": 5.324050632911392, |
| "grad_norm": 0.3490511429252917, |
| "learning_rate": 2.9013563705719673e-06, |
| "loss": 0.2461, |
| "step": 174 |
| }, |
| { |
| "epoch": 5.3544303797468356, |
| "grad_norm": 0.3185986215642651, |
| "learning_rate": 2.7921411190325753e-06, |
| "loss": 0.2493, |
| "step": 175 |
| }, |
| { |
| "epoch": 5.384810126582279, |
| "grad_norm": 0.3417114934255241, |
| "learning_rate": 2.6846866473633126e-06, |
| "loss": 0.2504, |
| "step": 176 |
| }, |
| { |
| "epoch": 5.415189873417722, |
| "grad_norm": 0.3607610625513573, |
| "learning_rate": 2.579019205199992e-06, |
| "loss": 0.2463, |
| "step": 177 |
| }, |
| { |
| "epoch": 5.445569620253164, |
| "grad_norm": 0.4170005526871216, |
| "learning_rate": 2.4751646056319334e-06, |
| "loss": 0.2606, |
| "step": 178 |
| }, |
| { |
| "epoch": 5.4759493670886075, |
| "grad_norm": 0.3233946450979762, |
| "learning_rate": 2.373148218896182e-06, |
| "loss": 0.2228, |
| "step": 179 |
| }, |
| { |
| "epoch": 5.506329113924051, |
| "grad_norm": 0.3177352084454099, |
| "learning_rate": 2.2729949661798876e-06, |
| "loss": 0.2477, |
| "step": 180 |
| }, |
| { |
| "epoch": 5.536708860759494, |
| "grad_norm": 0.3141459292831218, |
| "learning_rate": 2.174729313532433e-06, |
| "loss": 0.2358, |
| "step": 181 |
| }, |
| { |
| "epoch": 5.567088607594937, |
| "grad_norm": 0.3133255531748914, |
| "learning_rate": 2.078375265888707e-06, |
| "loss": 0.2372, |
| "step": 182 |
| }, |
| { |
| "epoch": 5.597468354430379, |
| "grad_norm": 0.33090769348276167, |
| "learning_rate": 1.9839563612050273e-06, |
| "loss": 0.2578, |
| "step": 183 |
| }, |
| { |
| "epoch": 5.627848101265823, |
| "grad_norm": 0.30752461939556636, |
| "learning_rate": 1.8914956647091497e-06, |
| "loss": 0.2225, |
| "step": 184 |
| }, |
| { |
| "epoch": 5.658227848101266, |
| "grad_norm": 0.3212700019589481, |
| "learning_rate": 1.8010157632657544e-06, |
| "loss": 0.2503, |
| "step": 185 |
| }, |
| { |
| "epoch": 5.688607594936709, |
| "grad_norm": 0.30346268594769565, |
| "learning_rate": 1.7125387598587862e-06, |
| "loss": 0.2596, |
| "step": 186 |
| }, |
| { |
| "epoch": 5.718987341772152, |
| "grad_norm": 0.3006756556681579, |
| "learning_rate": 1.6260862681919965e-06, |
| "loss": 0.2301, |
| "step": 187 |
| }, |
| { |
| "epoch": 5.749367088607595, |
| "grad_norm": 0.31423052889864206, |
| "learning_rate": 1.5416794074090258e-06, |
| "loss": 0.2429, |
| "step": 188 |
| }, |
| { |
| "epoch": 5.779746835443038, |
| "grad_norm": 0.30980107031353704, |
| "learning_rate": 1.459338796934293e-06, |
| "loss": 0.2291, |
| "step": 189 |
| }, |
| { |
| "epoch": 5.810126582278481, |
| "grad_norm": 0.33434803432375376, |
| "learning_rate": 1.3790845514359363e-06, |
| "loss": 0.2571, |
| "step": 190 |
| }, |
| { |
| "epoch": 5.840506329113924, |
| "grad_norm": 0.292122548663931, |
| "learning_rate": 1.300936275912098e-06, |
| "loss": 0.218, |
| "step": 191 |
| }, |
| { |
| "epoch": 5.870886075949367, |
| "grad_norm": 0.297080299478135, |
| "learning_rate": 1.224913060901688e-06, |
| "loss": 0.2215, |
| "step": 192 |
| }, |
| { |
| "epoch": 5.9012658227848105, |
| "grad_norm": 0.30222638091145526, |
| "learning_rate": 1.1510334778208332e-06, |
| "loss": 0.2474, |
| "step": 193 |
| }, |
| { |
| "epoch": 5.931645569620253, |
| "grad_norm": 0.3085881315778646, |
| "learning_rate": 1.0793155744261352e-06, |
| "loss": 0.2553, |
| "step": 194 |
| }, |
| { |
| "epoch": 5.962025316455696, |
| "grad_norm": 0.28842240439901007, |
| "learning_rate": 1.0097768704058542e-06, |
| "loss": 0.2308, |
| "step": 195 |
| }, |
| { |
| "epoch": 5.992405063291139, |
| "grad_norm": 0.38844373768880613, |
| "learning_rate": 9.424343531000968e-07, |
| "loss": 0.3175, |
| "step": 196 |
| }, |
| { |
| "epoch": 6.030379746835443, |
| "grad_norm": 0.3756548730251041, |
| "learning_rate": 8.773044733510338e-07, |
| "loss": 0.2141, |
| "step": 197 |
| }, |
| { |
| "epoch": 6.060759493670886, |
| "grad_norm": 0.36880100887113676, |
| "learning_rate": 8.144031414842012e-07, |
| "loss": 0.2378, |
| "step": 198 |
| }, |
| { |
| "epoch": 6.091139240506329, |
| "grad_norm": 0.3740777777425036, |
| "learning_rate": 7.537457234218271e-07, |
| "loss": 0.2105, |
| "step": 199 |
| }, |
| { |
| "epoch": 6.121518987341772, |
| "grad_norm": 0.2916833710946818, |
| "learning_rate": 6.953470369291349e-07, |
| "loss": 0.2225, |
| "step": 200 |
| }, |
| { |
| "epoch": 6.151898734177215, |
| "grad_norm": 0.2877930871488456, |
| "learning_rate": 6.392213479945852e-07, |
| "loss": 0.2296, |
| "step": 201 |
| }, |
| { |
| "epoch": 6.182278481012658, |
| "grad_norm": 0.28872553378838456, |
| "learning_rate": 5.853823673448877e-07, |
| "loss": 0.2133, |
| "step": 202 |
| }, |
| { |
| "epoch": 6.2126582278481015, |
| "grad_norm": 0.2954608978578038, |
| "learning_rate": 5.33843247095659e-07, |
| "loss": 0.2275, |
| "step": 203 |
| }, |
| { |
| "epoch": 6.243037974683545, |
| "grad_norm": 0.30402408500591077, |
| "learning_rate": 4.846165775385459e-07, |
| "loss": 0.2442, |
| "step": 204 |
| }, |
| { |
| "epoch": 6.273417721518987, |
| "grad_norm": 0.30076848845189774, |
| "learning_rate": 4.3771438406559173e-07, |
| "loss": 0.2196, |
| "step": 205 |
| }, |
| { |
| "epoch": 6.30379746835443, |
| "grad_norm": 0.3416948675216863, |
| "learning_rate": 3.931481242315993e-07, |
| "loss": 0.227, |
| "step": 206 |
| }, |
| { |
| "epoch": 6.334177215189873, |
| "grad_norm": 0.30696108823223234, |
| "learning_rate": 3.5092868495520294e-07, |
| "loss": 0.2125, |
| "step": 207 |
| }, |
| { |
| "epoch": 6.364556962025317, |
| "grad_norm": 0.336665786751688, |
| "learning_rate": 3.110663798593616e-07, |
| "loss": 0.244, |
| "step": 208 |
| }, |
| { |
| "epoch": 6.39493670886076, |
| "grad_norm": 0.2882724454027941, |
| "learning_rate": 2.735709467518699e-07, |
| "loss": 0.2124, |
| "step": 209 |
| }, |
| { |
| "epoch": 6.425316455696202, |
| "grad_norm": 0.29760382515379885, |
| "learning_rate": 2.384515452465475e-07, |
| "loss": 0.2194, |
| "step": 210 |
| }, |
| { |
| "epoch": 6.455696202531645, |
| "grad_norm": 0.295342001002325, |
| "learning_rate": 2.0571675452567997e-07, |
| "loss": 0.2325, |
| "step": 211 |
| }, |
| { |
| "epoch": 6.4860759493670885, |
| "grad_norm": 0.2835777661113264, |
| "learning_rate": 1.7537457124423896e-07, |
| "loss": 0.2102, |
| "step": 212 |
| }, |
| { |
| "epoch": 6.516455696202532, |
| "grad_norm": 0.27479736988784964, |
| "learning_rate": 1.474324075764111e-07, |
| "loss": 0.2152, |
| "step": 213 |
| }, |
| { |
| "epoch": 6.546835443037975, |
| "grad_norm": 0.27910917975904814, |
| "learning_rate": 1.2189708940490653e-07, |
| "loss": 0.2158, |
| "step": 214 |
| }, |
| { |
| "epoch": 6.577215189873417, |
| "grad_norm": 0.2828526623147071, |
| "learning_rate": 9.877485465349057e-08, |
| "loss": 0.2276, |
| "step": 215 |
| }, |
| { |
| "epoch": 6.6075949367088604, |
| "grad_norm": 0.28225492331747004, |
| "learning_rate": 7.807135176314707e-08, |
| "loss": 0.2327, |
| "step": 216 |
| }, |
| { |
| "epoch": 6.637974683544304, |
| "grad_norm": 0.28194784767364817, |
| "learning_rate": 5.979163831223988e-08, |
| "loss": 0.23, |
| "step": 217 |
| }, |
| { |
| "epoch": 6.668354430379747, |
| "grad_norm": 0.28029183381258665, |
| "learning_rate": 4.394017978101905e-08, |
| "loss": 0.2219, |
| "step": 218 |
| }, |
| { |
| "epoch": 6.69873417721519, |
| "grad_norm": 0.28779964462670415, |
| "learning_rate": 3.0520848460765525e-08, |
| "loss": 0.2191, |
| "step": 219 |
| }, |
| { |
| "epoch": 6.729113924050633, |
| "grad_norm": 0.2873186242341778, |
| "learning_rate": 1.9536922507841227e-08, |
| "loss": 0.2228, |
| "step": 220 |
| }, |
| { |
| "epoch": 6.759493670886076, |
| "grad_norm": 0.29578442444567216, |
| "learning_rate": 1.099108514288627e-08, |
| "loss": 0.2269, |
| "step": 221 |
| }, |
| { |
| "epoch": 6.789873417721519, |
| "grad_norm": 0.27426473251329325, |
| "learning_rate": 4.885423995341088e-09, |
| "loss": 0.2095, |
| "step": 222 |
| }, |
| { |
| "epoch": 6.820253164556962, |
| "grad_norm": 0.27895748053180836, |
| "learning_rate": 1.2214305934699078e-09, |
| "loss": 0.2283, |
| "step": 223 |
| }, |
| { |
| "epoch": 6.850632911392405, |
| "grad_norm": 0.2729179279839403, |
| "learning_rate": 0.0, |
| "loss": 0.222, |
| "step": 224 |
| }, |
| { |
| "epoch": 6.850632911392405, |
| "step": 224, |
| "total_flos": 5.277578878380933e+17, |
| "train_loss": 0.39017832059679286, |
| "train_runtime": 21047.8381, |
| "train_samples_per_second": 1.051, |
| "train_steps_per_second": 0.011 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 224, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 7, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 5.277578878380933e+17, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |