{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.09386407091952025,
  "eval_steps": 150,
  "global_step": 270,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0006952894142186686,
      "grad_norm": 5.413117383066536,
      "learning_rate": 6.944444444444445e-07,
      "loss": 2.3653,
      "step": 2
    },
    {
      "epoch": 0.0013905788284373371,
      "grad_norm": 5.092870612337868,
      "learning_rate": 1.388888888888889e-06,
      "loss": 2.5001,
      "step": 4
    },
    {
      "epoch": 0.0020858682426560054,
      "grad_norm": 3.6285625961711943,
      "learning_rate": 2.0833333333333334e-06,
      "loss": 2.3759,
      "step": 6
    },
    {
      "epoch": 0.0027811576568746743,
      "grad_norm": 2.5025911526151075,
      "learning_rate": 2.777777777777778e-06,
      "loss": 1.944,
      "step": 8
    },
    {
      "epoch": 0.0034764470710933427,
      "grad_norm": 3.077266049542496,
      "learning_rate": 3.4722222222222224e-06,
      "loss": 2.2504,
      "step": 10
    },
    {
      "epoch": 0.004171736485312011,
      "grad_norm": 4.068243939187174,
      "learning_rate": 4.166666666666667e-06,
      "loss": 2.0637,
      "step": 12
    },
    {
      "epoch": 0.00486702589953068,
      "grad_norm": 3.511444738830971,
      "learning_rate": 4.861111111111111e-06,
      "loss": 2.3604,
      "step": 14
    },
    {
      "epoch": 0.0055623153137493485,
      "grad_norm": 5.925744892256934,
      "learning_rate": 5.555555555555556e-06,
      "loss": 2.6528,
      "step": 16
    },
    {
      "epoch": 0.0062576047279680165,
      "grad_norm": 2.7950114871483405,
      "learning_rate": 6.25e-06,
      "loss": 2.3229,
      "step": 18
    },
    {
      "epoch": 0.006952894142186685,
      "grad_norm": 6.586211843766182,
      "learning_rate": 6.944444444444445e-06,
      "loss": 2.342,
      "step": 20
    },
    {
      "epoch": 0.0076481835564053535,
      "grad_norm": 4.902839955269193,
      "learning_rate": 7.63888888888889e-06,
      "loss": 2.4188,
      "step": 22
    },
    {
      "epoch": 0.008343472970624021,
      "grad_norm": 4.257062809771645,
      "learning_rate": 8.333333333333334e-06,
      "loss": 1.7957,
      "step": 24
    },
    {
      "epoch": 0.009038762384842691,
      "grad_norm": 4.460352004615699,
      "learning_rate": 9.027777777777777e-06,
      "loss": 2.0726,
      "step": 26
    },
    {
      "epoch": 0.00973405179906136,
      "grad_norm": 5.858061506133739,
      "learning_rate": 9.722222222222223e-06,
      "loss": 2.0455,
      "step": 28
    },
    {
      "epoch": 0.010429341213280027,
      "grad_norm": 4.331946668100709,
      "learning_rate": 1.0416666666666668e-05,
      "loss": 1.7645,
      "step": 30
    },
    {
      "epoch": 0.011124630627498697,
      "grad_norm": 5.428742204187391,
      "learning_rate": 1.1111111111111112e-05,
      "loss": 1.7446,
      "step": 32
    },
    {
      "epoch": 0.011819920041717365,
      "grad_norm": 2.485909286541028,
      "learning_rate": 1.1805555555555555e-05,
      "loss": 1.5885,
      "step": 34
    },
    {
      "epoch": 0.012515209455936033,
      "grad_norm": 3.2602949308730222,
      "learning_rate": 1.25e-05,
      "loss": 1.1014,
      "step": 36
    },
    {
      "epoch": 0.013210498870154701,
      "grad_norm": 4.962187747415964,
      "learning_rate": 1.3194444444444446e-05,
      "loss": 1.3588,
      "step": 38
    },
    {
      "epoch": 0.01390578828437337,
      "grad_norm": 7.627756894198461,
      "learning_rate": 1.388888888888889e-05,
      "loss": 1.4014,
      "step": 40
    },
    {
      "epoch": 0.014601077698592039,
      "grad_norm": 2.821848388410092,
      "learning_rate": 1.4583333333333335e-05,
      "loss": 1.0959,
      "step": 42
    },
    {
      "epoch": 0.015296367112810707,
      "grad_norm": 10.989320944549025,
      "learning_rate": 1.527777777777778e-05,
      "loss": 1.739,
      "step": 44
    },
    {
      "epoch": 0.015991656527029375,
      "grad_norm": 4.300936602591115,
      "learning_rate": 1.597222222222222e-05,
      "loss": 1.1078,
      "step": 46
    },
    {
      "epoch": 0.016686945941248043,
      "grad_norm": 3.7539663261019856,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 1.2701,
      "step": 48
    },
    {
      "epoch": 0.017382235355466714,
      "grad_norm": 2.589248169352173,
      "learning_rate": 1.736111111111111e-05,
      "loss": 1.4452,
      "step": 50
    },
    {
      "epoch": 0.018077524769685382,
      "grad_norm": 3.6679301322156177,
      "learning_rate": 1.8055555555555555e-05,
      "loss": 1.4243,
      "step": 52
    },
    {
      "epoch": 0.01877281418390405,
      "grad_norm": 2.0885660923860074,
      "learning_rate": 1.8750000000000002e-05,
      "loss": 1.4453,
      "step": 54
    },
    {
      "epoch": 0.01946810359812272,
      "grad_norm": 2.955353237610474,
      "learning_rate": 1.9444444444444445e-05,
      "loss": 1.5388,
      "step": 56
    },
    {
      "epoch": 0.020163393012341387,
      "grad_norm": 3.0527006398487018,
      "learning_rate": 2.013888888888889e-05,
      "loss": 1.4043,
      "step": 58
    },
    {
      "epoch": 0.020858682426560055,
      "grad_norm": 1.9862208864154767,
      "learning_rate": 2.0833333333333336e-05,
      "loss": 1.1007,
      "step": 60
    },
    {
      "epoch": 0.021553971840778723,
      "grad_norm": 2.347361178472164,
      "learning_rate": 2.152777777777778e-05,
      "loss": 0.9291,
      "step": 62
    },
    {
      "epoch": 0.022249261254997394,
      "grad_norm": 2.4812993223105995,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 0.9592,
      "step": 64
    },
    {
      "epoch": 0.022944550669216062,
      "grad_norm": 1.639333831845777,
      "learning_rate": 2.2916666666666667e-05,
      "loss": 1.0645,
      "step": 66
    },
    {
      "epoch": 0.02363984008343473,
      "grad_norm": 2.92858351082494,
      "learning_rate": 2.361111111111111e-05,
      "loss": 1.4269,
      "step": 68
    },
    {
      "epoch": 0.024335129497653398,
      "grad_norm": 2.8503434812871604,
      "learning_rate": 2.4305555555555558e-05,
      "loss": 0.7829,
      "step": 70
    },
    {
      "epoch": 0.025030418911872066,
      "grad_norm": 2.4875590459354107,
      "learning_rate": 2.5e-05,
      "loss": 0.9733,
      "step": 72
    },
    {
      "epoch": 0.025725708326090734,
      "grad_norm": 5.2567545525905075,
      "learning_rate": 2.5694444444444445e-05,
      "loss": 1.3559,
      "step": 74
    },
    {
      "epoch": 0.026420997740309402,
      "grad_norm": 4.68745753567611,
      "learning_rate": 2.6388888888888892e-05,
      "loss": 1.1499,
      "step": 76
    },
    {
      "epoch": 0.027116287154528074,
      "grad_norm": 2.810345453706711,
      "learning_rate": 2.7083333333333332e-05,
      "loss": 0.8636,
      "step": 78
    },
    {
      "epoch": 0.02781157656874674,
      "grad_norm": 3.0144493626195388,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.9274,
      "step": 80
    },
    {
      "epoch": 0.02850686598296541,
      "grad_norm": 2.3263036535508523,
      "learning_rate": 2.8472222222222223e-05,
      "loss": 1.3979,
      "step": 82
    },
    {
      "epoch": 0.029202155397184078,
      "grad_norm": 1.6009019341419857,
      "learning_rate": 2.916666666666667e-05,
      "loss": 0.9039,
      "step": 84
    },
    {
      "epoch": 0.029897444811402746,
      "grad_norm": 2.0637506891442294,
      "learning_rate": 2.9861111111111113e-05,
      "loss": 1.5187,
      "step": 86
    },
    {
      "epoch": 0.030592734225621414,
      "grad_norm": 1.5533001268209932,
      "learning_rate": 3.055555555555556e-05,
      "loss": 0.8937,
      "step": 88
    },
    {
      "epoch": 0.03128802363984008,
      "grad_norm": 1.9974857545819733,
      "learning_rate": 3.125e-05,
      "loss": 0.8218,
      "step": 90
    },
    {
      "epoch": 0.03198331305405875,
      "grad_norm": 1.871899331408953,
      "learning_rate": 3.194444444444444e-05,
      "loss": 1.2451,
      "step": 92
    },
    {
      "epoch": 0.03267860246827742,
      "grad_norm": 1.591601830348497,
      "learning_rate": 3.263888888888889e-05,
      "loss": 0.7485,
      "step": 94
    },
    {
      "epoch": 0.033373891882496086,
      "grad_norm": 2.404166647711005,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 1.1587,
      "step": 96
    },
    {
      "epoch": 0.03406918129671476,
      "grad_norm": 1.581080788392888,
      "learning_rate": 3.402777777777778e-05,
      "loss": 0.9578,
      "step": 98
    },
    {
      "epoch": 0.03476447071093343,
      "grad_norm": 2.1551207338771547,
      "learning_rate": 3.472222222222222e-05,
      "loss": 1.1305,
      "step": 100
    },
    {
      "epoch": 0.0354597601251521,
      "grad_norm": 4.2246272812704,
      "learning_rate": 3.541666666666667e-05,
      "loss": 1.04,
      "step": 102
    },
    {
      "epoch": 0.036155049539370765,
      "grad_norm": 2.5526898161765588,
      "learning_rate": 3.611111111111111e-05,
      "loss": 1.0743,
      "step": 104
    },
    {
      "epoch": 0.03685033895358943,
      "grad_norm": 1.9784815137623597,
      "learning_rate": 3.6805555555555556e-05,
      "loss": 0.9667,
      "step": 106
    },
    {
      "epoch": 0.0375456283678081,
      "grad_norm": 1.9131648200880944,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 0.8951,
      "step": 108
    },
    {
      "epoch": 0.03824091778202677,
      "grad_norm": 3.9405296351174575,
      "learning_rate": 3.8194444444444444e-05,
      "loss": 1.222,
      "step": 110
    },
    {
      "epoch": 0.03893620719624544,
      "grad_norm": 13.764861545850291,
      "learning_rate": 3.888888888888889e-05,
      "loss": 1.0497,
      "step": 112
    },
    {
      "epoch": 0.039631496610464105,
      "grad_norm": 3.998123122175411,
      "learning_rate": 3.958333333333333e-05,
      "loss": 1.0901,
      "step": 114
    },
    {
      "epoch": 0.04032678602468277,
      "grad_norm": 3.126193921423756,
      "learning_rate": 4.027777777777778e-05,
      "loss": 1.1143,
      "step": 116
    },
    {
      "epoch": 0.04102207543890144,
      "grad_norm": 2.7839926692610613,
      "learning_rate": 4.0972222222222225e-05,
      "loss": 1.1637,
      "step": 118
    },
    {
      "epoch": 0.04171736485312011,
      "grad_norm": 2.591162621161276,
      "learning_rate": 4.166666666666667e-05,
      "loss": 1.0624,
      "step": 120
    },
    {
      "epoch": 0.04241265426733878,
      "grad_norm": 1.4930703711545332,
      "learning_rate": 4.236111111111111e-05,
      "loss": 1.0239,
      "step": 122
    },
    {
      "epoch": 0.043107943681557445,
      "grad_norm": 4.355150070532966,
      "learning_rate": 4.305555555555556e-05,
      "loss": 1.266,
      "step": 124
    },
    {
      "epoch": 0.04380323309577612,
      "grad_norm": 1.8285608876277135,
      "learning_rate": 4.375e-05,
      "loss": 1.0667,
      "step": 126
    },
    {
      "epoch": 0.04449852250999479,
      "grad_norm": 2.1848832742617055,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 0.7549,
      "step": 128
    },
    {
      "epoch": 0.045193811924213456,
      "grad_norm": 3.547857947451226,
      "learning_rate": 4.5138888888888894e-05,
      "loss": 0.7974,
      "step": 130
    },
    {
      "epoch": 0.045889101338432124,
      "grad_norm": 3.372457028128184,
      "learning_rate": 4.5833333333333334e-05,
      "loss": 1.0482,
      "step": 132
    },
    {
      "epoch": 0.04658439075265079,
      "grad_norm": 2.2871138548091703,
      "learning_rate": 4.652777777777778e-05,
      "loss": 0.8246,
      "step": 134
    },
    {
      "epoch": 0.04727968016686946,
      "grad_norm": 1.7489408054743605,
      "learning_rate": 4.722222222222222e-05,
      "loss": 1.0011,
      "step": 136
    },
    {
      "epoch": 0.04797496958108813,
      "grad_norm": 1.944650349232646,
      "learning_rate": 4.791666666666667e-05,
      "loss": 0.7828,
      "step": 138
    },
    {
      "epoch": 0.048670258995306796,
      "grad_norm": 2.321619459307342,
      "learning_rate": 4.8611111111111115e-05,
      "loss": 1.02,
      "step": 140
    },
    {
      "epoch": 0.049365548409525464,
      "grad_norm": 5.317732559595606,
      "learning_rate": 4.930555555555556e-05,
      "loss": 0.9539,
      "step": 142
    },
    {
      "epoch": 0.05006083782374413,
      "grad_norm": 1.5887292172279854,
      "learning_rate": 5e-05,
      "loss": 0.7961,
      "step": 144
    },
    {
      "epoch": 0.0507561272379628,
      "grad_norm": 2.9987019111854964,
      "learning_rate": 5.069444444444444e-05,
      "loss": 1.0018,
      "step": 146
    },
    {
      "epoch": 0.05145141665218147,
      "grad_norm": 5.63878906132749,
      "learning_rate": 5.138888888888889e-05,
      "loss": 0.9278,
      "step": 148
    },
    {
      "epoch": 0.052146706066400136,
      "grad_norm": 2.4849685204332834,
      "learning_rate": 5.208333333333334e-05,
      "loss": 0.7531,
      "step": 150
    },
    {
      "epoch": 0.052146706066400136,
      "eval_loss": 0.9286500215530396,
      "eval_runtime": 711.2619,
      "eval_samples_per_second": 6.808,
      "eval_steps_per_second": 0.214,
      "step": 150
    },
    {
      "epoch": 0.052841995480618804,
      "grad_norm": 3.4927981580288776,
      "learning_rate": 5.2777777777777784e-05,
      "loss": 0.7422,
      "step": 152
    },
    {
      "epoch": 0.05353728489483748,
      "grad_norm": 2.6593724943984682,
      "learning_rate": 5.3472222222222224e-05,
      "loss": 0.7762,
      "step": 154
    },
    {
      "epoch": 0.05423257430905615,
      "grad_norm": 2.99709354088536,
      "learning_rate": 5.4166666666666664e-05,
      "loss": 0.8658,
      "step": 156
    },
    {
      "epoch": 0.054927863723274815,
      "grad_norm": 3.9196361623391414,
      "learning_rate": 5.486111111111112e-05,
      "loss": 1.1783,
      "step": 158
    },
    {
      "epoch": 0.05562315313749348,
      "grad_norm": 3.022562685608673,
      "learning_rate": 5.555555555555556e-05,
      "loss": 1.0602,
      "step": 160
    },
    {
      "epoch": 0.05631844255171215,
      "grad_norm": 3.292942684053579,
      "learning_rate": 5.6250000000000005e-05,
      "loss": 0.8561,
      "step": 162
    },
    {
      "epoch": 0.05701373196593082,
      "grad_norm": 2.397775023338686,
      "learning_rate": 5.6944444444444445e-05,
      "loss": 0.9561,
      "step": 164
    },
    {
      "epoch": 0.05770902138014949,
      "grad_norm": 3.0546714312119643,
      "learning_rate": 5.7638888888888886e-05,
      "loss": 0.8274,
      "step": 166
    },
    {
      "epoch": 0.058404310794368156,
      "grad_norm": 2.4021883380894393,
      "learning_rate": 5.833333333333334e-05,
      "loss": 0.8492,
      "step": 168
    },
    {
      "epoch": 0.059099600208586824,
      "grad_norm": 2.6052829770116293,
      "learning_rate": 5.902777777777778e-05,
      "loss": 1.1005,
      "step": 170
    },
    {
      "epoch": 0.05979488962280549,
      "grad_norm": 1.6838192892320467,
      "learning_rate": 5.972222222222223e-05,
      "loss": 1.0157,
      "step": 172
    },
    {
      "epoch": 0.06049017903702416,
      "grad_norm": 3.98880214871885,
      "learning_rate": 6.041666666666667e-05,
      "loss": 0.8136,
      "step": 174
    },
    {
      "epoch": 0.06118546845124283,
      "grad_norm": 4.81494345341073,
      "learning_rate": 6.111111111111112e-05,
      "loss": 0.9475,
      "step": 176
    },
    {
      "epoch": 0.061880757865461496,
      "grad_norm": 1.9926278890091862,
      "learning_rate": 6.180555555555556e-05,
      "loss": 0.6549,
      "step": 178
    },
    {
      "epoch": 0.06257604727968016,
      "grad_norm": 1.750353030728397,
      "learning_rate": 6.25e-05,
      "loss": 1.1442,
      "step": 180
    },
    {
      "epoch": 0.06327133669389884,
      "grad_norm": 4.77249782692129,
      "learning_rate": 6.319444444444444e-05,
      "loss": 1.1927,
      "step": 182
    },
    {
      "epoch": 0.0639666261081175,
      "grad_norm": 1.866901788617278,
      "learning_rate": 6.388888888888888e-05,
      "loss": 1.0032,
      "step": 184
    },
    {
      "epoch": 0.06466191552233617,
      "grad_norm": 1.7495859751833545,
      "learning_rate": 6.458333333333334e-05,
      "loss": 0.8439,
      "step": 186
    },
    {
      "epoch": 0.06535720493655484,
      "grad_norm": 2.971674018184174,
      "learning_rate": 6.527777777777778e-05,
      "loss": 0.9817,
      "step": 188
    },
    {
      "epoch": 0.06605249435077351,
      "grad_norm": 2.3753292673540165,
      "learning_rate": 6.597222222222223e-05,
      "loss": 0.9919,
      "step": 190
    },
    {
      "epoch": 0.06674778376499217,
      "grad_norm": 2.7019148741557744,
      "learning_rate": 6.666666666666667e-05,
      "loss": 1.1442,
      "step": 192
    },
    {
      "epoch": 0.06744307317921085,
      "grad_norm": 1.8670113287712482,
      "learning_rate": 6.736111111111112e-05,
      "loss": 0.8905,
      "step": 194
    },
    {
      "epoch": 0.06813836259342952,
      "grad_norm": 1.4199145680760579,
      "learning_rate": 6.805555555555556e-05,
      "loss": 0.7223,
      "step": 196
    },
    {
      "epoch": 0.06883365200764818,
      "grad_norm": 3.894200902880186,
      "learning_rate": 6.875e-05,
      "loss": 0.9005,
      "step": 198
    },
    {
      "epoch": 0.06952894142186686,
      "grad_norm": 3.2710376491241955,
      "learning_rate": 6.944444444444444e-05,
      "loss": 1.1605,
      "step": 200
    },
    {
      "epoch": 0.07022423083608552,
      "grad_norm": 1.6459754670035065,
      "learning_rate": 7.013888888888888e-05,
      "loss": 0.8551,
      "step": 202
    },
    {
      "epoch": 0.0709195202503042,
      "grad_norm": 4.361031640374508,
      "learning_rate": 7.083333333333334e-05,
      "loss": 0.7007,
      "step": 204
    },
    {
      "epoch": 0.07161480966452285,
      "grad_norm": 3.573741549123141,
      "learning_rate": 7.152777777777778e-05,
      "loss": 1.1396,
      "step": 206
    },
    {
      "epoch": 0.07231009907874153,
      "grad_norm": 3.296990311359108,
      "learning_rate": 7.222222222222222e-05,
      "loss": 0.8695,
      "step": 208
    },
    {
      "epoch": 0.07300538849296019,
      "grad_norm": 6.769659823038884,
      "learning_rate": 7.291666666666667e-05,
      "loss": 1.0511,
      "step": 210
    },
    {
      "epoch": 0.07370067790717887,
      "grad_norm": 1.6695233666860303,
      "learning_rate": 7.361111111111111e-05,
      "loss": 0.8174,
      "step": 212
    },
    {
      "epoch": 0.07439596732139753,
      "grad_norm": 1.3543755216281146,
      "learning_rate": 7.430555555555557e-05,
      "loss": 0.7137,
      "step": 214
    },
    {
      "epoch": 0.0750912567356162,
      "grad_norm": 4.861063813937456,
      "learning_rate": 7.500000000000001e-05,
      "loss": 1.0132,
      "step": 216
    },
    {
      "epoch": 0.07578654614983486,
      "grad_norm": 5.715913176528681,
      "learning_rate": 7.569444444444445e-05,
      "loss": 1.1657,
      "step": 218
    },
    {
      "epoch": 0.07648183556405354,
      "grad_norm": 4.0193839358302235,
      "learning_rate": 7.638888888888889e-05,
      "loss": 1.0547,
      "step": 220
    },
    {
      "epoch": 0.0771771249782722,
      "grad_norm": 2.26849134517291,
      "learning_rate": 7.708333333333334e-05,
      "loss": 0.9073,
      "step": 222
    },
    {
      "epoch": 0.07787241439249087,
      "grad_norm": 4.570943697810998,
      "learning_rate": 7.777777777777778e-05,
      "loss": 1.3726,
      "step": 224
    },
    {
      "epoch": 0.07856770380670955,
      "grad_norm": 1.1942436910880105,
      "learning_rate": 7.847222222222222e-05,
      "loss": 0.8146,
      "step": 226
    },
    {
      "epoch": 0.07926299322092821,
      "grad_norm": 2.111849588751211,
      "learning_rate": 7.916666666666666e-05,
      "loss": 1.1157,
      "step": 228
    },
    {
      "epoch": 0.07995828263514689,
      "grad_norm": 1.0283088880069582,
      "learning_rate": 7.986111111111112e-05,
      "loss": 0.767,
      "step": 230
    },
    {
      "epoch": 0.08065357204936555,
      "grad_norm": 1.2834055069208525,
      "learning_rate": 8.055555555555556e-05,
      "loss": 0.6625,
      "step": 232
    },
    {
      "epoch": 0.08134886146358422,
      "grad_norm": 1.3772942873595098,
      "learning_rate": 8.125000000000001e-05,
      "loss": 0.8065,
      "step": 234
    },
    {
      "epoch": 0.08204415087780288,
      "grad_norm": 1.5113456549735176,
      "learning_rate": 8.194444444444445e-05,
      "loss": 0.8606,
      "step": 236
    },
    {
      "epoch": 0.08273944029202156,
      "grad_norm": 1.5765846352838255,
      "learning_rate": 8.263888888888889e-05,
      "loss": 0.8335,
      "step": 238
    },
    {
      "epoch": 0.08343472970624022,
      "grad_norm": 2.4873462478329404,
      "learning_rate": 8.333333333333334e-05,
      "loss": 0.9705,
      "step": 240
    },
    {
      "epoch": 0.0841300191204589,
      "grad_norm": 1.2369219925635513,
      "learning_rate": 8.402777777777778e-05,
      "loss": 0.6061,
      "step": 242
    },
    {
      "epoch": 0.08482530853467755,
      "grad_norm": 2.542132212473201,
      "learning_rate": 8.472222222222222e-05,
      "loss": 0.9142,
      "step": 244
    },
    {
      "epoch": 0.08552059794889623,
      "grad_norm": 2.0301734217803022,
      "learning_rate": 8.541666666666666e-05,
      "loss": 0.8997,
      "step": 246
    },
    {
      "epoch": 0.08621588736311489,
      "grad_norm": 1.8605316982945626,
      "learning_rate": 8.611111111111112e-05,
      "loss": 1.005,
      "step": 248
    },
    {
      "epoch": 0.08691117677733357,
      "grad_norm": 1.193555257951713,
      "learning_rate": 8.680555555555556e-05,
      "loss": 0.8617,
      "step": 250
    },
    {
      "epoch": 0.08760646619155224,
      "grad_norm": 1.243815428863678,
      "learning_rate": 8.75e-05,
      "loss": 0.6261,
      "step": 252
    },
    {
      "epoch": 0.0883017556057709,
      "grad_norm": 1.6487754861704442,
      "learning_rate": 8.819444444444445e-05,
      "loss": 0.9219,
      "step": 254
    },
    {
      "epoch": 0.08899704501998958,
      "grad_norm": 1.1768410857322613,
      "learning_rate": 8.888888888888889e-05,
      "loss": 0.8563,
      "step": 256
    },
    {
      "epoch": 0.08969233443420824,
      "grad_norm": 1.1155265191420587,
      "learning_rate": 8.958333333333335e-05,
      "loss": 0.998,
      "step": 258
    },
    {
      "epoch": 0.09038762384842691,
      "grad_norm": 1.4432456616674065,
      "learning_rate": 9.027777777777779e-05,
      "loss": 0.8755,
      "step": 260
    },
    {
      "epoch": 0.09108291326264557,
      "grad_norm": 1.6013486668654413,
      "learning_rate": 9.097222222222223e-05,
      "loss": 0.6998,
      "step": 262
    },
    {
      "epoch": 0.09177820267686425,
      "grad_norm": 0.7869968770186737,
      "learning_rate": 9.166666666666667e-05,
      "loss": 0.7574,
      "step": 264
    },
    {
      "epoch": 0.09247349209108291,
      "grad_norm": 2.1117903903864566,
      "learning_rate": 9.236111111111112e-05,
      "loss": 0.8436,
      "step": 266
    },
    {
      "epoch": 0.09316878150530158,
      "grad_norm": 1.3582999584721895,
      "learning_rate": 9.305555555555556e-05,
      "loss": 0.8503,
      "step": 268
    },
    {
      "epoch": 0.09386407091952025,
      "grad_norm": 1.5674581009005415,
      "learning_rate": 9.375e-05,
      "loss": 0.9561,
      "step": 270
    }
  ],
  "logging_steps": 2,
  "max_steps": 5752,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 90,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1074300523118592.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}