{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 6.850632911392405,
  "eval_steps": 500,
  "global_step": 224,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.030379746835443037,
      "grad_norm": 6.142133362846723,
      "learning_rate": 8.695652173913044e-07,
      "loss": 0.7916,
      "step": 1
    },
    {
      "epoch": 0.060759493670886074,
      "grad_norm": 6.357452301292556,
      "learning_rate": 1.7391304347826088e-06,
      "loss": 0.8104,
      "step": 2
    },
    {
      "epoch": 0.09113924050632911,
      "grad_norm": 6.198951704234916,
      "learning_rate": 2.6086956521739132e-06,
      "loss": 0.8154,
      "step": 3
    },
    {
      "epoch": 0.12151898734177215,
      "grad_norm": 5.65438381432537,
      "learning_rate": 3.4782608695652175e-06,
      "loss": 0.8006,
      "step": 4
    },
    {
      "epoch": 0.1518987341772152,
      "grad_norm": 4.4279853468183585,
      "learning_rate": 4.347826086956522e-06,
      "loss": 0.7842,
      "step": 5
    },
    {
      "epoch": 0.18227848101265823,
      "grad_norm": 2.4033605477843993,
      "learning_rate": 5.2173913043478265e-06,
      "loss": 0.7275,
      "step": 6
    },
    {
      "epoch": 0.21265822784810126,
      "grad_norm": 2.008389646266792,
      "learning_rate": 6.086956521739132e-06,
      "loss": 0.7078,
      "step": 7
    },
    {
      "epoch": 0.2430379746835443,
      "grad_norm": 3.856406389872771,
      "learning_rate": 6.956521739130435e-06,
      "loss": 0.7094,
      "step": 8
    },
    {
      "epoch": 0.27341772151898736,
      "grad_norm": 4.236402916502628,
      "learning_rate": 7.82608695652174e-06,
      "loss": 0.7278,
      "step": 9
    },
    {
      "epoch": 0.3037974683544304,
      "grad_norm": 4.000542971929127,
      "learning_rate": 8.695652173913044e-06,
      "loss": 0.6813,
      "step": 10
    },
    {
      "epoch": 0.3341772151898734,
      "grad_norm": 4.178694394396282,
      "learning_rate": 9.565217391304349e-06,
      "loss": 0.6878,
      "step": 11
    },
    {
      "epoch": 0.36455696202531646,
      "grad_norm": 3.2440423383286325,
      "learning_rate": 1.0434782608695653e-05,
      "loss": 0.668,
      "step": 12
    },
    {
      "epoch": 0.3949367088607595,
      "grad_norm": 1.9081921773489765,
      "learning_rate": 1.1304347826086957e-05,
      "loss": 0.6509,
      "step": 13
    },
    {
      "epoch": 0.4253164556962025,
      "grad_norm": 1.6856292551449288,
      "learning_rate": 1.2173913043478263e-05,
      "loss": 0.6206,
      "step": 14
    },
    {
      "epoch": 0.45569620253164556,
      "grad_norm": 1.9652445016067075,
      "learning_rate": 1.3043478260869566e-05,
      "loss": 0.5931,
      "step": 15
    },
    {
      "epoch": 0.4860759493670886,
      "grad_norm": 1.490317661212989,
      "learning_rate": 1.391304347826087e-05,
      "loss": 0.6108,
      "step": 16
    },
    {
      "epoch": 0.5164556962025316,
      "grad_norm": 1.0152980978340718,
      "learning_rate": 1.4782608695652174e-05,
      "loss": 0.5983,
      "step": 17
    },
    {
      "epoch": 0.5468354430379747,
      "grad_norm": 1.0762993241125118,
      "learning_rate": 1.565217391304348e-05,
      "loss": 0.5877,
      "step": 18
    },
    {
      "epoch": 0.5772151898734177,
      "grad_norm": 0.8925505589092044,
      "learning_rate": 1.6521739130434785e-05,
      "loss": 0.5633,
      "step": 19
    },
    {
      "epoch": 0.6075949367088608,
      "grad_norm": 0.9107043160755183,
      "learning_rate": 1.739130434782609e-05,
      "loss": 0.5642,
      "step": 20
    },
    {
      "epoch": 0.6379746835443038,
      "grad_norm": 0.7731829103121682,
      "learning_rate": 1.8260869565217393e-05,
      "loss": 0.5615,
      "step": 21
    },
    {
      "epoch": 0.6683544303797468,
      "grad_norm": 0.744129212592715,
      "learning_rate": 1.9130434782608697e-05,
      "loss": 0.5509,
      "step": 22
    },
    {
      "epoch": 0.6987341772151898,
      "grad_norm": 0.8955523452131373,
      "learning_rate": 2e-05,
      "loss": 0.5549,
      "step": 23
    },
    {
      "epoch": 0.7291139240506329,
      "grad_norm": 0.8183528477924088,
      "learning_rate": 1.999877856940653e-05,
      "loss": 0.5297,
      "step": 24
    },
    {
      "epoch": 0.759493670886076,
      "grad_norm": 0.7240144765438188,
      "learning_rate": 1.999511457600466e-05,
      "loss": 0.5563,
      "step": 25
    },
    {
      "epoch": 0.789873417721519,
      "grad_norm": 0.8022538993822405,
      "learning_rate": 1.9989008914857115e-05,
      "loss": 0.5324,
      "step": 26
    },
    {
      "epoch": 0.8202531645569621,
      "grad_norm": 0.6940947230093911,
      "learning_rate": 1.998046307749216e-05,
      "loss": 0.5295,
      "step": 27
    },
    {
      "epoch": 0.850632911392405,
      "grad_norm": 0.7414931560011572,
      "learning_rate": 1.9969479151539238e-05,
      "loss": 0.5312,
      "step": 28
    },
    {
      "epoch": 0.8810126582278481,
      "grad_norm": 0.6914458295472939,
      "learning_rate": 1.9956059820218982e-05,
      "loss": 0.5265,
      "step": 29
    },
    {
      "epoch": 0.9113924050632911,
      "grad_norm": 0.6406228399495421,
      "learning_rate": 1.9940208361687762e-05,
      "loss": 0.5311,
      "step": 30
    },
    {
      "epoch": 0.9417721518987342,
      "grad_norm": 0.5010766427270893,
      "learning_rate": 1.9921928648236855e-05,
      "loss": 0.5279,
      "step": 31
    },
    {
      "epoch": 0.9721518987341772,
      "grad_norm": 0.5559353823675928,
      "learning_rate": 1.990122514534651e-05,
      "loss": 0.4971,
      "step": 32
    },
    {
      "epoch": 1.010126582278481,
      "grad_norm": 0.8149893932371196,
      "learning_rate": 1.9878102910595097e-05,
      "loss": 0.6966,
      "step": 33
    },
    {
      "epoch": 1.040506329113924,
      "grad_norm": 0.5294742502288636,
      "learning_rate": 1.985256759242359e-05,
      "loss": 0.4646,
      "step": 34
    },
    {
      "epoch": 1.070886075949367,
      "grad_norm": 0.6860122056058924,
      "learning_rate": 1.982462542875576e-05,
      "loss": 0.4899,
      "step": 35
    },
    {
      "epoch": 1.1012658227848102,
      "grad_norm": 0.57893128793477,
      "learning_rate": 1.979428324547432e-05,
      "loss": 0.4967,
      "step": 36
    },
    {
      "epoch": 1.1316455696202532,
      "grad_norm": 0.6199448384978387,
      "learning_rate": 1.9761548454753455e-05,
      "loss": 0.4847,
      "step": 37
    },
    {
      "epoch": 1.1620253164556962,
      "grad_norm": 0.5282551845339171,
      "learning_rate": 1.972642905324813e-05,
      "loss": 0.4804,
      "step": 38
    },
    {
      "epoch": 1.1924050632911392,
      "grad_norm": 0.582257344063008,
      "learning_rate": 1.9688933620140638e-05,
      "loss": 0.4881,
      "step": 39
    },
    {
      "epoch": 1.2227848101265824,
      "grad_norm": 0.5077564908323113,
      "learning_rate": 1.96490713150448e-05,
      "loss": 0.4699,
      "step": 40
    },
    {
      "epoch": 1.2531645569620253,
      "grad_norm": 0.5242356194926282,
      "learning_rate": 1.9606851875768404e-05,
      "loss": 0.4791,
      "step": 41
    },
    {
      "epoch": 1.2835443037974683,
      "grad_norm": 0.5024134383389819,
      "learning_rate": 1.956228561593441e-05,
      "loss": 0.4732,
      "step": 42
    },
    {
      "epoch": 1.3139240506329113,
      "grad_norm": 0.4483295211756338,
      "learning_rate": 1.9515383422461457e-05,
      "loss": 0.488,
      "step": 43
    },
    {
      "epoch": 1.3443037974683545,
      "grad_norm": 0.4483088881234574,
      "learning_rate": 1.9466156752904344e-05,
      "loss": 0.4618,
      "step": 44
    },
    {
      "epoch": 1.3746835443037975,
      "grad_norm": 0.3651600174872845,
      "learning_rate": 1.9414617632655114e-05,
      "loss": 0.4652,
      "step": 45
    },
    {
      "epoch": 1.4050632911392404,
      "grad_norm": 0.5024846295352086,
      "learning_rate": 1.9360778652005416e-05,
      "loss": 0.4986,
      "step": 46
    },
    {
      "epoch": 1.4354430379746836,
      "grad_norm": 0.3371982793352154,
      "learning_rate": 1.9304652963070868e-05,
      "loss": 0.4609,
      "step": 47
    },
    {
      "epoch": 1.4658227848101266,
      "grad_norm": 0.3490217572797855,
      "learning_rate": 1.9246254276578175e-05,
      "loss": 0.4671,
      "step": 48
    },
    {
      "epoch": 1.4962025316455696,
      "grad_norm": 0.38868867411004315,
      "learning_rate": 1.9185596858515797e-05,
      "loss": 0.4577,
      "step": 49
    },
    {
      "epoch": 1.5265822784810128,
      "grad_norm": 0.42412542915142576,
      "learning_rate": 1.9122695526648968e-05,
      "loss": 0.486,
      "step": 50
    },
    {
      "epoch": 1.5569620253164556,
      "grad_norm": 0.4088838516450633,
      "learning_rate": 1.905756564689991e-05,
      "loss": 0.4384,
      "step": 51
    },
    {
      "epoch": 1.5873417721518988,
      "grad_norm": 0.3833188082344623,
      "learning_rate": 1.8990223129594146e-05,
      "loss": 0.4443,
      "step": 52
    },
    {
      "epoch": 1.6177215189873417,
      "grad_norm": 0.4527768936233557,
      "learning_rate": 1.8920684425573865e-05,
      "loss": 0.4663,
      "step": 53
    },
    {
      "epoch": 1.6481012658227847,
      "grad_norm": 0.38073567725179286,
      "learning_rate": 1.884896652217917e-05,
      "loss": 0.4824,
      "step": 54
    },
    {
      "epoch": 1.678481012658228,
      "grad_norm": 0.385620749356317,
      "learning_rate": 1.877508693909831e-05,
      "loss": 0.4677,
      "step": 55
    },
    {
      "epoch": 1.7088607594936709,
      "grad_norm": 0.4071801379977593,
      "learning_rate": 1.8699063724087905e-05,
      "loss": 0.4631,
      "step": 56
    },
    {
      "epoch": 1.7392405063291139,
      "grad_norm": 0.42731940915469707,
      "learning_rate": 1.862091544856407e-05,
      "loss": 0.4568,
      "step": 57
    },
    {
      "epoch": 1.769620253164557,
      "grad_norm": 0.3388107941825912,
      "learning_rate": 1.854066120306571e-05,
      "loss": 0.4375,
      "step": 58
    },
    {
      "epoch": 1.8,
      "grad_norm": 0.43264860692323054,
      "learning_rate": 1.8458320592590976e-05,
      "loss": 0.4795,
      "step": 59
    },
    {
      "epoch": 1.830379746835443,
      "grad_norm": 0.3823967868290758,
      "learning_rate": 1.837391373180801e-05,
      "loss": 0.4611,
      "step": 60
    },
    {
      "epoch": 1.8607594936708862,
      "grad_norm": 0.4186425213226183,
      "learning_rate": 1.8287461240141217e-05,
      "loss": 0.4669,
      "step": 61
    },
    {
      "epoch": 1.891139240506329,
      "grad_norm": 0.3703454303387632,
      "learning_rate": 1.8198984236734246e-05,
      "loss": 0.4557,
      "step": 62
    },
    {
      "epoch": 1.9215189873417722,
      "grad_norm": 0.3736106931482273,
      "learning_rate": 1.8108504335290852e-05,
      "loss": 0.4344,
      "step": 63
    },
    {
      "epoch": 1.9518987341772152,
      "grad_norm": 0.39212975675707035,
      "learning_rate": 1.8016043638794975e-05,
      "loss": 0.4554,
      "step": 64
    },
    {
      "epoch": 1.9822784810126581,
      "grad_norm": 0.4513720869681548,
      "learning_rate": 1.7921624734111292e-05,
      "loss": 0.5623,
      "step": 65
    },
    {
      "epoch": 2.020253164556962,
      "grad_norm": 0.5971726879383622,
      "learning_rate": 1.7825270686467567e-05,
      "loss": 0.517,
      "step": 66
    },
    {
      "epoch": 2.050632911392405,
      "grad_norm": 0.46712002103063616,
      "learning_rate": 1.7727005033820117e-05,
      "loss": 0.4231,
      "step": 67
    },
    {
      "epoch": 2.081012658227848,
      "grad_norm": 0.4189386682745267,
      "learning_rate": 1.762685178110382e-05,
      "loss": 0.405,
      "step": 68
    },
    {
      "epoch": 2.1113924050632913,
      "grad_norm": 0.5493762611884587,
      "learning_rate": 1.752483539436807e-05,
      "loss": 0.4388,
      "step": 69
    },
    {
      "epoch": 2.141772151898734,
      "grad_norm": 0.5178546231991467,
      "learning_rate": 1.7420980794800013e-05,
      "loss": 0.4169,
      "step": 70
    },
    {
      "epoch": 2.1721518987341772,
      "grad_norm": 0.40664079692056654,
      "learning_rate": 1.731531335263669e-05,
      "loss": 0.3984,
      "step": 71
    },
    {
      "epoch": 2.2025316455696204,
      "grad_norm": 0.49767340979477503,
      "learning_rate": 1.720785888096743e-05,
      "loss": 0.4132,
      "step": 72
    },
    {
      "epoch": 2.232911392405063,
      "grad_norm": 0.36495870737503733,
      "learning_rate": 1.7098643629428035e-05,
      "loss": 0.4062,
      "step": 73
    },
    {
      "epoch": 2.2632911392405064,
      "grad_norm": 0.48978226775847694,
      "learning_rate": 1.698769427778842e-05,
      "loss": 0.4129,
      "step": 74
    },
    {
      "epoch": 2.293670886075949,
      "grad_norm": 0.4112454996416562,
      "learning_rate": 1.687503792943506e-05,
      "loss": 0.4165,
      "step": 75
    },
    {
      "epoch": 2.3240506329113924,
      "grad_norm": 0.48272726495216617,
      "learning_rate": 1.6760702104750046e-05,
      "loss": 0.3878,
      "step": 76
    },
    {
      "epoch": 2.3544303797468356,
      "grad_norm": 0.4391820437847134,
      "learning_rate": 1.664471473438822e-05,
      "loss": 0.4001,
      "step": 77
    },
    {
      "epoch": 2.3848101265822783,
      "grad_norm": 0.4542156038994038,
      "learning_rate": 1.6527104152454096e-05,
      "loss": 0.3871,
      "step": 78
    },
    {
      "epoch": 2.4151898734177215,
      "grad_norm": 0.39387687088459095,
      "learning_rate": 1.6407899089580263e-05,
      "loss": 0.3907,
      "step": 79
    },
    {
      "epoch": 2.4455696202531647,
      "grad_norm": 0.3511312711326436,
      "learning_rate": 1.628712866590885e-05,
      "loss": 0.4172,
      "step": 80
    },
    {
      "epoch": 2.4759493670886075,
      "grad_norm": 0.41925040043693396,
      "learning_rate": 1.6164822383977912e-05,
      "loss": 0.4126,
      "step": 81
    },
    {
      "epoch": 2.5063291139240507,
      "grad_norm": 0.37811810641092347,
      "learning_rate": 1.604101012151436e-05,
      "loss": 0.406,
      "step": 82
    },
    {
      "epoch": 2.536708860759494,
      "grad_norm": 0.40554987345889937,
      "learning_rate": 1.5915722124135227e-05,
      "loss": 0.4078,
      "step": 83
    },
    {
      "epoch": 2.5670886075949366,
      "grad_norm": 0.37778791497437947,
      "learning_rate": 1.5788988997959115e-05,
      "loss": 0.3957,
      "step": 84
    },
    {
      "epoch": 2.59746835443038,
      "grad_norm": 0.33785598659802735,
      "learning_rate": 1.5660841702129533e-05,
      "loss": 0.4149,
      "step": 85
    },
    {
      "epoch": 2.6278481012658226,
      "grad_norm": 0.4206720029808489,
      "learning_rate": 1.5531311541251995e-05,
      "loss": 0.41,
      "step": 86
    },
    {
      "epoch": 2.6582278481012658,
      "grad_norm": 0.32357532653393634,
      "learning_rate": 1.540043015774676e-05,
      "loss": 0.4022,
      "step": 87
    },
    {
      "epoch": 2.688607594936709,
      "grad_norm": 0.3855413521175904,
      "learning_rate": 1.5268229524119007e-05,
      "loss": 0.3878,
      "step": 88
    },
    {
      "epoch": 2.7189873417721517,
      "grad_norm": 0.3606649896198645,
      "learning_rate": 1.513474193514842e-05,
      "loss": 0.4113,
      "step": 89
    },
    {
      "epoch": 2.749367088607595,
      "grad_norm": 0.380809979287942,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.3945,
      "step": 90
    },
    {
      "epoch": 2.779746835443038,
      "grad_norm": 0.3195477665445599,
      "learning_rate": 1.4864036634258112e-05,
      "loss": 0.3926,
      "step": 91
    },
    {
      "epoch": 2.810126582278481,
      "grad_norm": 0.4127265000543691,
      "learning_rate": 1.4726885051885654e-05,
      "loss": 0.4017,
      "step": 92
    },
    {
      "epoch": 2.840506329113924,
      "grad_norm": 0.37566148495430407,
      "learning_rate": 1.4588578757110359e-05,
      "loss": 0.4106,
      "step": 93
    },
    {
      "epoch": 2.8708860759493673,
      "grad_norm": 0.3202753523663073,
      "learning_rate": 1.4449151536240167e-05,
      "loss": 0.401,
      "step": 94
    },
    {
      "epoch": 2.90126582278481,
      "grad_norm": 0.35443238121748605,
      "learning_rate": 1.4308637449409705e-05,
      "loss": 0.3904,
      "step": 95
    },
    {
      "epoch": 2.9316455696202532,
      "grad_norm": 0.2999572051502914,
      "learning_rate": 1.4167070822259868e-05,
      "loss": 0.4236,
      "step": 96
    },
    {
      "epoch": 2.962025316455696,
      "grad_norm": 0.39881424254012526,
      "learning_rate": 1.402448623755254e-05,
      "loss": 0.4107,
      "step": 97
    },
    {
      "epoch": 2.992405063291139,
      "grad_norm": 0.467202088309694,
      "learning_rate": 1.3880918526722497e-05,
      "loss": 0.5364,
      "step": 98
    },
    {
      "epoch": 3.030379746835443,
      "grad_norm": 0.4785912096119782,
      "learning_rate": 1.3736402761368597e-05,
      "loss": 0.3627,
      "step": 99
    },
    {
      "epoch": 3.060759493670886,
      "grad_norm": 0.4123328521253836,
      "learning_rate": 1.3590974244686248e-05,
      "loss": 0.3557,
      "step": 100
    },
    {
      "epoch": 3.091139240506329,
      "grad_norm": 0.40421162698630236,
      "learning_rate": 1.344466850284333e-05,
      "loss": 0.3697,
      "step": 101
    },
    {
      "epoch": 3.1215189873417724,
      "grad_norm": 0.433984861645439,
      "learning_rate": 1.3297521276301666e-05,
      "loss": 0.3646,
      "step": 102
    },
    {
      "epoch": 3.151898734177215,
      "grad_norm": 0.4323593732774851,
      "learning_rate": 1.3149568511086104e-05,
      "loss": 0.3475,
      "step": 103
    },
    {
      "epoch": 3.1822784810126583,
      "grad_norm": 0.4282473502024761,
      "learning_rate": 1.300084635000341e-05,
      "loss": 0.3735,
      "step": 104
    },
    {
      "epoch": 3.212658227848101,
      "grad_norm": 0.450504914129418,
      "learning_rate": 1.2851391123813075e-05,
      "loss": 0.3667,
      "step": 105
    },
    {
      "epoch": 3.2430379746835443,
      "grad_norm": 0.3607456231305978,
      "learning_rate": 1.2701239342352223e-05,
      "loss": 0.3497,
      "step": 106
    },
    {
      "epoch": 3.2734177215189875,
      "grad_norm": 0.4006172961395924,
      "learning_rate": 1.2550427685616767e-05,
      "loss": 0.3627,
      "step": 107
    },
    {
      "epoch": 3.3037974683544302,
      "grad_norm": 0.3644163458288102,
      "learning_rate": 1.239899299480098e-05,
      "loss": 0.3482,
      "step": 108
    },
    {
      "epoch": 3.3341772151898734,
      "grad_norm": 0.3634646293817289,
      "learning_rate": 1.2246972263297718e-05,
      "loss": 0.3582,
      "step": 109
    },
    {
      "epoch": 3.3645569620253166,
      "grad_norm": 0.34636760729479316,
      "learning_rate": 1.2094402627661447e-05,
      "loss": 0.3469,
      "step": 110
    },
    {
      "epoch": 3.3949367088607594,
      "grad_norm": 0.350563977624681,
      "learning_rate": 1.1941321358536278e-05,
      "loss": 0.3578,
      "step": 111
    },
    {
      "epoch": 3.4253164556962026,
      "grad_norm": 0.37023856212680784,
      "learning_rate": 1.1787765851551296e-05,
      "loss": 0.3623,
      "step": 112
    },
    {
      "epoch": 3.4556962025316453,
      "grad_norm": 0.4226430510598916,
      "learning_rate": 1.1633773618185302e-05,
      "loss": 0.3601,
      "step": 113
    },
    {
      "epoch": 3.4860759493670885,
      "grad_norm": 0.34139990505608003,
      "learning_rate": 1.14793822766033e-05,
      "loss": 0.354,
      "step": 114
    },
    {
      "epoch": 3.5164556962025317,
      "grad_norm": 0.44156441256510204,
      "learning_rate": 1.132462954246688e-05,
      "loss": 0.3576,
      "step": 115
    },
    {
      "epoch": 3.546835443037975,
      "grad_norm": 0.35855259068342393,
      "learning_rate": 1.1169553219720828e-05,
      "loss": 0.3524,
      "step": 116
    },
    {
      "epoch": 3.5772151898734177,
      "grad_norm": 0.347968923156379,
      "learning_rate": 1.1014191191358118e-05,
      "loss": 0.3513,
      "step": 117
    },
    {
      "epoch": 3.607594936708861,
      "grad_norm": 0.44418621181905155,
      "learning_rate": 1.085858141016566e-05,
      "loss": 0.3641,
      "step": 118
    },
    {
      "epoch": 3.6379746835443036,
      "grad_norm": 0.31000630762886555,
      "learning_rate": 1.070276188945293e-05,
      "loss": 0.3495,
      "step": 119
    },
    {
      "epoch": 3.668354430379747,
      "grad_norm": 0.3800328620288808,
      "learning_rate": 1.0546770693765859e-05,
      "loss": 0.3466,
      "step": 120
    },
    {
      "epoch": 3.69873417721519,
      "grad_norm": 0.3529018173722371,
      "learning_rate": 1.0390645929588197e-05,
      "loss": 0.3515,
      "step": 121
    },
    {
      "epoch": 3.729113924050633,
      "grad_norm": 0.3037778673267576,
      "learning_rate": 1.0234425736032607e-05,
      "loss": 0.3636,
      "step": 122
    },
    {
      "epoch": 3.759493670886076,
      "grad_norm": 0.354487881138209,
      "learning_rate": 1.007814827552384e-05,
      "loss": 0.3526,
      "step": 123
    },
    {
      "epoch": 3.7898734177215188,
      "grad_norm": 0.2786469757823622,
      "learning_rate": 9.92185172447616e-06,
      "loss": 0.3393,
      "step": 124
    },
    {
      "epoch": 3.820253164556962,
      "grad_norm": 0.33617721823861874,
      "learning_rate": 9.765574263967397e-06,
      "loss": 0.3553,
      "step": 125
    },
    {
      "epoch": 3.850632911392405,
      "grad_norm": 0.2736871828499611,
      "learning_rate": 9.609354070411807e-06,
      "loss": 0.3602,
      "step": 126
    },
    {
      "epoch": 3.8810126582278484,
      "grad_norm": 0.323005194458033,
      "learning_rate": 9.453229306234143e-06,
      "loss": 0.3645,
      "step": 127
    },
    {
      "epoch": 3.911392405063291,
      "grad_norm": 0.2893454787886965,
      "learning_rate": 9.297238110547075e-06,
      "loss": 0.3523,
      "step": 128
    },
    {
      "epoch": 3.9417721518987343,
      "grad_norm": 0.30241738017140496,
      "learning_rate": 9.14141858983434e-06,
      "loss": 0.3565,
      "step": 129
    },
    {
      "epoch": 3.972151898734177,
      "grad_norm": 0.279249192979288,
      "learning_rate": 8.985808808641883e-06,
      "loss": 0.3531,
      "step": 130
    },
    {
      "epoch": 4.010126582278481,
      "grad_norm": 0.3994347972941027,
      "learning_rate": 8.830446780279175e-06,
      "loss": 0.4578,
      "step": 131
    },
    {
      "epoch": 4.040506329113924,
      "grad_norm": 0.3710764347791464,
      "learning_rate": 8.675370457533122e-06,
      "loss": 0.322,
      "step": 132
    },
    {
      "epoch": 4.0708860759493675,
      "grad_norm": 0.260181685428155,
      "learning_rate": 8.520617723396702e-06,
      "loss": 0.3249,
      "step": 133
    },
    {
      "epoch": 4.10126582278481,
      "grad_norm": 0.3506943517407662,
      "learning_rate": 8.366226381814698e-06,
      "loss": 0.3139,
      "step": 134
    },
    {
      "epoch": 4.131645569620253,
      "grad_norm": 0.3770019236558475,
      "learning_rate": 8.212234148448708e-06,
      "loss": 0.3174,
      "step": 135
    },
    {
      "epoch": 4.162025316455696,
      "grad_norm": 0.31703490391329475,
      "learning_rate": 8.058678641463724e-06,
      "loss": 0.3118,
      "step": 136
    },
    {
      "epoch": 4.192405063291139,
      "grad_norm": 0.29908697282259106,
      "learning_rate": 7.905597372338558e-06,
      "loss": 0.3161,
      "step": 137
    },
    {
      "epoch": 4.222784810126583,
      "grad_norm": 0.31704079268137586,
      "learning_rate": 7.753027736702283e-06,
      "loss": 0.3322,
      "step": 138
    },
    {
      "epoch": 4.253164556962025,
      "grad_norm": 0.30595858377507373,
      "learning_rate": 7.601007005199022e-06,
      "loss": 0.3239,
      "step": 139
    },
    {
      "epoch": 4.283544303797468,
      "grad_norm": 0.31903380828850986,
      "learning_rate": 7.449572314383237e-06,
      "loss": 0.3231,
      "step": 140
    },
    {
      "epoch": 4.313924050632911,
      "grad_norm": 0.26748898979731556,
      "learning_rate": 7.298760657647779e-06,
      "loss": 0.2972,
      "step": 141
    },
    {
      "epoch": 4.3443037974683545,
      "grad_norm": 0.3238521787842106,
      "learning_rate": 7.148608876186931e-06,
      "loss": 0.3232,
      "step": 142
    },
    {
      "epoch": 4.374683544303798,
      "grad_norm": 0.32648689746342563,
      "learning_rate": 6.999153649996595e-06,
      "loss": 0.3188,
      "step": 143
    },
    {
      "epoch": 4.405063291139241,
      "grad_norm": 0.26613832347863436,
      "learning_rate": 6.8504314889138956e-06,
      "loss": 0.3095,
      "step": 144
    },
    {
      "epoch": 4.435443037974683,
      "grad_norm": 0.29059459219393696,
      "learning_rate": 6.702478723698336e-06,
      "loss": 0.3095,
      "step": 145
    },
    {
      "epoch": 4.465822784810126,
      "grad_norm": 0.28272423574830846,
      "learning_rate": 6.555331497156671e-06,
      "loss": 0.3231,
      "step": 146
    },
    {
      "epoch": 4.49620253164557,
      "grad_norm": 0.27147630291281827,
      "learning_rate": 6.4090257553137566e-06,
      "loss": 0.3098,
      "step": 147
    },
    {
      "epoch": 4.526582278481013,
      "grad_norm": 0.27218085631978883,
      "learning_rate": 6.263597238631405e-06,
      "loss": 0.3041,
      "step": 148
    },
    {
      "epoch": 4.556962025316456,
      "grad_norm": 0.2907764850583011,
      "learning_rate": 6.119081473277502e-06,
      "loss": 0.3122,
      "step": 149
    },
    {
      "epoch": 4.587341772151898,
      "grad_norm": 0.2610491444711985,
      "learning_rate": 5.975513762447465e-06,
      "loss": 0.299,
      "step": 150
    },
    {
      "epoch": 4.6177215189873415,
      "grad_norm": 0.28102771407725835,
      "learning_rate": 5.832929177740134e-06,
      "loss": 0.3003,
      "step": 151
    },
    {
      "epoch": 4.648101265822785,
      "grad_norm": 0.2853109911022994,
      "learning_rate": 5.6913625505902966e-06,
      "loss": 0.3173,
      "step": 152
    },
    {
      "epoch": 4.678481012658228,
      "grad_norm": 0.25899162810004384,
      "learning_rate": 5.550848463759835e-06,
      "loss": 0.3037,
      "step": 153
    },
    {
      "epoch": 4.708860759493671,
      "grad_norm": 0.2856251013932412,
      "learning_rate": 5.411421242889643e-06,
      "loss": 0.312,
      "step": 154
    },
    {
      "epoch": 4.739240506329114,
      "grad_norm": 0.28563131510003925,
      "learning_rate": 5.273114948114346e-06,
      "loss": 0.3124,
      "step": 155
    },
    {
      "epoch": 4.769620253164557,
      "grad_norm": 0.2904879803329038,
      "learning_rate": 5.135963365741892e-06,
      "loss": 0.3056,
      "step": 156
    },
    {
      "epoch": 4.8,
      "grad_norm": 0.27167234153720476,
      "learning_rate": 5.000000000000003e-06,
      "loss": 0.3114,
      "step": 157
    },
    {
      "epoch": 4.830379746835443,
      "grad_norm": 0.2781315527452187,
      "learning_rate": 4.865258064851579e-06,
      "loss": 0.3129,
      "step": 158
    },
    {
      "epoch": 4.860759493670886,
      "grad_norm": 0.2815845627503612,
      "learning_rate": 4.731770475880995e-06,
      "loss": 0.3112,
      "step": 159
    },
    {
      "epoch": 4.891139240506329,
      "grad_norm": 0.24314085632282065,
      "learning_rate": 4.599569842253244e-06,
      "loss": 0.2981,
      "step": 160
    },
    {
      "epoch": 4.921518987341772,
      "grad_norm": 0.2771334038160195,
      "learning_rate": 4.468688458748006e-06,
      "loss": 0.3217,
      "step": 161
    },
    {
      "epoch": 4.951898734177215,
      "grad_norm": 0.2778708149835448,
      "learning_rate": 4.339158297870469e-06,
      "loss": 0.3005,
      "step": 162
    },
    {
      "epoch": 4.982278481012658,
      "grad_norm": 0.3181763462270913,
      "learning_rate": 4.211011002040885e-06,
      "loss": 0.3692,
      "step": 163
    },
    {
      "epoch": 5.020253164556962,
      "grad_norm": 0.3380744967105364,
      "learning_rate": 4.084277875864776e-06,
      "loss": 0.3518,
      "step": 164
    },
    {
      "epoch": 5.050632911392405,
      "grad_norm": 0.3077116531141232,
      "learning_rate": 3.958989878485644e-06,
      "loss": 0.273,
      "step": 165
    },
    {
      "epoch": 5.0810126582278485,
      "grad_norm": 0.2817477362200768,
      "learning_rate": 3.83517761602209e-06,
      "loss": 0.2816,
      "step": 166
    },
    {
      "epoch": 5.111392405063291,
      "grad_norm": 0.25534644431709314,
      "learning_rate": 3.712871334091154e-06,
      "loss": 0.2676,
      "step": 167
    },
    {
      "epoch": 5.141772151898734,
      "grad_norm": 0.39733207815191407,
      "learning_rate": 3.592100910419738e-06,
      "loss": 0.2798,
      "step": 168
    },
    {
      "epoch": 5.172151898734177,
      "grad_norm": 0.35253652256958323,
      "learning_rate": 3.4728958475459052e-06,
      "loss": 0.2829,
      "step": 169
    },
    {
      "epoch": 5.2025316455696204,
      "grad_norm": 0.27761876273132563,
      "learning_rate": 3.355285265611784e-06,
      "loss": 0.2695,
      "step": 170
    },
    {
      "epoch": 5.232911392405064,
      "grad_norm": 0.27194117271731544,
      "learning_rate": 3.2392978952499553e-06,
      "loss": 0.287,
      "step": 171
    },
    {
      "epoch": 5.263291139240506,
      "grad_norm": 0.2638988503117021,
      "learning_rate": 3.1249620705649417e-06,
      "loss": 0.2743,
      "step": 172
    },
    {
      "epoch": 5.293670886075949,
      "grad_norm": 0.3254658079971883,
      "learning_rate": 3.0123057222115835e-06,
      "loss": 0.3088,
      "step": 173
    },
    {
      "epoch": 5.324050632911392,
      "grad_norm": 0.2630037510542853,
      "learning_rate": 2.9013563705719673e-06,
      "loss": 0.2873,
      "step": 174
    },
    {
      "epoch": 5.3544303797468356,
      "grad_norm": 0.2528822714263439,
      "learning_rate": 2.7921411190325753e-06,
      "loss": 0.2927,
      "step": 175
    },
    {
      "epoch": 5.384810126582279,
      "grad_norm": 0.2628939766078353,
      "learning_rate": 2.6846866473633126e-06,
      "loss": 0.2771,
      "step": 176
    },
    {
      "epoch": 5.415189873417722,
      "grad_norm": 0.26653726514044274,
      "learning_rate": 2.579019205199992e-06,
      "loss": 0.2753,
      "step": 177
    },
    {
      "epoch": 5.445569620253164,
      "grad_norm": 0.2743528017367845,
      "learning_rate": 2.4751646056319334e-06,
      "loss": 0.2903,
      "step": 178
    },
    {
      "epoch": 5.4759493670886075,
      "grad_norm": 0.2563760087561412,
      "learning_rate": 2.373148218896182e-06,
      "loss": 0.2743,
      "step": 179
    },
    {
      "epoch": 5.506329113924051,
      "grad_norm": 0.2563819896924902,
      "learning_rate": 2.2729949661798876e-06,
      "loss": 0.3062,
      "step": 180
    },
    {
      "epoch": 5.536708860759494,
      "grad_norm": 0.22888829009403563,
      "learning_rate": 2.174729313532433e-06,
      "loss": 0.265,
      "step": 181
    },
    {
      "epoch": 5.567088607594937,
      "grad_norm": 0.2641442442655395,
      "learning_rate": 2.078375265888707e-06,
      "loss": 0.2912,
      "step": 182
    },
    {
      "epoch": 5.597468354430379,
      "grad_norm": 0.24293783978668715,
      "learning_rate": 1.9839563612050273e-06,
      "loss": 0.2788,
      "step": 183
    },
    {
      "epoch": 5.627848101265823,
      "grad_norm": 0.234365720480904,
      "learning_rate": 1.8914956647091497e-06,
      "loss": 0.2738,
      "step": 184
    },
    {
      "epoch": 5.658227848101266,
      "grad_norm": 0.23733298634685637,
      "learning_rate": 1.8010157632657544e-06,
      "loss": 0.2886,
      "step": 185
    },
    {
      "epoch": 5.688607594936709,
      "grad_norm": 0.2320333532934732,
      "learning_rate": 1.7125387598587862e-06,
      "loss": 0.281,
      "step": 186
    },
    {
      "epoch": 5.718987341772152,
      "grad_norm": 0.2347008234627476,
      "learning_rate": 1.6260862681919965e-06,
      "loss": 0.2833,
      "step": 187
    },
    {
      "epoch": 5.749367088607595,
      "grad_norm": 0.23470344799587836,
      "learning_rate": 1.5416794074090258e-06,
      "loss": 0.2924,
      "step": 188
    },
    {
      "epoch": 5.779746835443038,
      "grad_norm": 0.22704919723280656,
      "learning_rate": 1.459338796934293e-06,
      "loss": 0.2824,
      "step": 189
    },
    {
      "epoch": 5.810126582278481,
      "grad_norm": 0.22649545986397604,
      "learning_rate": 1.3790845514359363e-06,
      "loss": 0.2829,
      "step": 190
    },
    {
      "epoch": 5.840506329113924,
      "grad_norm": 0.23366827962350076,
      "learning_rate": 1.300936275912098e-06,
      "loss": 0.274,
      "step": 191
    },
    {
      "epoch": 5.870886075949367,
      "grad_norm": 0.23325744323695088,
      "learning_rate": 1.224913060901688e-06,
      "loss": 0.2854,
      "step": 192
    },
    {
      "epoch": 5.9012658227848105,
      "grad_norm": 0.23452041472020713,
      "learning_rate": 1.1510334778208332e-06,
      "loss": 0.2862,
      "step": 193
    },
    {
      "epoch": 5.931645569620253,
      "grad_norm": 0.2128850574376154,
      "learning_rate": 1.0793155744261352e-06,
      "loss": 0.2716,
      "step": 194
    },
    {
      "epoch": 5.962025316455696,
      "grad_norm": 0.21756339881849357,
      "learning_rate": 1.0097768704058542e-06,
      "loss": 0.2769,
      "step": 195
    },
    {
      "epoch": 5.992405063291139,
      "grad_norm": 0.5731366409998244,
      "learning_rate": 9.424343531000968e-07,
      "loss": 0.3663,
      "step": 196
    },
    {
      "epoch": 6.030379746835443,
      "grad_norm": 0.2593936366293042,
      "learning_rate": 8.773044733510338e-07,
      "loss": 0.2701,
      "step": 197
    },
    {
      "epoch": 6.060759493670886,
      "grad_norm": 0.24007943216008748,
      "learning_rate": 8.144031414842012e-07,
      "loss": 0.271,
      "step": 198
    },
    {
      "epoch": 6.091139240506329,
      "grad_norm": 0.24480502414592123,
      "learning_rate": 7.537457234218271e-07,
      "loss": 0.2807,
      "step": 199
    },
    {
      "epoch": 6.121518987341772,
      "grad_norm": 0.22161317113429632,
      "learning_rate": 6.953470369291349e-07,
      "loss": 0.2718,
      "step": 200
    },
    {
      "epoch": 6.151898734177215,
      "grad_norm": 0.21942095908540368,
      "learning_rate": 6.392213479945852e-07,
      "loss": 0.2631,
      "step": 201
    },
    {
      "epoch": 6.182278481012658,
      "grad_norm": 0.22385509803686687,
      "learning_rate": 5.853823673448877e-07,
      "loss": 0.2736,
      "step": 202
    },
    {
      "epoch": 6.2126582278481015,
      "grad_norm": 0.21766702781637903,
      "learning_rate": 5.33843247095659e-07,
      "loss": 0.2682,
      "step": 203
    },
    {
      "epoch": 6.243037974683545,
      "grad_norm": 0.22207085212457575,
      "learning_rate": 4.846165775385459e-07,
      "loss": 0.2605,
      "step": 204
    },
    {
      "epoch": 6.273417721518987,
      "grad_norm": 0.21458752614863258,
      "learning_rate": 4.3771438406559173e-07,
      "loss": 0.2703,
      "step": 205
    },
    {
      "epoch": 6.30379746835443,
      "grad_norm": 0.2271561890743592,
      "learning_rate": 3.931481242315993e-07,
      "loss": 0.2604,
      "step": 206
    },
    {
      "epoch": 6.334177215189873,
      "grad_norm": 0.2305522662239651,
      "learning_rate": 3.5092868495520294e-07,
      "loss": 0.2732,
      "step": 207
    },
    {
      "epoch": 6.364556962025317,
      "grad_norm": 0.23576799695501874,
      "learning_rate": 3.110663798593616e-07,
      "loss": 0.2753,
      "step": 208
    },
    {
      "epoch": 6.39493670886076,
      "grad_norm": 0.21533008163072734,
      "learning_rate": 2.735709467518699e-07,
      "loss": 0.2767,
      "step": 209
    },
    {
      "epoch": 6.425316455696202,
      "grad_norm": 0.21999408184797783,
      "learning_rate": 2.384515452465475e-07,
      "loss": 0.2718,
      "step": 210
    },
    {
      "epoch": 6.455696202531645,
      "grad_norm": 0.2170059043336272,
      "learning_rate": 2.0571675452567997e-07,
      "loss": 0.2727,
      "step": 211
    },
    {
      "epoch": 6.4860759493670885,
      "grad_norm": 0.22180241918305427,
      "learning_rate": 1.7537457124423896e-07,
      "loss": 0.2674,
      "step": 212
    },
    {
      "epoch": 6.516455696202532,
      "grad_norm": 0.22975040085185996,
      "learning_rate": 1.474324075764111e-07,
      "loss": 0.2807,
      "step": 213
    },
    {
      "epoch": 6.546835443037975,
      "grad_norm": 0.2130343186081862,
      "learning_rate": 1.2189708940490653e-07,
      "loss": 0.2633,
      "step": 214
    },
    {
      "epoch": 6.577215189873417,
      "grad_norm": 0.21741201160529977,
      "learning_rate": 9.877485465349057e-08,
      "loss": 0.2715,
      "step": 215
    },
    {
      "epoch": 6.6075949367088604,
      "grad_norm": 0.21709840614985526,
      "learning_rate": 7.807135176314707e-08,
      "loss": 0.2759,
      "step": 216
    },
    {
      "epoch": 6.637974683544304,
      "grad_norm": 0.21754224290691965,
      "learning_rate": 5.979163831223988e-08,
      "loss": 0.2647,
      "step": 217
    },
    {
      "epoch": 6.668354430379747,
      "grad_norm": 0.21962783026780733,
      "learning_rate": 4.394017978101905e-08,
      "loss": 0.2679,
      "step": 218
    },
    {
      "epoch": 6.69873417721519,
      "grad_norm": 0.21340658097304008,
      "learning_rate": 3.0520848460765525e-08,
      "loss": 0.2739,
      "step": 219
    },
    {
      "epoch": 6.729113924050633,
      "grad_norm": 0.22462643761247802,
      "learning_rate": 1.9536922507841227e-08,
      "loss": 0.2761,
      "step": 220
    },
    {
      "epoch": 6.759493670886076,
      "grad_norm": 0.21469231815944442,
      "learning_rate": 1.099108514288627e-08,
      "loss": 0.2678,
      "step": 221
    },
    {
      "epoch": 6.789873417721519,
      "grad_norm": 0.2151131540272051,
      "learning_rate": 4.885423995341088e-09,
      "loss": 0.2651,
      "step": 222
    },
    {
      "epoch": 6.820253164556962,
      "grad_norm": 0.21049176928568308,
      "learning_rate": 1.2214305934699078e-09,
      "loss": 0.257,
      "step": 223
    },
    {
      "epoch": 6.850632911392405,
      "grad_norm": 0.21512970249569155,
      "learning_rate": 0.0,
      "loss": 0.2692,
      "step": 224
    },
    {
      "epoch": 6.850632911392405,
      "step": 224,
      "total_flos": 8.762232759008625e+17,
      "train_loss": 0.39511572915528503,
      "train_runtime": 26231.4222,
      "train_samples_per_second": 0.843,
      "train_steps_per_second": 0.009
    }
  ],
  "logging_steps": 1,
  "max_steps": 224,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 7,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 8.762232759008625e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}