{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9943851768669285,
  "eval_steps": 500,
  "global_step": 666,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.004491858506457047, "grad_norm": 5.756138573429592, "learning_rate": 5.970149253731343e-07, "loss": 0.8288, "step": 1 },
    { "epoch": 0.008983717012914094, "grad_norm": 5.947338437868834, "learning_rate": 1.1940298507462686e-06, "loss": 0.8734, "step": 2 },
    { "epoch": 0.01347557551937114, "grad_norm": 5.771096501859239, "learning_rate": 1.791044776119403e-06, "loss": 0.8544, "step": 3 },
    { "epoch": 0.017967434025828188, "grad_norm": 5.702592300459005, "learning_rate": 2.3880597014925373e-06, "loss": 0.863, "step": 4 },
    { "epoch": 0.022459292532285232, "grad_norm": 5.367273709147744, "learning_rate": 2.9850746268656716e-06, "loss": 0.8559, "step": 5 },
    { "epoch": 0.02695115103874228, "grad_norm": 4.052739653869728, "learning_rate": 3.582089552238806e-06, "loss": 0.8, "step": 6 },
    { "epoch": 0.031443009545199324, "grad_norm": 2.214560916912982, "learning_rate": 4.17910447761194e-06, "loss": 0.7627, "step": 7 },
    { "epoch": 0.035934868051656375, "grad_norm": 2.0231923208383082, "learning_rate": 4.7761194029850745e-06, "loss": 0.7628, "step": 8 },
    { "epoch": 0.04042672655811342, "grad_norm": 3.336964088049228, "learning_rate": 5.37313432835821e-06, "loss": 0.7395, "step": 9 },
    { "epoch": 0.044918585064570464, "grad_norm": 3.916479374443678, "learning_rate": 5.970149253731343e-06, "loss": 0.7524, "step": 10 },
    { "epoch": 0.049410443571027515, "grad_norm": 3.874503373060969, "learning_rate": 6.567164179104478e-06, "loss": 0.7485, "step": 11 },
    { "epoch": 0.05390230207748456, "grad_norm": 3.1020583943944127, "learning_rate": 7.164179104477612e-06, "loss": 0.7087, "step": 12 },
    { "epoch": 0.058394160583941604, "grad_norm": 2.788877719932503, "learning_rate": 7.761194029850747e-06, "loss": 0.686, "step": 13 },
    { "epoch": 0.06288601909039865, "grad_norm": 2.186775595109852, "learning_rate": 8.35820895522388e-06, "loss": 0.6804, "step": 14 },
    { "epoch": 0.06737787759685569, "grad_norm": 1.4560752672355353, "learning_rate": 8.955223880597016e-06, "loss": 0.6614, "step": 15 },
    { "epoch": 0.07186973610331275, "grad_norm": 1.4704165106061833, "learning_rate": 9.552238805970149e-06, "loss": 0.6521, "step": 16 },
    { "epoch": 0.0763615946097698, "grad_norm": 1.8335883147044605, "learning_rate": 1.0149253731343284e-05, "loss": 0.6559, "step": 17 },
    { "epoch": 0.08085345311622684, "grad_norm": 1.5128353405087756, "learning_rate": 1.074626865671642e-05, "loss": 0.6373, "step": 18 },
    { "epoch": 0.08534531162268388, "grad_norm": 0.9579208876340166, "learning_rate": 1.1343283582089553e-05, "loss": 0.6237, "step": 19 },
    { "epoch": 0.08983717012914093, "grad_norm": 1.1626948313414722, "learning_rate": 1.1940298507462686e-05, "loss": 0.6278, "step": 20 },
    { "epoch": 0.09432902863559797, "grad_norm": 1.0883847182592112, "learning_rate": 1.2537313432835823e-05, "loss": 0.6078, "step": 21 },
    { "epoch": 0.09882088714205503, "grad_norm": 0.7691109586699584, "learning_rate": 1.3134328358208957e-05, "loss": 0.6022, "step": 22 },
    { "epoch": 0.10331274564851207, "grad_norm": 0.7992398552607973, "learning_rate": 1.373134328358209e-05, "loss": 0.6001, "step": 23 },
    { "epoch": 0.10780460415496912, "grad_norm": 0.7301939069877366, "learning_rate": 1.4328358208955224e-05, "loss": 0.589, "step": 24 },
    { "epoch": 0.11229646266142616, "grad_norm": 0.6462170534998106, "learning_rate": 1.492537313432836e-05, "loss": 0.589, "step": 25 },
    { "epoch": 0.11678832116788321, "grad_norm": 0.6277123262149377, "learning_rate": 1.5522388059701494e-05, "loss": 0.5683, "step": 26 },
    { "epoch": 0.12128017967434025, "grad_norm": 0.5752127054219104, "learning_rate": 1.6119402985074627e-05, "loss": 0.5769, "step": 27 },
    { "epoch": 0.1257720381807973, "grad_norm": 0.5203323573801774, "learning_rate": 1.671641791044776e-05, "loss": 0.575, "step": 28 },
    { "epoch": 0.13026389668725435, "grad_norm": 0.5421047450106409, "learning_rate": 1.7313432835820894e-05, "loss": 0.5724, "step": 29 },
    { "epoch": 0.13475575519371139, "grad_norm": 0.5078178864848362, "learning_rate": 1.791044776119403e-05, "loss": 0.5712, "step": 30 },
    { "epoch": 0.13924761370016844, "grad_norm": 0.5224270138071347, "learning_rate": 1.8507462686567165e-05, "loss": 0.5711, "step": 31 },
    { "epoch": 0.1437394722066255, "grad_norm": 0.583912305634674, "learning_rate": 1.9104477611940298e-05, "loss": 0.5612, "step": 32 },
    { "epoch": 0.14823133071308253, "grad_norm": 0.4450476433589138, "learning_rate": 1.9701492537313435e-05, "loss": 0.5479, "step": 33 },
    { "epoch": 0.1527231892195396, "grad_norm": 0.49474697311200155, "learning_rate": 2.029850746268657e-05, "loss": 0.5551, "step": 34 },
    { "epoch": 0.15721504772599662, "grad_norm": 0.4059937960937521, "learning_rate": 2.0895522388059702e-05, "loss": 0.5498, "step": 35 },
    { "epoch": 0.16170690623245368, "grad_norm": 0.5246368923875162, "learning_rate": 2.149253731343284e-05, "loss": 0.5409, "step": 36 },
    { "epoch": 0.16619876473891074, "grad_norm": 0.42826044856694495, "learning_rate": 2.2089552238805972e-05, "loss": 0.5391, "step": 37 },
    { "epoch": 0.17069062324536777, "grad_norm": 0.46611137531604413, "learning_rate": 2.2686567164179106e-05, "loss": 0.5464, "step": 38 },
    { "epoch": 0.17518248175182483, "grad_norm": 0.5259113884940679, "learning_rate": 2.3283582089552242e-05, "loss": 0.5429, "step": 39 },
    { "epoch": 0.17967434025828186, "grad_norm": 0.5172211284549297, "learning_rate": 2.3880597014925373e-05, "loss": 0.5463, "step": 40 },
    { "epoch": 0.18416619876473891, "grad_norm": 0.3807141798876446, "learning_rate": 2.447761194029851e-05, "loss": 0.5422, "step": 41 },
    { "epoch": 0.18865805727119594, "grad_norm": 0.3584751239980687, "learning_rate": 2.5074626865671646e-05, "loss": 0.529, "step": 42 },
    { "epoch": 0.193149915777653, "grad_norm": 0.3801129734399443, "learning_rate": 2.5671641791044776e-05, "loss": 0.5278, "step": 43 },
    { "epoch": 0.19764177428411006, "grad_norm": 0.48200014011099895, "learning_rate": 2.6268656716417913e-05, "loss": 0.5353, "step": 44 },
    { "epoch": 0.2021336327905671, "grad_norm": 0.48977570173401624, "learning_rate": 2.686567164179105e-05, "loss": 0.5216, "step": 45 },
    { "epoch": 0.20662549129702415, "grad_norm": 0.6081641770560221, "learning_rate": 2.746268656716418e-05, "loss": 0.5284, "step": 46 },
    { "epoch": 0.21111734980348118, "grad_norm": 0.5555338053801713, "learning_rate": 2.8059701492537317e-05, "loss": 0.5207, "step": 47 },
    { "epoch": 0.21560920830993824, "grad_norm": 0.4184618550275552, "learning_rate": 2.8656716417910447e-05, "loss": 0.5263, "step": 48 },
    { "epoch": 0.2201010668163953, "grad_norm": 0.4075035793011261, "learning_rate": 2.9253731343283584e-05, "loss": 0.5132, "step": 49 },
    { "epoch": 0.22459292532285233, "grad_norm": 0.5568979954866807, "learning_rate": 2.985074626865672e-05, "loss": 0.5338, "step": 50 },
    { "epoch": 0.22908478382930939, "grad_norm": 0.4487298011354864, "learning_rate": 3.044776119402985e-05, "loss": 0.5178, "step": 51 },
    { "epoch": 0.23357664233576642, "grad_norm": 0.49580101191679427, "learning_rate": 3.104477611940299e-05, "loss": 0.5231, "step": 52 },
    { "epoch": 0.23806850084222347, "grad_norm": 0.6837789723633053, "learning_rate": 3.164179104477612e-05, "loss": 0.5167, "step": 53 },
    { "epoch": 0.2425603593486805, "grad_norm": 1.090652931341267, "learning_rate": 3.2238805970149255e-05, "loss": 0.5227, "step": 54 },
    { "epoch": 0.24705221785513756, "grad_norm": 1.204852499046471, "learning_rate": 3.2835820895522395e-05, "loss": 0.5026, "step": 55 },
    { "epoch": 0.2515440763615946, "grad_norm": 0.9029314208020349, "learning_rate": 3.343283582089552e-05, "loss": 0.5159, "step": 56 },
    { "epoch": 0.25603593486805165, "grad_norm": 1.4349651685684675, "learning_rate": 3.402985074626866e-05, "loss": 0.5197, "step": 57 },
    { "epoch": 0.2605277933745087, "grad_norm": 1.2597161023329808, "learning_rate": 3.462686567164179e-05, "loss": 0.5072, "step": 58 },
    { "epoch": 0.26501965188096577, "grad_norm": 1.0097203538944823, "learning_rate": 3.522388059701493e-05, "loss": 0.5179, "step": 59 },
    { "epoch": 0.26951151038742277, "grad_norm": 1.062445258148939, "learning_rate": 3.582089552238806e-05, "loss": 0.511, "step": 60 },
    { "epoch": 0.27400336889387983, "grad_norm": 1.2798620869050041, "learning_rate": 3.6417910447761196e-05, "loss": 0.5083, "step": 61 },
    { "epoch": 0.2784952274003369, "grad_norm": 0.9960672092978539, "learning_rate": 3.701492537313433e-05, "loss": 0.5079, "step": 62 },
    { "epoch": 0.28298708590679394, "grad_norm": 1.2514106567391985, "learning_rate": 3.761194029850747e-05, "loss": 0.5198, "step": 63 },
    { "epoch": 0.287478944413251, "grad_norm": 0.9143361911512172, "learning_rate": 3.8208955223880596e-05, "loss": 0.503, "step": 64 },
    { "epoch": 0.291970802919708, "grad_norm": 1.0605374713348559, "learning_rate": 3.8805970149253736e-05, "loss": 0.5076, "step": 65 },
    { "epoch": 0.29646266142616506, "grad_norm": 1.061984715918565, "learning_rate": 3.940298507462687e-05, "loss": 0.4962, "step": 66 },
    { "epoch": 0.3009545199326221, "grad_norm": 0.6145009607029202, "learning_rate": 4e-05, "loss": 0.5068, "step": 67 },
    { "epoch": 0.3054463784390792, "grad_norm": 0.9037512303336684, "learning_rate": 3.9999724928810754e-05, "loss": 0.4961, "step": 68 },
    { "epoch": 0.30993823694553624, "grad_norm": 1.136568835651128, "learning_rate": 3.999889972280943e-05, "loss": 0.5037, "step": 69 },
    { "epoch": 0.31443009545199324, "grad_norm": 0.6894760993858712, "learning_rate": 3.999752440469506e-05, "loss": 0.4986, "step": 70 },
    { "epoch": 0.3189219539584503, "grad_norm": 1.2157874151617858, "learning_rate": 3.999559901229868e-05, "loss": 0.5016, "step": 71 },
    { "epoch": 0.32341381246490736, "grad_norm": 0.8382017878264113, "learning_rate": 3.99931235985823e-05, "loss": 0.5132, "step": 72 },
    { "epoch": 0.3279056709713644, "grad_norm": 0.8447582246825469, "learning_rate": 3.999009823163741e-05, "loss": 0.4989, "step": 73 },
    { "epoch": 0.3323975294778215, "grad_norm": 0.8475952100417256, "learning_rate": 3.998652299468315e-05, "loss": 0.5007, "step": 74 },
    { "epoch": 0.3368893879842785, "grad_norm": 0.68655726909334, "learning_rate": 3.9982397986063984e-05, "loss": 0.5044, "step": 75 },
    { "epoch": 0.34138124649073553, "grad_norm": 0.681448342387516, "learning_rate": 3.9977723319247e-05, "loss": 0.4931, "step": 76 },
    { "epoch": 0.3458731049971926, "grad_norm": 0.6973928515400792, "learning_rate": 3.997249912281883e-05, "loss": 0.5071, "step": 77 },
    { "epoch": 0.35036496350364965, "grad_norm": 0.7534446877100698, "learning_rate": 3.996672554048206e-05, "loss": 0.489, "step": 78 },
    { "epoch": 0.35485682201010665, "grad_norm": 0.5876864562704012, "learning_rate": 3.996040273105132e-05, "loss": 0.5025, "step": 79 },
    { "epoch": 0.3593486805165637, "grad_norm": 0.7509417522679511, "learning_rate": 3.995353086844885e-05, "loss": 0.5037, "step": 80 },
    { "epoch": 0.36384053902302077, "grad_norm": 0.9370720736320995, "learning_rate": 3.9946110141699824e-05, "loss": 0.5015, "step": 81 },
    { "epoch": 0.36833239752947783, "grad_norm": 0.8746680364480922, "learning_rate": 3.993814075492704e-05, "loss": 0.5057, "step": 82 },
    { "epoch": 0.3728242560359349, "grad_norm": 0.6883866514947267, "learning_rate": 3.992962292734537e-05, "loss": 0.4969, "step": 83 },
    { "epoch": 0.3773161145423919, "grad_norm": 0.6632486075655415, "learning_rate": 3.99205568932557e-05, "loss": 0.4958, "step": 84 },
    { "epoch": 0.38180797304884895, "grad_norm": 0.657939795964604, "learning_rate": 3.991094290203853e-05, "loss": 0.4988, "step": 85 },
    { "epoch": 0.386299831555306, "grad_norm": 0.5909326412012158, "learning_rate": 3.990078121814704e-05, "loss": 0.4814, "step": 86 },
    { "epoch": 0.39079169006176306, "grad_norm": 0.7264954531824589, "learning_rate": 3.989007212109988e-05, "loss": 0.4814, "step": 87 },
    { "epoch": 0.3952835485682201, "grad_norm": 0.6058324232051195, "learning_rate": 3.987881590547347e-05, "loss": 0.4855, "step": 88 },
    { "epoch": 0.3997754070746771, "grad_norm": 0.41517960793578307, "learning_rate": 3.9867012880893864e-05, "loss": 0.4942, "step": 89 },
    { "epoch": 0.4042672655811342, "grad_norm": 0.5301520567555827, "learning_rate": 3.985466337202826e-05, "loss": 0.4918, "step": 90 },
    { "epoch": 0.40875912408759124, "grad_norm": 0.4803058585218722, "learning_rate": 3.984176771857607e-05, "loss": 0.4924, "step": 91 },
    { "epoch": 0.4132509825940483, "grad_norm": 0.5827819420549525, "learning_rate": 3.982832627525956e-05, "loss": 0.4845, "step": 92 },
    { "epoch": 0.41774284110050536, "grad_norm": 0.4942144551780411, "learning_rate": 3.981433941181412e-05, "loss": 0.4866, "step": 93 },
    { "epoch": 0.42223469960696236, "grad_norm": 0.4922884710509112, "learning_rate": 3.979980751297806e-05, "loss": 0.4943, "step": 94 },
    { "epoch": 0.4267265581134194, "grad_norm": 0.5177350196275904, "learning_rate": 3.9784730978482046e-05, "loss": 0.493, "step": 95 },
    { "epoch": 0.4312184166198765, "grad_norm": 0.4157875710819774, "learning_rate": 3.976911022303812e-05, "loss": 0.4733, "step": 96 },
    { "epoch": 0.43571027512633353, "grad_norm": 0.34462169915815744, "learning_rate": 3.975294567632824e-05, "loss": 0.4915, "step": 97 },
    { "epoch": 0.4402021336327906, "grad_norm": 0.4251664365468233, "learning_rate": 3.9736237782992535e-05, "loss": 0.4858, "step": 98 },
    { "epoch": 0.4446939921392476, "grad_norm": 0.5389558331738674, "learning_rate": 3.9718987002617e-05, "loss": 0.4656, "step": 99 },
    { "epoch": 0.44918585064570465, "grad_norm": 0.46420165649755885, "learning_rate": 3.97011938097209e-05, "loss": 0.4771, "step": 100 },
    { "epoch": 0.4536777091521617, "grad_norm": 0.45799374669882925, "learning_rate": 3.9682858693743725e-05, "loss": 0.4891, "step": 101 },
    { "epoch": 0.45816956765861877, "grad_norm": 0.46150767448038055, "learning_rate": 3.966398215903167e-05, "loss": 0.4847, "step": 102 },
    { "epoch": 0.4626614261650758, "grad_norm": 0.4066261743648596, "learning_rate": 3.964456472482384e-05, "loss": 0.4852, "step": 103 },
    { "epoch": 0.46715328467153283, "grad_norm": 0.42196128326013826, "learning_rate": 3.962460692523789e-05, "loss": 0.4752, "step": 104 },
    { "epoch": 0.4716451431779899, "grad_norm": 0.7090421027312218, "learning_rate": 3.960410930925539e-05, "loss": 0.472, "step": 105 },
    { "epoch": 0.47613700168444695, "grad_norm": 0.7582871914029802, "learning_rate": 3.958307244070671e-05, "loss": 0.481, "step": 106 },
    { "epoch": 0.480628860190904, "grad_norm": 0.8515082639665583, "learning_rate": 3.956149689825549e-05, "loss": 0.4813, "step": 107 },
    { "epoch": 0.485120718697361, "grad_norm": 0.8379247205573097, "learning_rate": 3.953938327538274e-05, "loss": 0.4922, "step": 108 },
    { "epoch": 0.48961257720381807, "grad_norm": 0.6262504598078678, "learning_rate": 3.951673218037051e-05, "loss": 0.4898, "step": 109 },
    { "epoch": 0.4941044357102751, "grad_norm": 0.5954855460815923, "learning_rate": 3.949354423628518e-05, "loss": 0.4843, "step": 110 },
    { "epoch": 0.4985962942167322, "grad_norm": 0.6408649753178594, "learning_rate": 3.946982008096028e-05, "loss": 0.4865, "step": 111 },
    { "epoch": 0.5030881527231892, "grad_norm": 0.5208640539915395, "learning_rate": 3.9445560366978953e-05, "loss": 0.4741, "step": 112 },
    { "epoch": 0.5075800112296462, "grad_norm": 0.7079534048734941, "learning_rate": 3.9420765761656056e-05, "loss": 0.4911, "step": 113 },
    { "epoch": 0.5120718697361033, "grad_norm": 0.7712779549004658, "learning_rate": 3.939543694701974e-05, "loss": 0.4867, "step": 114 },
    { "epoch": 0.5165637282425604, "grad_norm": 0.6111039593290515, "learning_rate": 3.936957461979272e-05, "loss": 0.4727, "step": 115 },
    { "epoch": 0.5210555867490174, "grad_norm": 0.7352458313843772, "learning_rate": 3.9343179491373126e-05, "loss": 0.4944, "step": 116 },
    { "epoch": 0.5255474452554745, "grad_norm": 0.767990645394538, "learning_rate": 3.931625228781486e-05, "loss": 0.4793, "step": 117 },
    { "epoch": 0.5300393037619315, "grad_norm": 0.7323956123336426, "learning_rate": 3.928879374980773e-05, "loss": 0.4857, "step": 118 },
    { "epoch": 0.5345311622683886, "grad_norm": 0.7350487217816434, "learning_rate": 3.926080463265701e-05, "loss": 0.4791, "step": 119 },
    { "epoch": 0.5390230207748455, "grad_norm": 0.7211426627444099, "learning_rate": 3.9232285706262684e-05, "loss": 0.4786, "step": 120 },
    { "epoch": 0.5435148792813026, "grad_norm": 0.8314018064295706, "learning_rate": 3.9203237755098226e-05, "loss": 0.4865, "step": 121 },
    { "epoch": 0.5480067377877597, "grad_norm": 0.39138399334727947, "learning_rate": 3.9173661578189095e-05, "loss": 0.4767, "step": 122 },
    { "epoch": 0.5524985962942167, "grad_norm": 0.6700113450907816, "learning_rate": 3.914355798909071e-05, "loss": 0.4616, "step": 123 },
    { "epoch": 0.5569904548006738, "grad_norm": 0.794652726786288, "learning_rate": 3.9112927815866075e-05, "loss": 0.4891, "step": 124 },
    { "epoch": 0.5614823133071308, "grad_norm": 0.46358371366435136, "learning_rate": 3.9081771901063014e-05, "loss": 0.4895, "step": 125 },
    { "epoch": 0.5659741718135879, "grad_norm": 0.5008430258741339, "learning_rate": 3.9050091101690963e-05, "loss": 0.4676, "step": 126 },
    { "epoch": 0.570466030320045, "grad_norm": 0.41758826319783765, "learning_rate": 3.901788628919745e-05, "loss": 0.4693, "step": 127 },
    { "epoch": 0.574957888826502, "grad_norm": 0.44254185274885993, "learning_rate": 3.8985158349444085e-05, "loss": 0.4738, "step": 128 },
    { "epoch": 0.5794497473329591, "grad_norm": 0.385337779857357, "learning_rate": 3.8951908182682195e-05, "loss": 0.4792, "step": 129 },
    { "epoch": 0.583941605839416, "grad_norm": 0.4734307244779799, "learning_rate": 3.8918136703528064e-05, "loss": 0.4647, "step": 130 },
    { "epoch": 0.5884334643458731, "grad_norm": 0.5245674853110656, "learning_rate": 3.88838448409378e-05, "loss": 0.4796, "step": 131 },
    { "epoch": 0.5929253228523301, "grad_norm": 0.3610943160498356, "learning_rate": 3.8849033538181726e-05, "loss": 0.4625, "step": 132 },
    { "epoch": 0.5974171813587872, "grad_norm": 0.47445421763073464, "learning_rate": 3.881370375281851e-05, "loss": 0.4689, "step": 133 },
    { "epoch": 0.6019090398652442, "grad_norm": 0.45596469221574276, "learning_rate": 3.877785645666874e-05, "loss": 0.475, "step": 134 },
    { "epoch": 0.6064008983717013, "grad_norm": 0.563642796061016, "learning_rate": 3.874149263578827e-05, "loss": 0.4755, "step": 135 },
    { "epoch": 0.6108927568781584, "grad_norm": 0.5657743885092765, "learning_rate": 3.8704613290441036e-05, "loss": 0.4785, "step": 136 },
    { "epoch": 0.6153846153846154, "grad_norm": 0.5928016386339262, "learning_rate": 3.8667219435071575e-05, "loss": 0.4722, "step": 137 },
    { "epoch": 0.6198764738910725, "grad_norm": 0.4406018741965115, "learning_rate": 3.862931209827712e-05, "loss": 0.4682, "step": 138 },
    { "epoch": 0.6243683323975294, "grad_norm": 0.5314884866958551, "learning_rate": 3.859089232277929e-05, "loss": 0.474, "step": 139 },
    { "epoch": 0.6288601909039865, "grad_norm": 0.5119316621437477, "learning_rate": 3.8551961165395414e-05, "loss": 0.4671, "step": 140 },
    { "epoch": 0.6333520494104435, "grad_norm": 0.5560114427721705, "learning_rate": 3.851251969700947e-05, "loss": 0.4776, "step": 141 },
    { "epoch": 0.6378439079169006, "grad_norm": 0.6604299398145215, "learning_rate": 3.8472569002542634e-05, "loss": 0.4678, "step": 142 },
    { "epoch": 0.6423357664233577, "grad_norm": 0.4364878470478741, "learning_rate": 3.8432110180923386e-05, "loss": 0.4607, "step": 143 },
    { "epoch": 0.6468276249298147, "grad_norm": 0.4069893493259037, "learning_rate": 3.8391144345057356e-05, "loss": 0.4624, "step": 144 },
    { "epoch": 0.6513194834362718, "grad_norm": 0.45845672915819824, "learning_rate": 3.834967262179667e-05, "loss": 0.4785, "step": 145 },
    { "epoch": 0.6558113419427288, "grad_norm": 0.5029246830686521, "learning_rate": 3.830769615190893e-05, "loss": 0.4717, "step": 146 },
    { "epoch": 0.6603032004491859, "grad_norm": 0.4558554097937835, "learning_rate": 3.826521609004591e-05, "loss": 0.4703, "step": 147 },
    { "epoch": 0.664795058955643, "grad_norm": 0.3535331612175166, "learning_rate": 3.822223360471171e-05, "loss": 0.4695, "step": 148 },
    { "epoch": 0.6692869174620999, "grad_norm": 0.4598040172374265, "learning_rate": 3.817874987823067e-05, "loss": 0.4727, "step": 149 },
    { "epoch": 0.673778775968557, "grad_norm": 0.43937173686518055, "learning_rate": 3.813476610671482e-05, "loss": 0.4657, "step": 150 },
    { "epoch": 0.678270634475014, "grad_norm": 0.5148568012258624, "learning_rate": 3.8090283500031e-05, "loss": 0.4806, "step": 151 },
    { "epoch": 0.6827624929814711, "grad_norm": 0.5369266451434185, "learning_rate": 3.804530328176756e-05, "loss": 0.4875, "step": 152 },
    { "epoch": 0.6872543514879281, "grad_norm": 0.3278656749083231, "learning_rate": 3.799982668920072e-05, "loss": 0.4739, "step": 153 },
    { "epoch": 0.6917462099943852, "grad_norm": 0.3447113810996204, "learning_rate": 3.7953854973260504e-05, "loss": 0.4674, "step": 154 },
    { "epoch": 0.6962380685008422, "grad_norm": 0.43927011073196653, "learning_rate": 3.7907389398496386e-05, "loss": 0.4724, "step": 155 },
    { "epoch": 0.7007299270072993, "grad_norm": 0.35027515620654415, "learning_rate": 3.786043124304244e-05, "loss": 0.4703, "step": 156 },
    { "epoch": 0.7052217855137564, "grad_norm": 0.37499676238566443, "learning_rate": 3.781298179858225e-05, "loss": 0.4627, "step": 157 },
    { "epoch": 0.7097136440202133, "grad_norm": 0.3246905362867196, "learning_rate": 3.776504237031332e-05, "loss": 0.4684, "step": 158 },
    { "epoch": 0.7142055025266704, "grad_norm": 0.3967779625241376, "learning_rate": 3.77166142769112e-05, "loss": 0.4815, "step": 159 },
    { "epoch": 0.7186973610331274, "grad_norm": 0.42089556754678914, "learning_rate": 3.766769885049322e-05, "loss": 0.4753, "step": 160 },
    { "epoch": 0.7231892195395845, "grad_norm": 0.3848249197299972, "learning_rate": 3.761829743658184e-05, "loss": 0.4647, "step": 161 },
    { "epoch": 0.7276810780460415, "grad_norm": 0.4756249626235572, "learning_rate": 3.7568411394067606e-05, "loss": 0.4675, "step": 162 },
    { "epoch": 0.7321729365524986, "grad_norm": 0.5528207693868079, "learning_rate": 3.751804209517184e-05, "loss": 0.4517, "step": 163 },
    { "epoch": 0.7366647950589557, "grad_norm": 0.5693017917205246, "learning_rate": 3.746719092540884e-05, "loss": 0.4584, "step": 164 },
    { "epoch": 0.7411566535654127, "grad_norm": 0.6155167318871719, "learning_rate": 3.741585928354775e-05, "loss": 0.4745, "step": 165 },
    { "epoch": 0.7456485120718698, "grad_norm": 0.6702248490370594, "learning_rate": 3.7364048581574185e-05, "loss": 0.4735, "step": 166 },
    { "epoch": 0.7501403705783268, "grad_norm": 0.6475627890040105, "learning_rate": 3.7311760244651265e-05, "loss": 0.4777, "step": 167 },
    { "epoch": 0.7546322290847838, "grad_norm": 0.5094933135604696, "learning_rate": 3.72589957110805e-05, "loss": 0.4615, "step": 168 },
    { "epoch": 0.7591240875912408, "grad_norm": 0.5452917123868611, "learning_rate": 3.7205756432262185e-05, "loss": 0.4669, "step": 169 },
    { "epoch": 0.7636159460976979, "grad_norm": 0.5503231929243207, "learning_rate": 3.7152043872655494e-05, "loss": 0.4658, "step": 170 },
    { "epoch": 0.768107804604155, "grad_norm": 0.39254314661573575, "learning_rate": 3.709785950973819e-05, "loss": 0.473, "step": 171 },
    { "epoch": 0.772599663110612, "grad_norm": 0.4379163079244134, "learning_rate": 3.7043204833965996e-05, "loss": 0.4641, "step": 172 },
    { "epoch": 0.7770915216170691, "grad_norm": 0.533567744126813, "learning_rate": 3.698808134873157e-05, "loss": 0.4549, "step": 173 },
    { "epoch": 0.7815833801235261, "grad_norm": 0.5082188562188028, "learning_rate": 3.6932490570323186e-05, "loss": 0.4688, "step": 174 },
    { "epoch": 0.7860752386299832, "grad_norm": 0.4507926710511767, "learning_rate": 3.687643402788299e-05, "loss": 0.4722, "step": 175 },
    { "epoch": 0.7905670971364402, "grad_norm": 0.43474587363733824, "learning_rate": 3.681991326336495e-05, "loss": 0.4712, "step": 176 },
    { "epoch": 0.7950589556428973, "grad_norm": 0.36945212717566794, "learning_rate": 3.676292983149248e-05, "loss": 0.4518, "step": 177 },
    { "epoch": 0.7995508141493542, "grad_norm": 0.4333955065374179, "learning_rate": 3.67054852997156e-05, "loss": 0.46, "step": 178 },
    { "epoch": 0.8040426726558113, "grad_norm": 0.43120311771642844, "learning_rate": 3.664758124816788e-05, "loss": 0.4635, "step": 179 },
    { "epoch": 0.8085345311622684, "grad_norm": 0.32260325013130875, "learning_rate": 3.6589219269622965e-05, "loss": 0.4685, "step": 180 },
    { "epoch": 0.8130263896687254, "grad_norm": 0.4153591026549294, "learning_rate": 3.653040096945073e-05, "loss": 0.4773, "step": 181 },
    { "epoch": 0.8175182481751825, "grad_norm": 0.44017377780676453, "learning_rate": 3.647112796557314e-05, "loss": 0.4598, "step": 182 },
    { "epoch": 0.8220101066816395, "grad_norm": 0.40755848815368373, "learning_rate": 3.641140188841978e-05, "loss": 0.4649, "step": 183 },
    { "epoch": 0.8265019651880966, "grad_norm": 0.39345400941458303, "learning_rate": 3.635122438088296e-05, "loss": 0.4574, "step": 184 },
    { "epoch": 0.8309938236945537, "grad_norm": 0.34029509406129693, "learning_rate": 3.629059709827252e-05, "loss": 0.4569, "step": 185 },
    { "epoch": 0.8354856822010107, "grad_norm": 0.3030115957527451, "learning_rate": 3.6229521708270336e-05, "loss": 0.4618, "step": 186 },
    { "epoch": 0.8399775407074677, "grad_norm": 0.3322897737890596, "learning_rate": 3.616799989088444e-05, "loss": 0.464, "step": 187 },
    { "epoch": 0.8444693992139247, "grad_norm": 0.3374180826538677, "learning_rate": 3.610603333840276e-05, "loss": 0.4549, "step": 188 },
    { "epoch": 0.8489612577203818, "grad_norm": 0.2985612708602053, "learning_rate": 3.604362375534664e-05, "loss": 0.4556, "step": 189 },
    { "epoch": 0.8534531162268388, "grad_norm": 0.3406868122612806, "learning_rate": 3.598077285842389e-05, "loss": 0.4704, "step": 190 },
    { "epoch": 0.8579449747332959, "grad_norm": 0.4141779578206493, "learning_rate": 3.5917482376481615e-05, "loss": 0.4719, "step": 191 },
    { "epoch": 0.862436833239753, "grad_norm": 0.39908734482222585, "learning_rate": 3.5853754050458626e-05, "loss": 0.468, "step": 192 },
    { "epoch": 0.86692869174621, "grad_norm": 0.283109833633429, "learning_rate": 3.578958963333756e-05, "loss": 0.4634, "step": 193 },
    { "epoch": 0.8714205502526671, "grad_norm": 0.3596339250669256, "learning_rate": 3.572499089009668e-05, "loss": 0.4749, "step": 194 },
    { "epoch": 0.8759124087591241, "grad_norm": 0.46352643849956215, "learning_rate": 3.565995959766129e-05, "loss": 0.4504, "step": 195 },
    { "epoch": 0.8804042672655812, "grad_norm": 0.33961299920639637, "learning_rate": 3.55944975448549e-05, "loss": 0.4536, "step": 196 },
    { "epoch": 0.8848961257720381, "grad_norm": 0.41429827329115776, "learning_rate": 3.552860653234996e-05, "loss": 0.4617, "step": 197 },
    { "epoch": 0.8893879842784952, "grad_norm": 0.4187878964853087, "learning_rate": 3.54622883726184e-05, "loss": 0.4705, "step": 198 },
    { "epoch": 0.8938798427849522, "grad_norm": 0.35608886915257115, "learning_rate": 3.539554488988172e-05, "loss": 0.4603, "step": 199 },
    { "epoch": 0.8983717012914093, "grad_norm": 0.44882027765206955, "learning_rate": 3.5328377920060844e-05, "loss": 0.4474, "step": 200 },
    { "epoch": 0.9028635597978664, "grad_norm": 0.42596830117453416, "learning_rate": 3.5260789310725596e-05, "loss": 0.4598, "step": 201 },
    { "epoch": 0.9073554183043234, "grad_norm": 0.3425619312496081, "learning_rate": 3.519278092104389e-05, "loss": 0.4753, "step": 202 },
    { "epoch": 0.9118472768107805, "grad_norm": 0.30751297079342615, "learning_rate": 3.512435462173059e-05, "loss": 0.4537, "step": 203 },
    { "epoch": 0.9163391353172375, "grad_norm": 0.23048760802560575, "learning_rate": 3.505551229499605e-05, "loss": 0.4543, "step": 204 },
    { "epoch": 0.9208309938236946, "grad_norm": 0.30292078378914283, "learning_rate": 3.4986255834494336e-05, "loss": 0.4643, "step": 205 },
    { "epoch": 0.9253228523301515, "grad_norm": 0.301350875164664, "learning_rate": 3.4916587145271146e-05, "loss": 0.469, "step": 206 },
    { "epoch": 0.9298147108366086, "grad_norm": 0.29327535553055734, "learning_rate": 3.484650814371139e-05, "loss": 0.4574, "step": 207 },
    { "epoch": 0.9343065693430657, "grad_norm": 0.3517555580436979, "learning_rate": 3.477602075748651e-05, "loss": 0.4554, "step": 208 },
    { "epoch": 0.9387984278495227, "grad_norm": 0.2741276185568711, "learning_rate": 3.470512692550142e-05, "loss": 0.4651, "step": 209 },
    { "epoch": 0.9432902863559798, "grad_norm": 0.28314243237558445, "learning_rate": 3.4633828597841184e-05, "loss": 0.4684, "step": 210 },
    { "epoch": 0.9477821448624368, "grad_norm": 0.271199523757628, "learning_rate": 3.4562127735717374e-05, "loss": 0.4584, "step": 211 },
    { "epoch": 0.9522740033688939, "grad_norm": 0.2981292448924746, "learning_rate": 3.449002631141415e-05, "loss": 0.4521, "step": 212 },
    { "epoch": 0.956765861875351, "grad_norm": 0.39759733834992567, "learning_rate": 3.4417526308233946e-05, "loss": 0.4517, "step": 213 },
    { "epoch": 0.961257720381808, "grad_norm": 0.3207040640118584, "learning_rate": 3.434462972044299e-05, "loss": 0.4621, "step": 214 },
    { "epoch": 0.9657495788882651, "grad_norm": 0.4267023462148468, "learning_rate": 3.427133855321636e-05, "loss": 0.4408, "step": 215 },
    { "epoch": 0.970241437394722, "grad_norm": 0.30693747717518904, "learning_rate": 3.419765482258295e-05, "loss": 0.4568, "step": 216 },
    { "epoch": 0.9747332959011791, "grad_norm": 0.26641218705516495, "learning_rate": 3.412358055536987e-05, "loss": 0.4524, "step": 217 },
    { "epoch": 0.9792251544076361, "grad_norm": 0.33075643911928854, "learning_rate": 3.4049117789146816e-05, "loss": 0.4596, "step": 218 },
    { "epoch": 0.9837170129140932, "grad_norm": 0.2960698908990801, "learning_rate": 3.397426857216994e-05, "loss": 0.4585, "step": 219 },
    { "epoch": 0.9882088714205502, "grad_norm": 0.34974861836422566, "learning_rate": 3.389903496332556e-05, "loss": 0.4722, "step": 220 },
    { "epoch": 0.9927007299270073, "grad_norm": 0.24079781613495796, "learning_rate": 3.382341903207351e-05, "loss": 0.4597, "step": 221 },
    { "epoch": 0.9971925884334644, "grad_norm": 0.254596167126036, "learning_rate": 3.374742285839019e-05, "loss": 0.4576, "step": 222 },
    { "epoch": 1.0030881527231892, "grad_norm": 0.45650849798528037, "learning_rate": 3.3671048532711395e-05, "loss": 0.7372, "step": 223 },
    { "epoch": 1.0075800112296462, "grad_norm": 0.48502549810446316, "learning_rate": 3.359429815587479e-05, "loss": 0.4406, "step": 224 },
    { "epoch": 1.0120718697361033, "grad_norm": 0.6551974973199829, "learning_rate": 3.351717383906211e-05, "loss": 0.4137, "step": 225 },
    { "epoch": 1.0165637282425604, "grad_norm": 0.9098843306978837, "learning_rate": 3.34396777037411e-05, "loss": 0.4298, "step": 226 },
    { "epoch": 1.0210555867490174, "grad_norm": 1.005880921859405, "learning_rate": 3.336181188160719e-05, "loss": 0.4272, "step": 227 },
    { "epoch": 1.0255474452554745, "grad_norm": 0.7638539525043158, "learning_rate": 3.32835785145248e-05, "loss": 0.4452, "step": 228 },
    { "epoch": 1.0300393037619315, "grad_norm": 0.6490870004724567, "learning_rate": 3.320497975446847e-05, "loss": 0.4268, "step": 229 },
    { "epoch": 1.0345311622683886, "grad_norm": 0.6699263984499649, "learning_rate": 3.312601776346362e-05, "loss": 0.4438, "step": 230 },
    { "epoch": 1.0390230207748457, "grad_norm": 0.5286275909466732, "learning_rate": 3.3046694713527145e-05, "loss": 0.4367, "step": 231 },
    { "epoch": 1.0435148792813027, "grad_norm": 0.6130866640466045, "learning_rate": 3.296701278660761e-05, "loss": 0.446, "step": 232 },
    { "epoch": 1.0480067377877598, "grad_norm": 0.6884002135849526, "learning_rate": 3.288697417452526e-05, "loss": 0.4332, "step": 233 },
    { "epoch": 1.0524985962942168, "grad_norm": 0.5208063516835049, "learning_rate": 3.28065810789117e-05, "loss": 0.4279, "step": 234 },
    { "epoch": 1.0569904548006739, "grad_norm": 0.6500657214106221, "learning_rate": 3.272583571114939e-05, "loss": 0.4391, "step": 235 },
    { "epoch": 1.0614823133071307, "grad_norm": 0.589151852282848, "learning_rate": 3.2644740292310744e-05, "loss": 0.4298, "step": 236 },
    { "epoch": 1.0659741718135878, "grad_norm": 0.565064070430246, "learning_rate": 3.256329705309711e-05, "loss": 0.4154, "step": 237 },
    { "epoch": 1.0704660303200448, "grad_norm": 0.46206941555534536, "learning_rate": 3.2481508233777345e-05, "loss": 0.4345, "step": 238 },
    { "epoch": 1.074957888826502, "grad_norm": 0.5359425083161934, "learning_rate": 3.2399376084126236e-05, "loss": 0.4269, "step": 239 },
    { "epoch": 1.079449747332959, "grad_norm": 0.42417348299926266, "learning_rate": 3.2316902863362585e-05, "loss": 0.421, "step": 240 },
    { "epoch": 1.083941605839416, "grad_norm": 0.4022467319356316, "learning_rate": 3.223409084008709e-05, "loss": 0.4238, "step": 241 },
    { "epoch": 1.088433464345873, "grad_norm": 0.4771399887670459, "learning_rate": 3.2150942292219915e-05, "loss": 0.4368, "step": 242 },
    { "epoch": 1.0929253228523301, "grad_norm": 0.405316889537787, "learning_rate": 3.206745950693806e-05, "loss": 0.4272, "step": 243 },
    { "epoch": 1.0974171813587872, "grad_norm": 0.4560171315935234, "learning_rate": 3.198364478061242e-05, "loss": 0.4164, "step": 244 },
    { "epoch": 1.1019090398652442, "grad_norm": 0.40674436430728234, "learning_rate": 3.189950041874466e-05, "loss": 0.4259, "step": 245 },
    { "epoch": 1.1064008983717013, "grad_norm": 0.3589283590064641, "learning_rate": 3.181502873590372e-05, "loss": 0.4486, "step": 246 },
    { "epoch": 1.1108927568781584, "grad_norm": 0.4787191044063629, "learning_rate": 3.1730232055662246e-05, "loss": 0.4105, "step": 247 },
    { "epoch": 1.1153846153846154, "grad_norm": 0.3094289645272275, "learning_rate": 3.1645112710532604e-05, "loss": 0.4272, "step": 248 },
    { "epoch": 1.1198764738910725, "grad_norm": 0.4136701758014435, "learning_rate": 3.155967304190274e-05, "loss": 0.4315, "step": 249 },
    { "epoch": 1.1243683323975295, "grad_norm": 0.332359393466012, "learning_rate": 3.1473915399971775e-05, "loss": 0.4482, "step": 250 },
    { "epoch": 1.1288601909039866, "grad_norm": 0.33091458760760906, "learning_rate": 3.1387842143685376e-05, "loss": 0.4269, "step": 251 },
    { "epoch": 1.1333520494104437, "grad_norm": 0.3061436465048981, "learning_rate": 3.1301455640670834e-05, "loss": 0.4031, "step": 252 },
    { "epoch": 1.1378439079169007, "grad_norm": 0.25695899042648535, "learning_rate": 3.1214758267171955e-05, "loss": 0.4475, "step": 253 },
    { "epoch": 1.1423357664233578, "grad_norm": 0.33071861697938854, "learning_rate": 3.112775240798371e-05, "loss": 0.4116, "step": 254 },
    { "epoch": 1.1468276249298146, "grad_norm": 0.33490387258618176, "learning_rate": 3.10404404563866e-05, "loss": 0.44, "step": 255 },
    { "epoch": 1.1513194834362717, "grad_norm": 0.26455082509459493, "learning_rate": 3.095282481408089e-05, "loss": 0.4087, "step": 256 },
    { "epoch": 1.1558113419427287, "grad_norm": 0.32651467340494467, "learning_rate": 3.0864907891120445e-05, "loss": 0.4418, "step": 257 },
    { "epoch": 1.1603032004491858, "grad_norm": 0.32277249531525637, "learning_rate": 3.0776692105846536e-05, "loss": 0.4228, "step": 258 },
    { "epoch": 1.1647950589556428, "grad_norm": 0.31253851440977537, "learning_rate": 3.068817988482126e-05, "loss": 0.4398, "step": 259 },
    { "epoch": 1.1692869174621, "grad_norm": 0.30100317313200226, "learning_rate": 3.059937366276079e-05, "loss": 0.4125, "step": 260 },
    { "epoch": 1.173778775968557, "grad_norm": 0.26292927467871224, "learning_rate": 3.0510275882468455e-05, "loss": 0.4044, "step": 261 },
    { "epoch": 1.178270634475014, "grad_norm": 0.33658807213495406, "learning_rate": 3.042088899476749e-05, "loss": 0.4501, "step": 262 },
    { "epoch": 1.182762492981471, "grad_norm": 0.36177111419297686, "learning_rate": 3.0331215458433645e-05, "loss": 0.4216, "step": 263 },
    { "epoch": 1.1872543514879281, "grad_norm": 0.32308987763570995, "learning_rate": 3.0241257740127544e-05, "loss": 0.4308, "step": 264 },
    { "epoch": 1.1917462099943852, "grad_norm": 0.28816723044551135, "learning_rate": 3.0151018314326845e-05, "loss": 0.4169, "step": 265 },
    { "epoch": 1.1962380685008422, "grad_norm": 0.33784917149936466, "learning_rate": 3.006049966325816e-05, "loss": 0.4328, "step": 266 },
    { "epoch": 1.2007299270072993, "grad_norm": 0.38695560048945116, "learning_rate": 2.99697042768288e-05, "loss": 0.4267, "step": 267 },
    { "epoch": 1.2052217855137564, "grad_norm": 0.2630092985590674, "learning_rate": 2.987863465255825e-05, "loss": 0.4251, "step": 268 },
    { "epoch": 1.2097136440202134, "grad_norm": 0.26907416848755616, "learning_rate": 2.97872932955095e-05, "loss": 0.4342, "step": 269 },
    { "epoch": 1.2142055025266705, "grad_norm": 0.3329576789905292, "learning_rate": 2.9695682718220115e-05, "loss": 0.4105, "step": 270 },
    { "epoch": 1.2186973610331275, "grad_norm": 0.3667650140360048, "learning_rate": 2.960380544063314e-05, "loss": 0.4445, "step": 271 },
    { "epoch": 1.2231892195395846, "grad_norm": 0.24174581681253743, "learning_rate": 2.951166399002778e-05, "loss": 0.4054, "step": 272 },
    { "epoch": 1.2276810780460417, "grad_norm": 0.40069290093861953, "learning_rate": 2.9419260900949873e-05, "loss": 0.4211, "step": 273 },
    { "epoch": 1.2321729365524985, "grad_norm": 0.2835874836855102, "learning_rate": 2.932659871514218e-05, "loss": 0.4346, "step": 274 },
    { "epoch": 1.2366647950589555, "grad_norm": 0.39945173999132705, "learning_rate": 2.9233679981474464e-05, "loss": 0.4415, "step": 275 },
    { "epoch": 1.2411566535654126, "grad_norm": 0.363960202176237, "learning_rate": 2.9140507255873386e-05, "loss": 0.403, "step": 276 },
    { "epoch": 1.2456485120718697, "grad_norm": 0.25428905620278036, "learning_rate": 2.9047083101252186e-05, "loss": 0.4358, "step": 277 },
    { "epoch": 1.2501403705783267, "grad_norm": 0.3283189055026595, "learning_rate": 2.8953410087440197e-05, "loss": 0.4117, "step": 278 },
    { "epoch": 1.2546322290847838, "grad_norm": 0.2647714639844562, "learning_rate": 2.8859490791112147e-05, "loss": 0.4275, "step": 279 },
    { "epoch": 1.2591240875912408, "grad_norm": 0.25210022132262266, "learning_rate": 2.8765327795717294e-05, "loss": 0.4223, "step": 280 },
    { "epoch": 1.263615946097698, "grad_norm": 0.25860170516260206, "learning_rate": 2.8670923691408346e-05, "loss": 0.4268, "step": 281 },
    { "epoch": 1.268107804604155, "grad_norm": 0.290440795791154, "learning_rate": 2.857628107497024e-05, "loss": 0.4253, "step": 282 },
    { "epoch": 1.272599663110612, "grad_norm": 0.2566064118000192, "learning_rate": 2.8481402549748662e-05, "loss": 0.4233, "step": 283 },
    { "epoch": 1.277091521617069, "grad_norm": 0.2172030349507481, "learning_rate": 2.83862907255785e-05, "loss": 0.4258, "step": 284 },
    { "epoch": 1.2815833801235261, "grad_norm": 0.2759192322386285, "learning_rate": 2.8290948218712017e-05, "loss": 0.4273, "step": 285 },
    { "epoch": 1.2860752386299832, "grad_norm": 0.26092478357009175, "learning_rate": 2.8195377651746874e-05, "loss": 0.4261, "step": 286 },
    { "epoch": 1.2905670971364402, "grad_norm": 0.2738776596351749, "learning_rate": 2.8099581653554033e-05, "loss": 0.4397, "step": 287 },
    { "epoch": 1.2950589556428973, "grad_norm": 0.2615518142696367, "learning_rate": 2.8003562859205407e-05, "loss": 0.4247, "step": 288 },
    { "epoch": 1.2995508141493544, "grad_norm": 0.24950762363323614, "learning_rate": 2.7907323909901385e-05, "loss": 0.4047, "step": 289 },
    { "epoch": 1.3040426726558114, "grad_norm": 0.2487777503269512, "learning_rate": 2.7810867452898205e-05, "loss": 0.4267, "step": 290 },
    { "epoch": 1.3085345311622683, "grad_norm": 0.28784743438436633, "learning_rate": 2.7714196141435087e-05, "loss": 0.4272, "step": 291 },
    { "epoch": 1.3130263896687255, "grad_norm": 0.27763421264559995, "learning_rate": 2.7617312634661293e-05, "loss": 0.4197, "step": 292 },
    { "epoch": 1.3175182481751824, "grad_norm": 0.22010284960057416, "learning_rate": 2.7520219597562977e-05, "loss": 0.43, "step": 293 },
    { "epoch": 1.3220101066816397, "grad_norm": 0.24544086790006628, "learning_rate": 2.7422919700889847e-05, "loss": 0.4241, "step": 294 },
    { "epoch": 1.3265019651880965, "grad_norm": 0.3051326473090878, "learning_rate": 2.7325415621081734e-05, "loss": 0.4329, "step": 295 },
    { "epoch": 1.3309938236945535, "grad_norm": 0.24393542706009996, "learning_rate": 2.7227710040194953e-05, "loss": 0.4262, "step": 296 },
    { "epoch": 1.3354856822010106, "grad_norm": 0.27341650845121357, "learning_rate": 2.7129805645828547e-05, "loss": 0.4183, "step": 297 },
    { "epoch": 1.3399775407074677, "grad_norm": 0.32068500420913715, "learning_rate": 2.7031705131050333e-05, "loss": 0.4192, "step": 298 },
    { "epoch": 1.3444693992139247, "grad_norm": 0.22802107083604534, "learning_rate": 2.693341119432283e-05, "loss": 0.4189, "step": 299 },
    { "epoch": 1.3489612577203818, "grad_norm": 0.32416038766831196, "learning_rate": 2.683492653942905e-05, "loss": 0.4354, "step": 300 },
    { "epoch": 1.3534531162268388, "grad_norm": 0.3541483052340605, "learning_rate": 2.673625387539811e-05, "loss": 0.4276, "step": 301 },
    { "epoch": 1.357944974733296, "grad_norm": 0.334691623151228, "learning_rate": 2.6637395916430716e-05, "loss": 0.4201, "step": 302 },
    { "epoch": 1.362436833239753, "grad_norm": 0.2329543840857231, "learning_rate": 2.653835538182449e-05, "loss": 0.4169, "step": 303 },
    { "epoch": 1.36692869174621, "grad_norm": 0.25602661259768267, "learning_rate": 2.6439134995899207e-05, "loss": 0.4162, "step": 304 },
    { "epoch": 1.371420550252667, "grad_norm": 0.24757252624763498, "learning_rate": 2.633973748792181e-05, "loss": 0.4153, "step": 305 },
    { "epoch": 1.3759124087591241, "grad_norm": 0.2644237747349422, "learning_rate": 2.624016559203139e-05, "loss": 0.4203, "step": 306 },
    { "epoch": 1.3804042672655812, "grad_norm": 0.24532537432401993, "learning_rate": 2.6140422047163917e-05, "loss": 0.4048, "step": 307 },
    { "epoch": 1.3848961257720382, "grad_norm": 0.2102088609405344, "learning_rate": 2.604050959697694e-05, "loss": 0.4352, "step": 308 },
    { "epoch": 1.3893879842784953, "grad_norm": 0.25458677144504505, "learning_rate": 2.5940430989774115e-05, "loss": 0.416, "step": 309 },
    { "epoch": 1.3938798427849521, "grad_norm": 0.1983484331670118, "learning_rate": 2.5840188978429588e-05, "loss": 0.4173, "step": 310 },
    { "epoch": 1.3983717012914094, "grad_norm": 0.2587164731888213, "learning_rate": 2.5739786320312287e-05, "loss": 0.4038, "step": 311 },
    { "epoch": 1.4028635597978663, "grad_norm": 0.2039124363087396, "learning_rate": 2.5639225777210075e-05, "loss": 0.4223, "step": 312 },
    { "epoch": 1.4073554183043235, "grad_norm": 0.22976131799256824, "learning_rate": 2.5538510115253757e-05, "loss": 0.4149, "step": 313 },
    { "epoch": 1.4118472768107804, "grad_norm": 0.2595189631685738, "learning_rate": 2.5437642104841036e-05, "loss": 0.4482, "step": 314 },
    { "epoch": 1.4163391353172374, "grad_norm": 0.23276157399646058, "learning_rate": 2.5336624520560272e-05, "loss": 0.4146, "step": 315 },
    { "epoch": 1.4208309938236945, "grad_norm": 0.20481198867769435, "learning_rate": 2.5235460141114157e-05, "loss": 0.4141, "step": 316 },
    { "epoch": 1.4253228523301515, "grad_norm": 0.22636635303540364, "learning_rate": 2.5134151749243315e-05, "loss": 0.4423, "step": 317 },
    { "epoch": 1.4298147108366086, "grad_norm": 0.19394280049370696, "learning_rate": 2.5032702131649727e-05, "loss": 0.4195, "step": 318 },
    { "epoch": 1.4343065693430657, "grad_norm": 0.2558268505632932, "learning_rate": 2.4931114078920097e-05, "loss": 0.4301, "step": 319 },
    { "epoch": 1.4387984278495227, "grad_norm": 0.200459638795042, "learning_rate": 2.482939038544906e-05, "loss": 0.4222, "step": 320 },
    { "epoch": 1.4432902863559798, "grad_norm": 0.2534342488930603, "learning_rate": 2.4727533849362356e-05, "loss": 0.4345, "step": 321 },
    { "epoch": 1.4477821448624368, "grad_norm": 0.22819858783089966, "learning_rate": 2.4625547272439838e-05, "loss": 0.4145, "step": 322 },
    { "epoch": 1.452274003368894, "grad_norm": 0.23820121549687595, "learning_rate": 2.4523433460038403e-05, "loss": 0.4342, "step": 323 },
    { "epoch": 1.456765861875351, "grad_norm": 0.3010492460923718, "learning_rate": 2.4421195221014835e-05, "loss": 0.426, "step": 324 },
    { "epoch": 1.461257720381808, "grad_norm": 0.21550891267344283, "learning_rate": 2.4318835367648533e-05, "loss": 0.428, "step": 325 },
    { "epoch": 1.465749578888265, "grad_norm": 0.23829438088629673, "learning_rate": 2.4216356715564158e-05, "loss": 0.4271, "step": 326 },
    { "epoch": 1.4702414373947221, "grad_norm": 0.2177960144456483, "learning_rate": 2.411376208365418e-05, "loss": 0.4027, "step": 327 },
    { "epoch": 1.4747332959011792, "grad_norm": 0.20378185851986158, "learning_rate": 2.4011054294001326e-05, "loss": 0.4429, "step": 328 },
    { "epoch": 1.479225154407636, "grad_norm": 0.21282286475839282, "learning_rate": 2.3908236171801007e-05, "loss": 0.4093, "step": 329 },
    { "epoch": 1.4837170129140933, "grad_norm": 0.22431029202895883, "learning_rate": 2.3805310545283512e-05, "loss": 0.4352, "step": 330 },
    { "epoch": 1.4882088714205501, "grad_norm": 0.23367631487223958, "learning_rate": 2.370228024563631e-05, "loss": 0.4266, "step": 331 },
    { "epoch": 1.4927007299270074, "grad_norm": 0.26050290832203254, "learning_rate": 2.3599148106926096e-05, "loss": 0.415, "step": 332 },
    { "epoch": 1.4971925884334643, "grad_norm": 0.21757371944664214, "learning_rate": 2.3495916966020877e-05, "loss": 0.4227, "step": 333 },
    { "epoch": 1.5016844469399215, "grad_norm": 0.2292182689622095, "learning_rate": 2.3392589662511922e-05, "loss": 0.4275, "step": 334 },
    { "epoch": 1.5061763054463784, "grad_norm": 0.24168314944321498, "learning_rate": 2.3289169038635655e-05, "loss": 0.4257, "step": 335 },
    { "epoch": 1.5106681639528357, "grad_norm": 0.23477615304589983, "learning_rate": 2.318565793919548e-05, "loss": 0.4325, "step": 336 },
    { "epoch": 1.5151600224592925, "grad_norm": 0.26822251405542813, "learning_rate": 2.3082059211483515e-05, "loss": 0.418, "step": 337 },
    { "epoch": 1.5196518809657495, "grad_norm": 0.24236837945153972, "learning_rate": 2.2978375705202293e-05, "loss": 0.4113, "step": 338 },
    { "epoch": 1.5241437394722066, "grad_norm": 0.2701652119131309, "learning_rate": 2.287461027238634e-05, "loss": 0.4244, "step": 339 },
    { "epoch": 1.5286355979786637, "grad_norm": 0.25867029729580326,
| "learning_rate": 2.2770765767323762e-05, | |
| "loss": 0.4202, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 1.5331274564851207, | |
| "grad_norm": 0.3031867113737479, | |
| "learning_rate": 2.2666845046477715e-05, | |
| "loss": 0.4265, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.5376193149915778, | |
| "grad_norm": 0.2717235885586972, | |
| "learning_rate": 2.2562850968407815e-05, | |
| "loss": 0.4184, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.5421111734980348, | |
| "grad_norm": 0.2945487406873698, | |
| "learning_rate": 2.245878639369154e-05, | |
| "loss": 0.4334, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.546603032004492, | |
| "grad_norm": 0.25368632967688787, | |
| "learning_rate": 2.2354654184845522e-05, | |
| "loss": 0.4251, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.551094890510949, | |
| "grad_norm": 0.23576494752476204, | |
| "learning_rate": 2.2250457206246816e-05, | |
| "loss": 0.4137, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.5555867490174058, | |
| "grad_norm": 0.25108049191369725, | |
| "learning_rate": 2.21461983240541e-05, | |
| "loss": 0.4281, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.560078607523863, | |
| "grad_norm": 0.24712040896081508, | |
| "learning_rate": 2.204188040612885e-05, | |
| "loss": 0.4184, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.56457046603032, | |
| "grad_norm": 0.24759008033384355, | |
| "learning_rate": 2.193750632195644e-05, | |
| "loss": 0.4078, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.5690623245367772, | |
| "grad_norm": 0.2709533914156214, | |
| "learning_rate": 2.1833078942567214e-05, | |
| "loss": 0.4208, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.573554183043234, | |
| "grad_norm": 0.24612055984988512, | |
| "learning_rate": 2.1728601140457514e-05, | |
| "loss": 0.438, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.5780460415496913, | |
| "grad_norm": 0.2779727854211198, | |
| "learning_rate": 2.1624075789510672e-05, | |
| "loss": 0.4248, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.5825379000561481, | |
| "grad_norm": 0.24956359166266004, | |
| "learning_rate": 2.1519505764917945e-05, | |
| "loss": 0.4239, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.5870297585626054, | |
| "grad_norm": 0.2538581269189693, | |
| "learning_rate": 2.1414893943099434e-05, | |
| "loss": 0.3993, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.5915216170690623, | |
| "grad_norm": 0.27128349725146444, | |
| "learning_rate": 2.1310243201624964e-05, | |
| "loss": 0.4378, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.5960134755755195, | |
| "grad_norm": 0.27179359903487693, | |
| "learning_rate": 2.1205556419134923e-05, | |
| "loss": 0.43, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.6005053340819764, | |
| "grad_norm": 0.20873300737067366, | |
| "learning_rate": 2.1100836475261094e-05, | |
| "loss": 0.4015, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.6049971925884334, | |
| "grad_norm": 0.19110310902927194, | |
| "learning_rate": 2.099608625054743e-05, | |
| "loss": 0.4151, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.6094890510948905, | |
| "grad_norm": 0.19793166540329932, | |
| "learning_rate": 2.0891308626370805e-05, | |
| "loss": 0.431, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.6139809096013475, | |
| "grad_norm": 0.2375627281723576, | |
| "learning_rate": 2.0786506484861796e-05, | |
| "loss": 0.4313, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.6184727681078046, | |
| "grad_norm": 0.21497482811349622, | |
| "learning_rate": 2.0681682708825376e-05, | |
| "loss": 0.418, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.6229646266142617, | |
| "grad_norm": 0.2229009604910967, | |
| "learning_rate": 2.0576840181661606e-05, | |
| "loss": 0.4213, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.6274564851207187, | |
| "grad_norm": 0.21076530422813844, | |
| "learning_rate": 2.0471981787286368e-05, | |
| "loss": 0.4245, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.6319483436271758, | |
| "grad_norm": 0.19487715366411615, | |
| "learning_rate": 2.036711041005197e-05, | |
| "loss": 0.3994, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.6364402021336328, | |
| "grad_norm": 0.21037545586342932, | |
| "learning_rate": 2.026222893466787e-05, | |
| "loss": 0.4272, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.6409320606400897, | |
| "grad_norm": 0.1992476412865985, | |
| "learning_rate": 2.0157340246121276e-05, | |
| "loss": 0.4075, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.645423919146547, | |
| "grad_norm": 0.19258224504164123, | |
| "learning_rate": 2.005244722959782e-05, | |
| "loss": 0.4185, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.6499157776530038, | |
| "grad_norm": 0.22437711396860402, | |
| "learning_rate": 1.9947552770402185e-05, | |
| "loss": 0.4213, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.654407636159461, | |
| "grad_norm": 0.20068775432176503, | |
| "learning_rate": 1.984265975387873e-05, | |
| "loss": 0.4004, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.658899494665918, | |
| "grad_norm": 0.26827062531566015, | |
| "learning_rate": 1.9737771065332136e-05, | |
| "loss": 0.4315, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.6633913531723752, | |
| "grad_norm": 0.18339800651405755, | |
| "learning_rate": 1.9632889589948036e-05, | |
| "loss": 0.4137, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.667883211678832, | |
| "grad_norm": 0.24122026800525564, | |
| "learning_rate": 1.952801821271364e-05, | |
| "loss": 0.4143, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.6723750701852893, | |
| "grad_norm": 0.22471731255643268, | |
| "learning_rate": 1.94231598183384e-05, | |
| "loss": 0.4314, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.6768669286917461, | |
| "grad_norm": 0.21520700258506853, | |
| "learning_rate": 1.9318317291174637e-05, | |
| "loss": 0.4348, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.6813587871982034, | |
| "grad_norm": 0.20229135640356036, | |
| "learning_rate": 1.9213493515138214e-05, | |
| "loss": 0.4042, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.6858506457046603, | |
| "grad_norm": 0.2274761056468099, | |
| "learning_rate": 1.9108691373629202e-05, | |
| "loss": 0.4135, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.6903425042111173, | |
| "grad_norm": 0.19582415271216252, | |
| "learning_rate": 1.9003913749452578e-05, | |
| "loss": 0.425, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.6948343627175744, | |
| "grad_norm": 0.24533116589787965, | |
| "learning_rate": 1.889916352473891e-05, | |
| "loss": 0.4231, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.6993262212240314, | |
| "grad_norm": 0.21539388511556778, | |
| "learning_rate": 1.879444358086508e-05, | |
| "loss": 0.4211, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.7038180797304885, | |
| "grad_norm": 0.23681522924545362, | |
| "learning_rate": 1.8689756798375043e-05, | |
| "loss": 0.4133, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.7083099382369455, | |
| "grad_norm": 0.18894482674533236, | |
| "learning_rate": 1.8585106056900572e-05, | |
| "loss": 0.4165, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.7128017967434026, | |
| "grad_norm": 0.25143069296420406, | |
| "learning_rate": 1.8480494235082062e-05, | |
| "loss": 0.4322, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.7172936552498597, | |
| "grad_norm": 0.18144577540313528, | |
| "learning_rate": 1.8375924210489334e-05, | |
| "loss": 0.4149, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.7217855137563167, | |
| "grad_norm": 0.21027491169061668, | |
| "learning_rate": 1.827139885954249e-05, | |
| "loss": 0.4399, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.7262773722627736, | |
| "grad_norm": 0.19166387960227746, | |
| "learning_rate": 1.8166921057432792e-05, | |
| "loss": 0.4108, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.7307692307692308, | |
| "grad_norm": 0.1787253063436926, | |
| "learning_rate": 1.8062493678043565e-05, | |
| "loss": 0.4191, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.7352610892756877, | |
| "grad_norm": 0.1944434272721163, | |
| "learning_rate": 1.7958119593871156e-05, | |
| "loss": 0.433, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.739752947782145, | |
| "grad_norm": 0.17096065229856972, | |
| "learning_rate": 1.7853801675945907e-05, | |
| "loss": 0.4182, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.7442448062886018, | |
| "grad_norm": 0.20349298051903, | |
| "learning_rate": 1.7749542793753194e-05, | |
| "loss": 0.41, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.748736664795059, | |
| "grad_norm": 0.18023821239189158, | |
| "learning_rate": 1.7645345815154488e-05, | |
| "loss": 0.4308, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.753228523301516, | |
| "grad_norm": 0.18619304041599655, | |
| "learning_rate": 1.754121360630847e-05, | |
| "loss": 0.4119, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.7577203818079732, | |
| "grad_norm": 0.2004723428066632, | |
| "learning_rate": 1.7437149031592198e-05, | |
| "loss": 0.4447, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.76221224031443, | |
| "grad_norm": 0.20587176749850336, | |
| "learning_rate": 1.7333154953522295e-05, | |
| "loss": 0.4099, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.7667040988208873, | |
| "grad_norm": 0.20942837636034514, | |
| "learning_rate": 1.722923423267624e-05, | |
| "loss": 0.4251, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.7711959573273441, | |
| "grad_norm": 0.25212770809098806, | |
| "learning_rate": 1.7125389727613665e-05, | |
| "loss": 0.4099, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.7756878158338012, | |
| "grad_norm": 0.21920140315178377, | |
| "learning_rate": 1.7021624294797714e-05, | |
| "loss": 0.4104, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.7801796743402583, | |
| "grad_norm": 0.2503434836250085, | |
| "learning_rate": 1.6917940788516488e-05, | |
| "loss": 0.4069, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.7846715328467153, | |
| "grad_norm": 0.207125157202626, | |
| "learning_rate": 1.6814342060804525e-05, | |
| "loss": 0.4067, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.7891633913531724, | |
| "grad_norm": 0.19734214715361426, | |
| "learning_rate": 1.671083096136435e-05, | |
| "loss": 0.4204, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.7936552498596294, | |
| "grad_norm": 0.20525324823296953, | |
| "learning_rate": 1.6607410337488084e-05, | |
| "loss": 0.4088, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.7981471083660865, | |
| "grad_norm": 0.18175664570391054, | |
| "learning_rate": 1.650408303397913e-05, | |
| "loss": 0.4249, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.8026389668725435, | |
| "grad_norm": 0.1977591192490908, | |
| "learning_rate": 1.640085189307391e-05, | |
| "loss": 0.4258, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.8071308253790006, | |
| "grad_norm": 0.18881451498790647, | |
| "learning_rate": 1.6297719754363697e-05, | |
| "loss": 0.4087, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.8116226838854577, | |
| "grad_norm": 0.19657927189172936, | |
| "learning_rate": 1.6194689454716494e-05, | |
| "loss": 0.4316, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.8161145423919147, | |
| "grad_norm": 0.19778219809553968, | |
| "learning_rate": 1.6091763828199006e-05, | |
| "loss": 0.43, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.8206064008983716, | |
| "grad_norm": 0.1903251799732602, | |
| "learning_rate": 1.598894570599868e-05, | |
| "loss": 0.4136, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.8250982594048288, | |
| "grad_norm": 0.18162137947347418, | |
| "learning_rate": 1.5886237916345835e-05, | |
| "loss": 0.4183, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.8295901179112857, | |
| "grad_norm": 0.19487951977807602, | |
| "learning_rate": 1.5783643284435853e-05, | |
| "loss": 0.4187, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.834081976417743, | |
| "grad_norm": 0.17182780940541395, | |
| "learning_rate": 1.5681164632351474e-05, | |
| "loss": 0.4019, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.8385738349241998, | |
| "grad_norm": 0.18629639494438435, | |
| "learning_rate": 1.5578804778985165e-05, | |
| "loss": 0.413, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.843065693430657, | |
| "grad_norm": 0.17747107285040115, | |
| "learning_rate": 1.5476566539961597e-05, | |
| "loss": 0.4327, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.847557551937114, | |
| "grad_norm": 0.228018379729804, | |
| "learning_rate": 1.537445272756017e-05, | |
| "loss": 0.408, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.8520494104435712, | |
| "grad_norm": 0.19421333458992324, | |
| "learning_rate": 1.5272466150637647e-05, | |
| "loss": 0.4139, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.856541268950028, | |
| "grad_norm": 0.22412210005220193, | |
| "learning_rate": 1.5170609614550946e-05, | |
| "loss": 0.4354, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.861033127456485, | |
| "grad_norm": 0.17273062198040803, | |
| "learning_rate": 1.5068885921079912e-05, | |
| "loss": 0.395, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.8655249859629421, | |
| "grad_norm": 0.20855857120629168, | |
| "learning_rate": 1.4967297868350276e-05, | |
| "loss": 0.4142, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.8700168444693992, | |
| "grad_norm": 0.18397148928603044, | |
| "learning_rate": 1.486584825075669e-05, | |
| "loss": 0.4459, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.8745087029758563, | |
| "grad_norm": 0.21275337196051788, | |
| "learning_rate": 1.476453985888585e-05, | |
| "loss": 0.4109, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.8790005614823133, | |
| "grad_norm": 0.1853167651219081, | |
| "learning_rate": 1.4663375479439736e-05, | |
| "loss": 0.4169, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.8834924199887704, | |
| "grad_norm": 0.18030407451946906, | |
| "learning_rate": 1.4562357895158968e-05, | |
| "loss": 0.4283, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.8879842784952274, | |
| "grad_norm": 0.18951569597374218, | |
| "learning_rate": 1.446148988474625e-05, | |
| "loss": 0.4052, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.8924761370016845, | |
| "grad_norm": 0.17794987340852964, | |
| "learning_rate": 1.4360774222789935e-05, | |
| "loss": 0.4164, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.8969679955081415, | |
| "grad_norm": 0.17286898833294703, | |
| "learning_rate": 1.426021367968772e-05, | |
| "loss": 0.4135, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.9014598540145986, | |
| "grad_norm": 0.18217259664369126, | |
| "learning_rate": 1.415981102157042e-05, | |
| "loss": 0.4168, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.9059517125210554, | |
| "grad_norm": 0.18083728829151619, | |
| "learning_rate": 1.4059569010225895e-05, | |
| "loss": 0.4308, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.9104435710275127, | |
| "grad_norm": 0.16432334350932104, | |
| "learning_rate": 1.3959490403023062e-05, | |
| "loss": 0.4032, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.9149354295339696, | |
| "grad_norm": 0.17032385568331607, | |
| "learning_rate": 1.3859577952836087e-05, | |
| "loss": 0.4076, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.9194272880404268, | |
| "grad_norm": 0.19756002935637215, | |
| "learning_rate": 1.3759834407968613e-05, | |
| "loss": 0.4385, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.9239191465468837, | |
| "grad_norm": 0.15968233005956364, | |
| "learning_rate": 1.3660262512078188e-05, | |
| "loss": 0.413, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.928411005053341, | |
| "grad_norm": 0.183430026499084, | |
| "learning_rate": 1.35608650041008e-05, | |
| "loss": 0.4174, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.9329028635597978, | |
| "grad_norm": 0.16091197293358206, | |
| "learning_rate": 1.3461644618175513e-05, | |
| "loss": 0.4067, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.937394722066255, | |
| "grad_norm": 0.15939384332295572, | |
| "learning_rate": 1.3362604083569291e-05, | |
| "loss": 0.4166, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.941886580572712, | |
| "grad_norm": 0.1714165301375367, | |
| "learning_rate": 1.3263746124601893e-05, | |
| "loss": 0.4242, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.946378439079169, | |
| "grad_norm": 0.17672790675612873, | |
| "learning_rate": 1.3165073460570951e-05, | |
| "loss": 0.4115, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.950870297585626, | |
| "grad_norm": 0.15388644375632554, | |
| "learning_rate": 1.3066588805677177e-05, | |
| "loss": 0.4063, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.955362156092083, | |
| "grad_norm": 0.18333568284520907, | |
| "learning_rate": 1.2968294868949673e-05, | |
| "loss": 0.424, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.9598540145985401, | |
| "grad_norm": 0.17923968151434616, | |
| "learning_rate": 1.2870194354171456e-05, | |
| "loss": 0.4241, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.9643458731049972, | |
| "grad_norm": 0.17979939988921212, | |
| "learning_rate": 1.2772289959805053e-05, | |
| "loss": 0.4173, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.9688377316114543, | |
| "grad_norm": 0.15933915317152375, | |
| "learning_rate": 1.2674584378918278e-05, | |
| "loss": 0.4141, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.9733295901179113, | |
| "grad_norm": 0.17891280546874952, | |
| "learning_rate": 1.2577080299110165e-05, | |
| "loss": 0.4088, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.9778214486243684, | |
| "grad_norm": 0.18595242673961962, | |
| "learning_rate": 1.2479780402437034e-05, | |
| "loss": 0.4221, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.9823133071308254, | |
| "grad_norm": 0.17197113741160228, | |
| "learning_rate": 1.2382687365338714e-05, | |
| "loss": 0.4097, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.9868051656372825, | |
| "grad_norm": 0.17288888553025833, | |
| "learning_rate": 1.2285803858564918e-05, | |
| "loss": 0.4032, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.9912970241437393, | |
| "grad_norm": 0.1831498174331215, | |
| "learning_rate": 1.2189132547101798e-05, | |
| "loss": 0.4285, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.9957888826501966, | |
| "grad_norm": 0.18690814887413265, | |
| "learning_rate": 1.2092676090098615e-05, | |
| "loss": 0.4132, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 2.0016844469399215, | |
| "grad_norm": 0.31820233942220144, | |
| "learning_rate": 1.1996437140794596e-05, | |
| "loss": 0.6728, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 2.0061763054463784, | |
| "grad_norm": 0.24845794106817168, | |
| "learning_rate": 1.190041834644597e-05, | |
| "loss": 0.3911, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 2.0106681639528357, | |
| "grad_norm": 0.23529011866190525, | |
| "learning_rate": 1.180462234825313e-05, | |
| "loss": 0.3875, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 2.0151600224592925, | |
| "grad_norm": 0.20541592097331435, | |
| "learning_rate": 1.1709051781287992e-05, | |
| "loss": 0.3764, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 2.0196518809657498, | |
| "grad_norm": 0.23850249371466287, | |
| "learning_rate": 1.1613709274421504e-05, | |
| "loss": 0.3838, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 2.0241437394722066, | |
| "grad_norm": 0.23269546113510145, | |
| "learning_rate": 1.1518597450251343e-05, | |
| "loss": 0.3978, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 2.028635597978664, | |
| "grad_norm": 0.21046227650518717, | |
| "learning_rate": 1.1423718925029767e-05, | |
| "loss": 0.3989, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 2.0331274564851207, | |
| "grad_norm": 0.23228388463289662, | |
| "learning_rate": 1.1329076308591654e-05, | |
| "loss": 0.3836, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 2.0376193149915776, | |
| "grad_norm": 0.22084027535536246, | |
| "learning_rate": 1.1234672204282711e-05, | |
| "loss": 0.3791, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 2.042111173498035, | |
| "grad_norm": 0.21257855027368064, | |
| "learning_rate": 1.1140509208887858e-05, | |
| "loss": 0.3928, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 2.0466030320044917, | |
| "grad_norm": 0.22223464338110302, | |
| "learning_rate": 1.1046589912559811e-05, | |
| "loss": 0.3963, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 2.051094890510949, | |
| "grad_norm": 0.22247881079865983, | |
| "learning_rate": 1.0952916898747819e-05, | |
| "loss": 0.3766, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 2.055586749017406, | |
| "grad_norm": 0.2291263601713879, | |
| "learning_rate": 1.0859492744126618e-05, | |
| "loss": 0.3866, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 2.060078607523863, | |
| "grad_norm": 0.19705657511960636, | |
| "learning_rate": 1.0766320018525541e-05, | |
| "loss": 0.3987, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 2.06457046603032, | |
| "grad_norm": 0.22106890230730505, | |
| "learning_rate": 1.0673401284857825e-05, | |
| "loss": 0.403, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 2.069062324536777, | |
| "grad_norm": 0.1979104498511638, | |
| "learning_rate": 1.0580739099050132e-05, | |
| "loss": 0.3695, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 2.073554183043234, | |
| "grad_norm": 0.18185997974783355, | |
| "learning_rate": 1.0488336009972225e-05, | |
| "loss": 0.3752, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 2.0780460415496913, | |
| "grad_norm": 0.1995077254401588, | |
| "learning_rate": 1.0396194559366868e-05, | |
| "loss": 0.3875, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 2.082537900056148, | |
| "grad_norm": 0.1842603161398483, | |
| "learning_rate": 1.0304317281779895e-05, | |
| "loss": 0.3864, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 2.0870297585626054, | |
| "grad_norm": 0.17522173114097753, | |
| "learning_rate": 1.0212706704490509e-05, | |
| "loss": 0.3843, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 2.0915216170690623, | |
| "grad_norm": 0.20582517644212794, | |
| "learning_rate": 1.0121365347441756e-05, | |
| "loss": 0.3782, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 2.0960134755755195, | |
| "grad_norm": 0.17269025378235323, | |
| "learning_rate": 1.0030295723171207e-05, | |
| "loss": 0.3745, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 2.1005053340819764, | |
| "grad_norm": 0.1672708444703473, | |
| "learning_rate": 9.939500336741848e-06, | |
| "loss": 0.3892, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 2.1049971925884337, | |
| "grad_norm": 0.190094018700073, | |
| "learning_rate": 9.848981685673167e-06, | |
| "loss": 0.3869, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 2.1094890510948905, | |
| "grad_norm": 0.16798933856570303, | |
| "learning_rate": 9.758742259872466e-06, | |
| "loss": 0.376, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 2.1139809096013478, | |
| "grad_norm": 0.17835571477004522, | |
| "learning_rate": 9.668784541566365e-06, | |
| "loss": 0.4031, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 2.1184727681078046, | |
| "grad_norm": 0.1767072318763064, | |
| "learning_rate": 9.579111005232517e-06, | |
| "loss": 0.3781, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 2.1229646266142614, | |
| "grad_norm": 0.1858609134040628, | |
| "learning_rate": 9.489724117531551e-06, | |
| "loss": 0.3889, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 2.1274564851207187, | |
| "grad_norm": 0.18045023510913724, | |
| "learning_rate": 9.400626337239216e-06, | |
| "loss": 0.3914, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 2.1319483436271756, | |
| "grad_norm": 0.2009653882699152, | |
| "learning_rate": 9.311820115178753e-06, | |
| "loss": 0.3979, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 2.136440202133633, | |
| "grad_norm": 0.18185195763749404, | |
| "learning_rate": 9.223307894153464e-06, | |
| "loss": 0.3809, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 2.1409320606400897, | |
| "grad_norm": 0.2047525313163014, | |
| "learning_rate": 9.135092108879555e-06, | |
| "loss": 0.3839, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 2.145423919146547, | |
| "grad_norm": 0.1851785348593311, | |
| "learning_rate": 9.047175185919114e-06, | |
| "loss": 0.3836, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 2.149915777653004, | |
| "grad_norm": 0.17714291133303015, | |
| "learning_rate": 8.959559543613398e-06, | |
| "loss": 0.3798, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 2.154407636159461, | |
| "grad_norm": 0.17996896473863375, | |
| "learning_rate": 8.872247592016299e-06, | |
| "loss": 0.3845, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 2.158899494665918, | |
| "grad_norm": 0.1589452532146188, | |
| "learning_rate": 8.785241732828051e-06, | |
| "loss": 0.3852, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 2.163391353172375, | |
| "grad_norm": 0.17567945924134573, | |
| "learning_rate": 8.698544359329173e-06, | |
| "loss": 0.401, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 2.167883211678832, | |
| "grad_norm": 0.16960905447773494, | |
| "learning_rate": 8.612157856314627e-06, | |
| "loss": 0.3536, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 2.1723750701852893, | |
| "grad_norm": 0.17837372239963734, | |
| "learning_rate": 8.526084600028224e-06, | |
| "loss": 0.4124, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 2.176866928691746, | |
| "grad_norm": 0.15968538949963945, | |
| "learning_rate": 8.440326958097263e-06, | |
| "loss": 0.3784, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 2.1813587871982034, | |
| "grad_norm": 0.176287402163356, | |
| "learning_rate": 8.354887289467397e-06, | |
| "loss": 0.4017, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 2.1858506457046603, | |
| "grad_norm": 0.16696731653404612, | |
| "learning_rate": 8.269767944337754e-06, | |
| "loss": 0.4012, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 2.1903425042111175, | |
| "grad_norm": 0.15898548252925163, | |
| "learning_rate": 8.184971264096283e-06, | |
| "loss": 0.374, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 2.1948343627175744, | |
| "grad_norm": 0.17760446745352174, | |
| "learning_rate": 8.10049958125535e-06, | |
| "loss": 0.3961, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 2.199326221224031, | |
| "grad_norm": 0.15318983709032877, | |
| "learning_rate": 8.016355219387581e-06, | |
| "loss": 0.3822, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 2.2038180797304885, | |
| "grad_norm": 0.15574070294018197, | |
| "learning_rate": 7.93254049306195e-06, | |
| "loss": 0.3762, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 2.2083099382369458, | |
| "grad_norm": 0.1792100057983903, | |
| "learning_rate": 7.849057707780092e-06, | |
| "loss": 0.3808, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 2.2128017967434026, | |
| "grad_norm": 0.15515150974209543, | |
| "learning_rate": 7.76590915991291e-06, | |
| "loss": 0.3783, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 2.2172936552498594, | |
| "grad_norm": 0.16343961349749975, | |
| "learning_rate": 7.683097136637412e-06, | |
| "loss": 0.3986, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 2.2217855137563167, | |
| "grad_norm": 0.1636033882308239, | |
| "learning_rate": 7.600623915873761e-06, | |
| "loss": 0.3714, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 2.2262773722627736, | |
| "grad_norm": 0.1418188610038935, | |
| "learning_rate": 7.518491766222651e-06, | |
| "loss": 0.3739, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 2.230769230769231, | |
| "grad_norm": 0.15292665001934458, | |
| "learning_rate": 7.436702946902896e-06, | |
| "loss": 0.3785, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 2.2352610892756877, | |
| "grad_norm": 0.17132805211451424, | |
| "learning_rate": 7.355259707689261e-06, | |
| "loss": 0.3854, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 2.239752947782145, | |
| "grad_norm": 0.13219021664852376, | |
| "learning_rate": 7.2741642888506225e-06, | |
| "loss": 0.3781, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 2.244244806288602, | |
| "grad_norm": 0.15182675040480578, | |
| "learning_rate": 7.193418921088309e-06, | |
| "loss": 0.3715, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 2.248736664795059, | |
| "grad_norm": 0.1614429892062153, | |
| "learning_rate": 7.113025825474753e-06, | |
| "loss": 0.3978, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 2.253228523301516, | |
| "grad_norm": 0.14377799568034885, | |
| "learning_rate": 7.032987213392397e-06, | |
| "loss": 0.3827, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 2.257720381807973, | |
| "grad_norm": 0.14169930172967138, | |
| "learning_rate": 6.953305286472862e-06, | |
| "loss": 0.4003, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 2.26221224031443, | |
| "grad_norm": 0.14182100490158306, | |
| "learning_rate": 6.87398223653639e-06, | |
| "loss": 0.3728, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 2.2667040988208873, | |
| "grad_norm": 0.1469290286945146, | |
| "learning_rate": 6.795020245531545e-06, | |
| "loss": 0.4099, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 2.271195957327344, | |
| "grad_norm": 0.13876615735629477, | |
| "learning_rate": 6.716421485475209e-06, | |
| "loss": 0.3805, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 2.2756878158338014, | |
| "grad_norm": 0.13927105550890062, | |
| "learning_rate": 6.638188118392819e-06, | |
| "loss": 0.3753, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 2.2801796743402583, | |
| "grad_norm": 0.13262777740226053, | |
| "learning_rate": 6.5603222962589076e-06, | |
| "loss": 0.3829, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 2.2846715328467155, | |
| "grad_norm": 0.2291014928704109, | |
| "learning_rate": 6.4828261609378984e-06, | |
| "loss": 0.4131, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 2.2891633913531724, | |
| "grad_norm": 0.14876212704335656, | |
| "learning_rate": 6.405701844125214e-06, | |
| "loss": 0.3806, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 2.293655249859629, | |
| "grad_norm": 0.1357314628408272, | |
| "learning_rate": 6.3289514672886046e-06, | |
| "loss": 0.3917, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 2.2981471083660865, | |
| "grad_norm": 0.15317713760216325, | |
| "learning_rate": 6.252577141609812e-06, | |
| "loss": 0.3937, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 2.3026389668725433, | |
| "grad_norm": 0.14603624638639434, | |
| "learning_rate": 6.176580967926495e-06, | |
| "loss": 0.3972, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 2.3071308253790006, | |
| "grad_norm": 0.14991200099061008, | |
| "learning_rate": 6.100965036674442e-06, | |
| "loss": 0.3999, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 2.3116226838854574, | |
| "grad_norm": 0.14948198377091151, | |
| "learning_rate": 6.025731427830066e-06, | |
| "loss": 0.3785, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 2.3161145423919147, | |
| "grad_norm": 0.1505587295663645, | |
| "learning_rate": 5.950882210853193e-06, | |
| "loss": 0.3853, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 2.3206064008983716, | |
| "grad_norm": 0.14095341693251215, | |
| "learning_rate": 5.876419444630135e-06, | |
| "loss": 0.3747, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 2.325098259404829, | |
| "grad_norm": 0.15343927263976573, | |
| "learning_rate": 5.802345177417057e-06, | |
| "loss": 0.3911, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 2.3295901179112857, | |
| "grad_norm": 0.14581061961173922, | |
| "learning_rate": 5.7286614467836385e-06, | |
| "loss": 0.3938, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 2.334081976417743, | |
| "grad_norm": 0.14825964553734852, | |
| "learning_rate": 5.655370279557022e-06, | |
| "loss": 0.3833, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 2.3385738349242, | |
| "grad_norm": 0.13932130833002707, | |
| "learning_rate": 5.582473691766059e-06, | |
| "loss": 0.375, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 2.343065693430657, | |
| "grad_norm": 0.16094642012274518, | |
| "learning_rate": 5.509973688585857e-06, | |
| "loss": 0.4212, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 2.347557551937114, | |
| "grad_norm": 0.14017245129347197, | |
| "learning_rate": 5.437872264282629e-06, | |
| "loss": 0.3682, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 2.352049410443571, | |
| "grad_norm": 0.16563285946571352, | |
| "learning_rate": 5.366171402158824e-06, | |
| "loss": 0.3946, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 2.356541268950028, | |
| "grad_norm": 0.14816943823168197, | |
| "learning_rate": 5.294873074498588e-06, | |
| "loss": 0.3786, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 2.3610331274564853, | |
| "grad_norm": 0.14097249335807502, | |
| "learning_rate": 5.22397924251349e-06, | |
| "loss": 0.3956, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 2.365524985962942, | |
| "grad_norm": 0.1626055328008332, | |
| "learning_rate": 5.153491856288613e-06, | |
| "loss": 0.3781, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 2.370016844469399, | |
| "grad_norm": 0.1581766871638016, | |
| "learning_rate": 5.083412854728862e-06, | |
| "loss": 0.3773, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 2.3745087029758563, | |
| "grad_norm": 0.1370505704219127, | |
| "learning_rate": 5.013744165505668e-06, | |
| "loss": 0.4019, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 2.3790005614823135, | |
| "grad_norm": 0.14919563626397872, | |
| "learning_rate": 4.944487705003953e-06, | |
| "loss": 0.3889, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 2.3834924199887704, | |
| "grad_norm": 0.15220146516049335, | |
| "learning_rate": 4.875645378269411e-06, | |
| "loss": 0.4075, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 2.387984278495227, | |
| "grad_norm": 0.13753788829585528, | |
| "learning_rate": 4.807219078956112e-06, | |
| "loss": 0.3702, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 2.3924761370016845, | |
| "grad_norm": 0.13555607788005236, | |
| "learning_rate": 4.739210689274405e-06, | |
| "loss": 0.4015, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 2.3969679955081413, | |
| "grad_norm": 0.13667107894813973, | |
| "learning_rate": 4.6716220799391576e-06, | |
| "loss": 0.3789, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 2.4014598540145986, | |
| "grad_norm": 0.14649763774434837, | |
| "learning_rate": 4.604455110118282e-06, | |
| "loss": 0.3863, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 2.4059517125210554, | |
| "grad_norm": 0.14007880554776833, | |
| "learning_rate": 4.537711627381605e-06, | |
| "loss": 0.4009, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 2.4104435710275127, | |
| "grad_norm": 0.13148063363093063, | |
| "learning_rate": 4.471393467650045e-06, | |
| "loss": 0.3831, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 2.4149354295339696, | |
| "grad_norm": 0.13308039714871103, | |
| "learning_rate": 4.405502455145112e-06, | |
| "loss": 0.3904, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 2.419427288040427, | |
| "grad_norm": 0.1403661371830867, | |
| "learning_rate": 4.340040402338715e-06, | |
| "loss": 0.3983, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 2.4239191465468837, | |
| "grad_norm": 0.12646768625314328, | |
| "learning_rate": 4.275009109903327e-06, | |
| "loss": 0.3577, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 2.428411005053341, | |
| "grad_norm": 0.13562669832834004, | |
| "learning_rate": 4.210410366662448e-06, | |
| "loss": 0.3976, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 2.432902863559798, | |
| "grad_norm": 0.13623904105238863, | |
| "learning_rate": 4.146245949541385e-06, | |
| "loss": 0.3886, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 2.437394722066255, | |
| "grad_norm": 0.13646009660556613, | |
| "learning_rate": 4.08251762351839e-06, | |
| "loss": 0.3766, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 2.441886580572712, | |
| "grad_norm": 0.1316392042932278, | |
| "learning_rate": 4.019227141576114e-06, | |
| "loss": 0.3824, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 2.446378439079169, | |
| "grad_norm": 0.1390641915087067, | |
| "learning_rate": 3.956376244653366e-06, | |
| "loss": 0.3846, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 2.450870297585626, | |
| "grad_norm": 0.13984878116688948, | |
| "learning_rate": 3.893966661597242e-06, | |
| "loss": 0.3876, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 2.4553621560920833, | |
| "grad_norm": 0.12832607427523418, | |
| "learning_rate": 3.832000109115566e-06, | |
| "loss": 0.3631, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 2.45985401459854, | |
| "grad_norm": 0.13491933073478224, | |
| "learning_rate": 3.7704782917296645e-06, | |
| "loss": 0.3869, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 2.464345873104997, | |
| "grad_norm": 0.14118699667163256, | |
| "learning_rate": 3.709402901727488e-06, | |
| "loss": 0.407, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 2.4688377316114543, | |
| "grad_norm": 0.12328872205972351, | |
| "learning_rate": 3.648775619117049e-06, | |
| "loss": 0.3741, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 2.473329590117911, | |
| "grad_norm": 0.1506985853117082, | |
| "learning_rate": 3.5885981115802238e-06, | |
| "loss": 0.4011, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 2.4778214486243684, | |
| "grad_norm": 0.13386940983312248, | |
| "learning_rate": 3.5288720344268646e-06, | |
| "loss": 0.398, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 2.482313307130825, | |
| "grad_norm": 0.12523306053803873, | |
| "learning_rate": 3.469599030549282e-06, | |
| "loss": 0.3811, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 2.4868051656372825, | |
| "grad_norm": 0.14913858233275104, | |
| "learning_rate": 3.4107807303770434e-06, | |
| "loss": 0.3886, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 2.4912970241437393, | |
| "grad_norm": 0.14690732271607437, | |
| "learning_rate": 3.352418751832123e-06, | |
| "loss": 0.3779, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 2.4957888826501966, | |
| "grad_norm": 0.1283976136597926, | |
| "learning_rate": 3.294514700284408e-06, | |
| "loss": 0.3912, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 2.5002807411566534, | |
| "grad_norm": 0.12599932292049454, | |
| "learning_rate": 3.23707016850753e-06, | |
| "loss": 0.3649, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 2.5047725996631107, | |
| "grad_norm": 0.1412452663633671, | |
| "learning_rate": 3.1800867366350553e-06, | |
| "loss": 0.4099, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 2.5092644581695676, | |
| "grad_norm": 0.12994496236317274, | |
| "learning_rate": 3.1235659721170175e-06, | |
| "loss": 0.3585, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 2.513756316676025, | |
| "grad_norm": 0.13704998276944425, | |
| "learning_rate": 3.067509429676816e-06, | |
| "loss": 0.378, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 2.5182481751824817, | |
| "grad_norm": 0.1329699691102329, | |
| "learning_rate": 3.0119186512684285e-06, | |
| "loss": 0.4016, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 2.522740033688939, | |
| "grad_norm": 0.12787339333439612, | |
| "learning_rate": 2.9567951660340057e-06, | |
| "loss": 0.3769, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 2.527231892195396, | |
| "grad_norm": 0.12639302238646444, | |
| "learning_rate": 2.902140490261811e-06, | |
| "loss": 0.384, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 2.531723750701853, | |
| "grad_norm": 0.12740622326489023, | |
| "learning_rate": 2.847956127344511e-06, | |
| "loss": 0.3727, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 2.53621560920831, | |
| "grad_norm": 0.1274077725349253, | |
| "learning_rate": 2.79424356773782e-06, | |
| "loss": 0.3949, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 2.5407074677147667, | |
| "grad_norm": 0.14070694794745542, | |
| "learning_rate": 2.7410042889195042e-06, | |
| "loss": 0.4028, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 2.545199326221224, | |
| "grad_norm": 0.1302616328585049, | |
| "learning_rate": 2.688239755348736e-06, | |
| "loss": 0.395, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 2.5496911847276813, | |
| "grad_norm": 0.12295708919457898, | |
| "learning_rate": 2.6359514184258174e-06, | |
| "loss": 0.3728, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 2.554183043234138, | |
| "grad_norm": 0.12276285816841742, | |
| "learning_rate": 2.584140716452248e-06, | |
| "loss": 0.3691, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 2.558674901740595, | |
| "grad_norm": 0.13873231203080208, | |
| "learning_rate": 2.5328090745911716e-06, | |
| "loss": 0.4278, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 2.5631667602470523, | |
| "grad_norm": 0.12829162549900386, | |
| "learning_rate": 2.4819579048281626e-06, | |
| "loss": 0.3782, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 2.5676586187535095, | |
| "grad_norm": 0.13231537917773445, | |
| "learning_rate": 2.4315886059323954e-06, | |
| "loss": 0.3808, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 2.5721504772599664, | |
| "grad_norm": 0.1369680213078627, | |
| "learning_rate": 2.3817025634181667e-06, | |
| "loss": 0.387, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 2.576642335766423, | |
| "grad_norm": 0.12299316314723288, | |
| "learning_rate": 2.3323011495067793e-06, | |
| "loss": 0.3752, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 2.5811341942728805, | |
| "grad_norm": 0.132398051983568, | |
| "learning_rate": 2.2833857230888025e-06, | |
| "loss": 0.4173, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 2.5856260527793373, | |
| "grad_norm": 0.13673797066153306, | |
| "learning_rate": 2.2349576296866847e-06, | |
| "loss": 0.3701, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 2.5901179112857946, | |
| "grad_norm": 0.14211055939963077, | |
| "learning_rate": 2.1870182014177522e-06, | |
| "loss": 0.3854, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 2.5946097697922514, | |
| "grad_norm": 0.13087656075785165, | |
| "learning_rate": 2.139568756957562e-06, | |
| "loss": 0.3919, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 2.5991016282987087, | |
| "grad_norm": 0.13104371501204107, | |
| "learning_rate": 2.092610601503622e-06, | |
| "loss": 0.3737, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 2.6035934868051656, | |
| "grad_norm": 0.13955533120466573, | |
| "learning_rate": 2.046145026739499e-06, | |
| "loss": 0.3913, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 2.608085345311623, | |
| "grad_norm": 0.12454085806220169, | |
| "learning_rate": 2.0001733107992873e-06, | |
| "loss": 0.3876, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 2.6125772038180797, | |
| "grad_norm": 0.12987899577798392, | |
| "learning_rate": 1.9546967182324405e-06, | |
| "loss": 0.3888, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 2.6170690623245365, | |
| "grad_norm": 0.13618838810031256, | |
| "learning_rate": 1.909716499969003e-06, | |
| "loss": 0.3983, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 2.621560920830994, | |
| "grad_norm": 0.11456147785049309, | |
| "learning_rate": 1.8652338932851833e-06, | |
| "loss": 0.356, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 2.626052779337451, | |
| "grad_norm": 0.12452208443851459, | |
| "learning_rate": 1.8212501217693357e-06, | |
| "loss": 0.3967, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 2.630544637843908, | |
| "grad_norm": 0.1193858070172244, | |
| "learning_rate": 1.7777663952882963e-06, | |
| "loss": 0.3759, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 2.6350364963503647, | |
| "grad_norm": 0.12976111486762895, | |
| "learning_rate": 1.7347839099540965e-06, | |
| "loss": 0.4029, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 2.639528354856822, | |
| "grad_norm": 0.11988888490936794, | |
| "learning_rate": 1.6923038480910724e-06, | |
| "loss": 0.3864, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 2.6440202133632793, | |
| "grad_norm": 0.12351681340692204, | |
| "learning_rate": 1.6503273782033403e-06, | |
| "loss": 0.3905, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 2.648512071869736, | |
| "grad_norm": 0.1257528893654648, | |
| "learning_rate": 1.6088556549426492e-06, | |
| "loss": 0.3735, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 2.653003930376193, | |
| "grad_norm": 0.12993275191787904, | |
| "learning_rate": 1.5678898190766223e-06, | |
| "loss": 0.3955, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 2.6574957888826503, | |
| "grad_norm": 0.12200417045769588, | |
| "learning_rate": 1.5274309974573775e-06, | |
| "loss": 0.3909, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 2.661987647389107, | |
| "grad_norm": 0.12176338417131267, | |
| "learning_rate": 1.4874803029905293e-06, | |
| "loss": 0.3843, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 2.6664795058955644, | |
| "grad_norm": 0.11864963451714565, | |
| "learning_rate": 1.4480388346045882e-06, | |
| "loss": 0.3779, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 2.670971364402021, | |
| "grad_norm": 0.12242771643988595, | |
| "learning_rate": 1.4091076772207158e-06, | |
| "loss": 0.3888, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 2.6754632229084785, | |
| "grad_norm": 0.129632970484511, | |
| "learning_rate": 1.3706879017228824e-06, | |
| "loss": 0.3923, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 2.6799550814149353, | |
| "grad_norm": 0.12538027814679648, | |
| "learning_rate": 1.3327805649284264e-06, | |
| "loss": 0.3845, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 2.6844469399213926, | |
| "grad_norm": 0.1283471841054888, | |
| "learning_rate": 1.295386709558968e-06, | |
| "loss": 0.3882, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 2.6889387984278494, | |
| "grad_norm": 0.12000518511157998, | |
| "learning_rate": 1.258507364211732e-06, | |
| "loss": 0.3776, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 2.6934306569343067, | |
| "grad_norm": 0.12407135092312963, | |
| "learning_rate": 1.2221435433312579e-06, | |
| "loss": 0.3898, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 2.6979225154407636, | |
| "grad_norm": 0.12386841328354725, | |
| "learning_rate": 1.1862962471814931e-06, | |
| "loss": 0.3702, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 2.702414373947221, | |
| "grad_norm": 0.12269735076924326, | |
| "learning_rate": 1.1509664618182724e-06, | |
| "loss": 0.3929, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 2.7069062324536777, | |
| "grad_norm": 0.11968639335338847, | |
| "learning_rate": 1.1161551590622066e-06, | |
| "loss": 0.3796, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 2.7113980909601345, | |
| "grad_norm": 0.12506634531419425, | |
| "learning_rate": 1.0818632964719388e-06, | |
| "loss": 0.3882, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 2.715889949466592, | |
| "grad_norm": 0.12492898023332717, | |
| "learning_rate": 1.0480918173178112e-06, | |
| "loss": 0.3863, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 2.720381807973049, | |
| "grad_norm": 0.12465075623946018, | |
| "learning_rate": 1.0148416505559178e-06, | |
| "loss": 0.4011, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 2.724873666479506, | |
| "grad_norm": 0.11514151474983603, | |
| "learning_rate": 9.821137108025502e-07, | |
| "loss": 0.365, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 2.7293655249859627, | |
| "grad_norm": 0.12074321491714875, | |
| "learning_rate": 9.499088983090399e-07, | |
| "loss": 0.3806, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 2.73385738349242, | |
| "grad_norm": 0.11798285664886023, | |
| "learning_rate": 9.18228098936993e-07, | |
| "loss": 0.3807, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 2.7383492419988773, | |
| "grad_norm": 0.12361829495280739, | |
| "learning_rate": 8.870721841339236e-07, | |
| "loss": 0.3906, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 2.742841100505334, | |
| "grad_norm": 0.1175804885546061, | |
| "learning_rate": 8.564420109092908e-07, | |
| "loss": 0.3855, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 2.747332959011791, | |
| "grad_norm": 0.12342724046508405, | |
| "learning_rate": 8.263384218109061e-07, | |
| "loss": 0.3693, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 2.7518248175182483, | |
| "grad_norm": 0.11945663960806462, | |
| "learning_rate": 7.967622449017787e-07, | |
| "loss": 0.3861, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 2.756316676024705, | |
| "grad_norm": 0.11826332338408053, | |
| "learning_rate": 7.677142937373227e-07, | |
| "loss": 0.382, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 2.7608085345311624, | |
| "grad_norm": 0.11879518910048437, | |
| "learning_rate": 7.391953673429864e-07, | |
| "loss": 0.3791, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 2.765300393037619, | |
| "grad_norm": 0.1220817215216799, | |
| "learning_rate": 7.112062501922712e-07, | |
| "loss": 0.3874, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 2.7697922515440765, | |
| "grad_norm": 0.12127886373612777, | |
| "learning_rate": 6.837477121851455e-07, | |
| "loss": 0.369, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 2.7742841100505333, | |
| "grad_norm": 0.1211945636909947, | |
| "learning_rate": 6.568205086268852e-07, | |
| "loss": 0.401, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 2.7787759685569906, | |
| "grad_norm": 0.11327475694882008, | |
| "learning_rate": 6.304253802072802e-07, | |
| "loss": 0.3891, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 2.7832678270634474, | |
| "grad_norm": 0.11913308992918804, | |
| "learning_rate": 6.045630529802627e-07, | |
| "loss": 0.3826, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 2.7877596855699043, | |
| "grad_norm": 0.13072230310382224, | |
| "learning_rate": 5.792342383439486e-07, | |
| "loss": 0.3664, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 2.7922515440763616, | |
| "grad_norm": 0.12023659956123842, | |
| "learning_rate": 5.544396330210533e-07, | |
| "loss": 0.3967, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 2.796743402582819, | |
| "grad_norm": 0.12351491807786948, | |
| "learning_rate": 5.301799190397305e-07, | |
| "loss": 0.3882, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 2.8012352610892757, | |
| "grad_norm": 0.1271068102427812, | |
| "learning_rate": 5.064557637148216e-07, | |
| "loss": 0.3965, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 2.8057271195957325, | |
| "grad_norm": 0.11945133886715532, | |
| "learning_rate": 4.832678196294871e-07, | |
| "loss": 0.3797, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 2.81021897810219, | |
| "grad_norm": 0.1228526979626178, | |
| "learning_rate": 4.6061672461726216e-07, | |
| "loss": 0.3823, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 2.814710836608647, | |
| "grad_norm": 0.12377975256634413, | |
| "learning_rate": 4.385031017445118e-07, | |
| "loss": 0.3853, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 2.819202695115104, | |
| "grad_norm": 0.1180918464343877, | |
| "learning_rate": 4.16927559293292e-07, | |
| "loss": 0.3714, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 2.8236945536215607, | |
| "grad_norm": 0.12029715772911251, | |
| "learning_rate": 3.958906907446114e-07, | |
| "loss": 0.3973, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 2.828186412128018, | |
| "grad_norm": 0.1191457088441823, | |
| "learning_rate": 3.7539307476211594e-07, | |
| "loss": 0.3889, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 2.832678270634475, | |
| "grad_norm": 0.12323691151471038, | |
| "learning_rate": 3.5543527517616805e-07, | |
| "loss": 0.3808, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 2.837170129140932, | |
| "grad_norm": 0.11625344806648244, | |
| "learning_rate": 3.3601784096833237e-07, | |
| "loss": 0.395, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 2.841661987647389, | |
| "grad_norm": 0.1179022658845467, | |
| "learning_rate": 3.1714130625628117e-07, | |
| "loss": 0.3796, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 2.8461538461538463, | |
| "grad_norm": 0.1241261141557499, | |
| "learning_rate": 2.9880619027910176e-07, | |
| "loss": 0.3887, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 2.850645704660303, | |
| "grad_norm": 0.11699829970120944, | |
| "learning_rate": 2.810129973830056e-07, | |
| "loss": 0.3937, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 2.8551375631667604, | |
| "grad_norm": 0.11930637212092053, | |
| "learning_rate": 2.6376221700747054e-07, | |
| "loss": 0.398, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 2.859629421673217, | |
| "grad_norm": 0.11860271118478417, | |
| "learning_rate": 2.4705432367176043e-07, | |
| "loss": 0.3762, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 2.8641212801796745, | |
| "grad_norm": 0.12755729413902409, | |
| "learning_rate": 2.3088977696188676e-07, | |
| "loss": 0.3771, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 2.8686131386861313, | |
| "grad_norm": 0.12814174883620247, | |
| "learning_rate": 2.1526902151795426e-07, | |
| "loss": 0.3908, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 2.8731049971925886, | |
| "grad_norm": 0.1196187477785971, | |
| "learning_rate": 2.0019248702194627e-07, | |
| "loss": 0.3983, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 2.8775968556990454, | |
| "grad_norm": 0.11427000021771151, | |
| "learning_rate": 1.8566058818588527e-07, | |
| "loss": 0.3777, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 2.8820887142055023, | |
| "grad_norm": 0.11272751050998314, | |
| "learning_rate": 1.716737247404421e-07, | |
| "loss": 0.367, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 2.8865805727119596, | |
| "grad_norm": 0.12224138502917167, | |
| "learning_rate": 1.5823228142393342e-07, | |
| "loss": 0.4054, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 2.891072431218417, | |
| "grad_norm": 0.11865893236869224, | |
| "learning_rate": 1.4533662797174163e-07, | |
| "loss": 0.3819, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 2.8955642897248737, | |
| "grad_norm": 0.11451275840262805, | |
| "learning_rate": 1.3298711910613604e-07, | |
| "loss": 0.3814, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 2.9000561482313305, | |
| "grad_norm": 0.11485876752966154, | |
| "learning_rate": 1.2118409452652746e-07, | |
| "loss": 0.3878, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 2.904548006737788, | |
| "grad_norm": 0.11910803012729669, | |
| "learning_rate": 1.0992787890011569e-07, | |
| "loss": 0.3872, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 2.909039865244245, | |
| "grad_norm": 0.11475416604465803, | |
| "learning_rate": 9.92187818529633e-08, | |
| "loss": 0.3755, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 2.913531723750702, | |
| "grad_norm": 0.11778215890810805, | |
| "learning_rate": 8.905709796147576e-08, | |
| "loss": 0.3866, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 2.9180235822571587, | |
| "grad_norm": 0.11375812275621636, | |
| "learning_rate": 7.944310674430133e-08, | |
| "loss": 0.3885, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 2.922515440763616, | |
| "grad_norm": 0.11767172933181524, | |
| "learning_rate": 7.037707265463711e-08, | |
| "loss": 0.3845, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 2.927007299270073, | |
| "grad_norm": 0.11510040456362815, | |
| "learning_rate": 6.185924507296604e-08, | |
| "loss": 0.3846, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 2.93149915777653, | |
| "grad_norm": 0.11220358655401662, | |
| "learning_rate": 5.388985830018012e-08, | |
| "loss": 0.3787, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 2.935991016282987, | |
| "grad_norm": 0.11516277078250776, | |
| "learning_rate": 4.646913155115007e-08, | |
| "loss": 0.383, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 2.9404828747894443, | |
| "grad_norm": 0.11788257039366141, | |
| "learning_rate": 3.9597268948690096e-08, | |
| "loss": 0.3727, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 2.944974733295901, | |
| "grad_norm": 0.1159263352388678, | |
| "learning_rate": 3.3274459517940174e-08, | |
| "loss": 0.369, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 2.9494665918023584, | |
| "grad_norm": 0.12171180722391726, | |
| "learning_rate": 2.7500877181172446e-08, | |
| "loss": 0.4033, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 2.953958450308815, | |
| "grad_norm": 0.11540273319295859, | |
| "learning_rate": 2.227668075300393e-08, | |
| "loss": 0.3785, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 2.958450308815272, | |
| "grad_norm": 0.1171211470134919, | |
| "learning_rate": 1.7602013936024454e-08, | |
| "loss": 0.3847, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 2.9629421673217293, | |
| "grad_norm": 0.12098501893941271, | |
| "learning_rate": 1.3477005316853142e-08, | |
| "loss": 0.3961, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 2.9674340258281866, | |
| "grad_norm": 0.11924096917621506, | |
| "learning_rate": 9.901768362590158e-09, | |
| "loss": 0.38, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 2.9719258843346434, | |
| "grad_norm": 0.11543501600006961, | |
| "learning_rate": 6.876401417703627e-09, | |
| "loss": 0.3864, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 2.9764177428411003, | |
| "grad_norm": 0.12011641995061609, | |
| "learning_rate": 4.400987701325132e-09, | |
| "loss": 0.3861, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 2.9809096013475576, | |
| "grad_norm": 0.1207065593897211, | |
| "learning_rate": 2.475595304949341e-09, | |
| "loss": 0.3867, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 2.985401459854015, | |
| "grad_norm": 0.11391803224673658, | |
| "learning_rate": 1.1002771905777033e-09, | |
| "loss": 0.3774, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 2.9898933183604717, | |
| "grad_norm": 0.11504162612683114, | |
| "learning_rate": 2.750711892485214e-10, | |
| "loss": 0.3814, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 2.9943851768669285, | |
| "grad_norm": 0.1203643342690432, | |
| "learning_rate": 0.0, | |
| "loss": 0.388, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 2.9943851768669285, | |
| "step": 666, | |
| "total_flos": 7.06867120956244e+18, | |
| "train_loss": 0.4409467803912836, | |
| "train_runtime": 63390.4305, | |
| "train_samples_per_second": 5.393, | |
| "train_steps_per_second": 0.011 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 666, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 7.06867120956244e+18, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
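The schedule above anneals the learning rate to exactly 0.0 at "max_steps": 666, and "should_training_stop": true in the TrainerControl state indicates the run completed rather than being interrupted. For inspecting a state file like this one, here is a minimal sketch of a loader and plotting script; it assumes the JSON above is saved under the Hugging Face Trainer's default file name, trainer_state.json, and the script itself is illustrative rather than part of the logged run.

```python
# Minimal sketch: load a Trainer state file and plot its log_history.
# Assumption: the JSON above is stored as "trainer_state.json" in the
# working directory; the path and plotting choices are illustrative.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step entries carry "loss"/"learning_rate"; the final summary entry
# (train_loss, train_runtime, ...) does not, so filter on the "loss" key.
entries = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]
lrs = [e["learning_rate"] for e in entries]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True, figsize=(8, 6))
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("global step")
fig.tight_layout()
plt.show()

# The last log_history entry is the run summary written at the end of training.
summary = state["log_history"][-1]
print(f"train_loss={summary['train_loss']}, runtime={summary['train_runtime']}s")
```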