{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 8.559201141226819,
  "eval_steps": 500,
  "global_step": 6000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.07132667617689016,
      "grad_norm": 8.374612808227539,
      "learning_rate": 2.9400000000000002e-06,
      "loss": 1.4876,
      "step": 50
    },
    {
      "epoch": 0.14265335235378032,
      "grad_norm": 1.39237642288208,
      "learning_rate": 2.9772093023255814e-06,
      "loss": 1.2975,
      "step": 100
    },
    {
      "epoch": 0.21398002853067047,
      "grad_norm": 1.192126989364624,
      "learning_rate": 2.953953488372093e-06,
      "loss": 1.2197,
      "step": 150
    },
    {
      "epoch": 0.28530670470756064,
      "grad_norm": 1.450819730758667,
      "learning_rate": 2.9306976744186045e-06,
      "loss": 1.1853,
      "step": 200
    },
    {
      "epoch": 0.3566333808844508,
      "grad_norm": 1.5788187980651855,
      "learning_rate": 2.9074418604651165e-06,
      "loss": 1.1815,
      "step": 250
    },
    {
      "epoch": 0.42796005706134094,
      "grad_norm": 1.7196673154830933,
      "learning_rate": 2.884186046511628e-06,
      "loss": 1.1645,
      "step": 300
    },
    {
      "epoch": 0.4992867332382311,
      "grad_norm": 1.5300871133804321,
      "learning_rate": 2.8609302325581396e-06,
      "loss": 1.1449,
      "step": 350
    },
    {
      "epoch": 0.5706134094151213,
      "grad_norm": 1.5986438989639282,
      "learning_rate": 2.837674418604651e-06,
      "loss": 1.142,
      "step": 400
    },
    {
      "epoch": 0.6419400855920114,
      "grad_norm": 1.7197072505950928,
      "learning_rate": 2.8144186046511628e-06,
      "loss": 1.135,
      "step": 450
    },
    {
      "epoch": 0.7132667617689016,
      "grad_norm": 1.5155837535858154,
      "learning_rate": 2.7911627906976743e-06,
      "loss": 1.118,
      "step": 500
    },
    {
      "epoch": 0.7845934379457917,
      "grad_norm": 1.4595504999160767,
      "learning_rate": 2.767906976744186e-06,
      "loss": 1.1237,
      "step": 550
    },
    {
      "epoch": 0.8559201141226819,
      "grad_norm": 1.3575159311294556,
      "learning_rate": 2.7446511627906975e-06,
      "loss": 1.1367,
      "step": 600
    },
    {
      "epoch": 0.927246790299572,
      "grad_norm": 1.3612666130065918,
      "learning_rate": 2.7213953488372094e-06,
      "loss": 1.1206,
      "step": 650
    },
    {
      "epoch": 0.9985734664764622,
      "grad_norm": 1.3593813180923462,
      "learning_rate": 2.698139534883721e-06,
      "loss": 1.1385,
      "step": 700
    },
    {
      "epoch": 1.0699001426533523,
      "grad_norm": 0.8913443088531494,
      "learning_rate": 2.6748837209302326e-06,
      "loss": 1.108,
      "step": 750
    },
    {
      "epoch": 1.1412268188302426,
      "grad_norm": 0.9751181602478027,
      "learning_rate": 2.651627906976744e-06,
      "loss": 1.0941,
      "step": 800
    },
    {
      "epoch": 1.2125534950071326,
      "grad_norm": 0.9726682901382446,
      "learning_rate": 2.6283720930232557e-06,
      "loss": 1.1093,
      "step": 850
    },
    {
      "epoch": 1.2838801711840229,
      "grad_norm": 0.87285315990448,
      "learning_rate": 2.6051162790697673e-06,
      "loss": 1.135,
      "step": 900
    },
    {
      "epoch": 1.355206847360913,
      "grad_norm": 0.8699735403060913,
      "learning_rate": 2.581860465116279e-06,
      "loss": 1.104,
      "step": 950
    },
    {
      "epoch": 1.4265335235378032,
      "grad_norm": 1.0313692092895508,
      "learning_rate": 2.558604651162791e-06,
      "loss": 1.1197,
      "step": 1000
    },
    {
      "epoch": 1.4978601997146934,
      "grad_norm": 0.9118053317070007,
      "learning_rate": 2.5353488372093024e-06,
      "loss": 1.096,
      "step": 1050
    },
    {
      "epoch": 1.5691868758915835,
      "grad_norm": 0.9036098122596741,
      "learning_rate": 2.512093023255814e-06,
      "loss": 1.0938,
      "step": 1100
    },
    {
      "epoch": 1.6405135520684735,
      "grad_norm": 1.0469361543655396,
      "learning_rate": 2.4888372093023255e-06,
      "loss": 1.093,
      "step": 1150
    },
    {
      "epoch": 1.7118402282453637,
      "grad_norm": 0.9310563206672668,
      "learning_rate": 2.4655813953488375e-06,
      "loss": 1.1114,
      "step": 1200
    },
    {
      "epoch": 1.783166904422254,
      "grad_norm": 0.8712385296821594,
      "learning_rate": 2.442325581395349e-06,
      "loss": 1.0992,
      "step": 1250
    },
    {
      "epoch": 1.854493580599144,
      "grad_norm": 0.9832571148872375,
      "learning_rate": 2.4190697674418606e-06,
      "loss": 1.0896,
      "step": 1300
    },
    {
      "epoch": 1.925820256776034,
      "grad_norm": 0.9631858468055725,
      "learning_rate": 2.395813953488372e-06,
      "loss": 1.0729,
      "step": 1350
    },
    {
      "epoch": 1.9971469329529246,
      "grad_norm": 0.9085518717765808,
      "learning_rate": 2.3725581395348838e-06,
      "loss": 1.1032,
      "step": 1400
    },
    {
      "epoch": 2.0684736091298146,
      "grad_norm": 0.7245323061943054,
      "learning_rate": 2.3493023255813958e-06,
      "loss": 1.1042,
      "step": 1450
    },
    {
      "epoch": 2.1398002853067046,
      "grad_norm": 0.8104898929595947,
      "learning_rate": 2.3260465116279073e-06,
      "loss": 1.0829,
      "step": 1500
    },
    {
      "epoch": 2.2111269614835947,
      "grad_norm": 0.7655696272850037,
      "learning_rate": 2.302790697674419e-06,
      "loss": 1.0701,
      "step": 1550
    },
    {
      "epoch": 2.282453637660485,
      "grad_norm": 0.6674259305000305,
      "learning_rate": 2.2795348837209304e-06,
      "loss": 1.0831,
      "step": 1600
    },
    {
      "epoch": 2.353780313837375,
      "grad_norm": 0.7704442739486694,
      "learning_rate": 2.256279069767442e-06,
      "loss": 1.0916,
      "step": 1650
    },
    {
      "epoch": 2.425106990014265,
      "grad_norm": 0.9494684934616089,
      "learning_rate": 2.2330232558139536e-06,
      "loss": 1.0756,
      "step": 1700
    },
    {
      "epoch": 2.4964336661911553,
      "grad_norm": 0.8056609630584717,
      "learning_rate": 2.209767441860465e-06,
      "loss": 1.0921,
      "step": 1750
    },
    {
      "epoch": 2.5677603423680457,
      "grad_norm": 0.736752986907959,
      "learning_rate": 2.1865116279069767e-06,
      "loss": 1.1026,
      "step": 1800
    },
    {
      "epoch": 2.6390870185449358,
      "grad_norm": 0.676659882068634,
      "learning_rate": 2.1632558139534887e-06,
      "loss": 1.127,
      "step": 1850
    },
    {
      "epoch": 2.710413694721826,
      "grad_norm": 0.7117391228675842,
      "learning_rate": 2.1400000000000003e-06,
      "loss": 1.064,
      "step": 1900
    },
    {
      "epoch": 2.7817403708987163,
      "grad_norm": 0.649662971496582,
      "learning_rate": 2.116744186046512e-06,
      "loss": 1.0887,
      "step": 1950
    },
    {
      "epoch": 2.8530670470756063,
      "grad_norm": 0.7031757831573486,
      "learning_rate": 2.0934883720930234e-06,
      "loss": 1.084,
      "step": 2000
    },
    {
      "epoch": 2.9243937232524964,
      "grad_norm": 0.8348050117492676,
      "learning_rate": 2.070232558139535e-06,
      "loss": 1.0606,
      "step": 2050
    },
    {
      "epoch": 2.995720399429387,
      "grad_norm": 0.7338863015174866,
      "learning_rate": 2.0469767441860465e-06,
      "loss": 1.0748,
      "step": 2100
    },
    {
      "epoch": 3.067047075606277,
      "grad_norm": 0.8033592700958252,
      "learning_rate": 2.023720930232558e-06,
      "loss": 1.0861,
      "step": 2150
    },
    {
      "epoch": 3.138373751783167,
      "grad_norm": 0.7755851745605469,
      "learning_rate": 2.0004651162790697e-06,
      "loss": 1.0839,
      "step": 2200
    },
    {
      "epoch": 3.209700427960057,
      "grad_norm": 0.7161579728126526,
      "learning_rate": 1.9772093023255812e-06,
      "loss": 1.0688,
      "step": 2250
    },
    {
      "epoch": 3.281027104136947,
      "grad_norm": 0.854751706123352,
      "learning_rate": 1.953953488372093e-06,
      "loss": 1.0622,
      "step": 2300
    },
    {
      "epoch": 3.3523537803138375,
      "grad_norm": 0.7753428220748901,
      "learning_rate": 1.9306976744186048e-06,
      "loss": 1.0677,
      "step": 2350
    },
    {
      "epoch": 3.4236804564907275,
      "grad_norm": 0.7227015495300293,
      "learning_rate": 1.9074418604651163e-06,
      "loss": 1.0706,
      "step": 2400
    },
    {
      "epoch": 3.4950071326676175,
      "grad_norm": 0.7708733677864075,
      "learning_rate": 1.884186046511628e-06,
      "loss": 1.0783,
      "step": 2450
    },
    {
      "epoch": 3.566333808844508,
      "grad_norm": 0.7089836001396179,
      "learning_rate": 1.8609302325581395e-06,
      "loss": 1.0783,
      "step": 2500
    },
    {
      "epoch": 3.637660485021398,
      "grad_norm": 0.7261233329772949,
      "learning_rate": 1.837674418604651e-06,
      "loss": 1.0997,
      "step": 2550
    },
    {
      "epoch": 3.708987161198288,
      "grad_norm": 0.792870819568634,
      "learning_rate": 1.8144186046511628e-06,
      "loss": 1.0904,
      "step": 2600
    },
    {
      "epoch": 3.7803138373751786,
      "grad_norm": 0.7912273406982422,
      "learning_rate": 1.7911627906976744e-06,
      "loss": 1.0609,
      "step": 2650
    },
    {
      "epoch": 3.8516405135520686,
      "grad_norm": 0.8693161010742188,
      "learning_rate": 1.7679069767441861e-06,
      "loss": 1.0507,
      "step": 2700
    },
    {
      "epoch": 3.9229671897289586,
      "grad_norm": 0.8193188905715942,
      "learning_rate": 1.7446511627906977e-06,
      "loss": 1.0802,
      "step": 2750
    },
    {
      "epoch": 3.9942938659058487,
      "grad_norm": 0.8383765816688538,
      "learning_rate": 1.7213953488372095e-06,
      "loss": 1.0747,
      "step": 2800
    },
    {
      "epoch": 4.065620542082739,
      "grad_norm": 0.6324388980865479,
      "learning_rate": 1.698139534883721e-06,
      "loss": 1.0643,
      "step": 2850
    },
    {
      "epoch": 4.136947218259629,
      "grad_norm": 0.6263940930366516,
      "learning_rate": 1.6748837209302326e-06,
      "loss": 1.0707,
      "step": 2900
    },
    {
      "epoch": 4.20827389443652,
      "grad_norm": 0.7516465187072754,
      "learning_rate": 1.6516279069767442e-06,
      "loss": 1.0723,
      "step": 2950
    },
    {
      "epoch": 4.279600570613409,
      "grad_norm": 0.6298231482505798,
      "learning_rate": 1.6283720930232558e-06,
      "loss": 1.0902,
      "step": 3000
    },
    {
      "epoch": 4.3509272467903,
      "grad_norm": 0.7128252387046814,
      "learning_rate": 1.6051162790697673e-06,
      "loss": 1.08,
      "step": 3050
    },
    {
      "epoch": 4.422253922967189,
      "grad_norm": 0.8887324333190918,
      "learning_rate": 1.5818604651162793e-06,
      "loss": 1.064,
      "step": 3100
    },
    {
      "epoch": 4.49358059914408,
      "grad_norm": 0.5942298769950867,
      "learning_rate": 1.5586046511627909e-06,
      "loss": 1.0748,
      "step": 3150
    },
    {
      "epoch": 4.56490727532097,
      "grad_norm": 0.6519397497177124,
      "learning_rate": 1.5353488372093024e-06,
      "loss": 1.049,
      "step": 3200
    },
    {
      "epoch": 4.63623395149786,
      "grad_norm": 0.8501943945884705,
      "learning_rate": 1.512093023255814e-06,
      "loss": 1.0689,
      "step": 3250
    },
    {
      "epoch": 4.70756062767475,
      "grad_norm": 0.8405245542526245,
      "learning_rate": 1.4888372093023256e-06,
      "loss": 1.089,
      "step": 3300
    },
    {
      "epoch": 4.778887303851641,
      "grad_norm": 0.8166652321815491,
      "learning_rate": 1.4655813953488371e-06,
      "loss": 1.0583,
      "step": 3350
    },
    {
      "epoch": 4.85021398002853,
      "grad_norm": 0.6082524061203003,
      "learning_rate": 1.442325581395349e-06,
      "loss": 1.0721,
      "step": 3400
    },
    {
      "epoch": 4.921540656205421,
      "grad_norm": 0.7854369282722473,
      "learning_rate": 1.4190697674418605e-06,
      "loss": 1.0536,
      "step": 3450
    },
    {
      "epoch": 4.9928673323823105,
      "grad_norm": 0.662228524684906,
      "learning_rate": 1.395813953488372e-06,
      "loss": 1.0562,
      "step": 3500
    },
    {
      "epoch": 5.064194008559201,
      "grad_norm": 0.5415034294128418,
      "learning_rate": 1.3725581395348836e-06,
      "loss": 1.0698,
      "step": 3550
    },
    {
      "epoch": 5.1355206847360915,
      "grad_norm": 0.5144311785697937,
      "learning_rate": 1.3493023255813954e-06,
      "loss": 1.0739,
      "step": 3600
    },
    {
      "epoch": 5.206847360912981,
      "grad_norm": 0.6178659796714783,
      "learning_rate": 1.326046511627907e-06,
      "loss": 1.0592,
      "step": 3650
    },
    {
      "epoch": 5.2781740370898715,
      "grad_norm": 0.625819981098175,
      "learning_rate": 1.3027906976744185e-06,
      "loss": 1.0627,
      "step": 3700
    },
    {
      "epoch": 5.349500713266762,
      "grad_norm": 0.605794370174408,
      "learning_rate": 1.2795348837209303e-06,
      "loss": 1.0903,
      "step": 3750
    },
    {
      "epoch": 5.420827389443652,
      "grad_norm": 0.5412514209747314,
      "learning_rate": 1.2562790697674418e-06,
      "loss": 1.038,
      "step": 3800
    },
    {
      "epoch": 5.492154065620542,
      "grad_norm": 0.5697770118713379,
      "learning_rate": 1.2330232558139536e-06,
      "loss": 1.0812,
      "step": 3850
    },
    {
      "epoch": 5.563480741797433,
      "grad_norm": 0.6216000318527222,
      "learning_rate": 1.2097674418604652e-06,
      "loss": 1.0546,
      "step": 3900
    },
    {
      "epoch": 5.634807417974322,
      "grad_norm": 0.6266453266143799,
      "learning_rate": 1.1865116279069768e-06,
      "loss": 1.0684,
      "step": 3950
    },
    {
      "epoch": 5.706134094151213,
      "grad_norm": 0.5270595550537109,
      "learning_rate": 1.1632558139534885e-06,
      "loss": 1.0808,
      "step": 4000
    },
    {
      "epoch": 5.777460770328103,
      "grad_norm": 0.5590298771858215,
      "learning_rate": 1.14e-06,
      "loss": 1.0685,
      "step": 4050
    },
    {
      "epoch": 5.848787446504993,
      "grad_norm": 0.5806601643562317,
      "learning_rate": 1.1167441860465117e-06,
      "loss": 1.0524,
      "step": 4100
    },
    {
      "epoch": 5.920114122681883,
      "grad_norm": 0.595583975315094,
      "learning_rate": 1.0934883720930232e-06,
      "loss": 1.0502,
      "step": 4150
    },
    {
      "epoch": 5.991440798858774,
      "grad_norm": 0.5921752452850342,
      "learning_rate": 1.070232558139535e-06,
      "loss": 1.0579,
      "step": 4200
    },
    {
      "epoch": 6.062767475035663,
      "grad_norm": 0.49118900299072266,
      "learning_rate": 1.0469767441860466e-06,
      "loss": 1.0414,
      "step": 4250
    },
    {
      "epoch": 6.134094151212554,
      "grad_norm": 0.5080968141555786,
      "learning_rate": 1.0237209302325581e-06,
      "loss": 1.0669,
      "step": 4300
    },
    {
      "epoch": 6.205420827389443,
      "grad_norm": 0.5190466642379761,
      "learning_rate": 1.0004651162790697e-06,
      "loss": 1.0815,
      "step": 4350
    },
    {
      "epoch": 6.276747503566334,
      "grad_norm": 0.571312427520752,
      "learning_rate": 9.772093023255815e-07,
      "loss": 1.0586,
      "step": 4400
    },
    {
      "epoch": 6.348074179743224,
      "grad_norm": 0.5268988013267517,
      "learning_rate": 9.53953488372093e-07,
      "loss": 1.0493,
      "step": 4450
    },
    {
      "epoch": 6.419400855920114,
      "grad_norm": 0.5871033072471619,
      "learning_rate": 9.306976744186046e-07,
      "loss": 1.0639,
      "step": 4500
    },
    {
      "epoch": 6.490727532097004,
      "grad_norm": 0.49360474944114685,
      "learning_rate": 9.074418604651163e-07,
      "loss": 1.0544,
      "step": 4550
    },
    {
      "epoch": 6.562054208273894,
      "grad_norm": 0.5026892423629761,
      "learning_rate": 8.841860465116279e-07,
      "loss": 1.0788,
      "step": 4600
    },
    {
      "epoch": 6.633380884450784,
      "grad_norm": 0.6054779291152954,
      "learning_rate": 8.609302325581396e-07,
      "loss": 1.0689,
      "step": 4650
    },
    {
      "epoch": 6.704707560627675,
      "grad_norm": 0.5217434167861938,
      "learning_rate": 8.376744186046512e-07,
      "loss": 1.051,
      "step": 4700
    },
    {
      "epoch": 6.7760342368045645,
      "grad_norm": 0.47993195056915283,
      "learning_rate": 8.144186046511627e-07,
      "loss": 1.0578,
      "step": 4750
    },
    {
      "epoch": 6.847360912981455,
      "grad_norm": 0.5778261423110962,
      "learning_rate": 7.911627906976745e-07,
      "loss": 1.0706,
      "step": 4800
    },
    {
      "epoch": 6.9186875891583455,
      "grad_norm": 0.5452645421028137,
      "learning_rate": 7.679069767441861e-07,
      "loss": 1.0663,
      "step": 4850
    },
    {
      "epoch": 6.990014265335235,
      "grad_norm": 0.5666407346725464,
      "learning_rate": 7.446511627906977e-07,
      "loss": 1.0599,
      "step": 4900
    },
    {
      "epoch": 7.0613409415121255,
      "grad_norm": 0.5317495465278625,
      "learning_rate": 7.213953488372093e-07,
      "loss": 1.0537,
      "step": 4950
    },
    {
      "epoch": 7.132667617689016,
      "grad_norm": 0.5068672299385071,
      "learning_rate": 6.981395348837209e-07,
      "loss": 1.0779,
      "step": 5000
    },
    {
      "epoch": 7.203994293865906,
      "grad_norm": 0.5887683629989624,
      "learning_rate": 6.748837209302326e-07,
      "loss": 1.057,
      "step": 5050
    },
    {
      "epoch": 7.275320970042796,
      "grad_norm": 0.4751645028591156,
      "learning_rate": 6.516279069767442e-07,
      "loss": 1.0531,
      "step": 5100
    },
    {
      "epoch": 7.346647646219687,
      "grad_norm": 0.4856860935688019,
      "learning_rate": 6.283720930232559e-07,
      "loss": 1.0575,
      "step": 5150
    },
    {
      "epoch": 7.417974322396576,
      "grad_norm": 0.5266442894935608,
      "learning_rate": 6.051162790697675e-07,
      "loss": 1.0607,
      "step": 5200
    },
    {
      "epoch": 7.489300998573467,
      "grad_norm": 0.5558798909187317,
      "learning_rate": 5.818604651162791e-07,
      "loss": 1.0653,
      "step": 5250
    },
    {
      "epoch": 7.560627674750357,
      "grad_norm": 0.5203211307525635,
      "learning_rate": 5.586046511627907e-07,
      "loss": 1.0583,
      "step": 5300
    },
    {
      "epoch": 7.631954350927247,
      "grad_norm": 0.4732428789138794,
      "learning_rate": 5.353488372093024e-07,
      "loss": 1.0534,
      "step": 5350
    },
    {
      "epoch": 7.703281027104137,
      "grad_norm": 0.6876837015151978,
      "learning_rate": 5.120930232558139e-07,
      "loss": 1.0562,
      "step": 5400
    },
    {
      "epoch": 7.774607703281027,
      "grad_norm": 0.5170673727989197,
      "learning_rate": 4.888372093023256e-07,
      "loss": 1.066,
      "step": 5450
    },
    {
      "epoch": 7.845934379457917,
      "grad_norm": 0.5236818790435791,
      "learning_rate": 4.6558139534883723e-07,
      "loss": 1.0644,
      "step": 5500
    },
    {
      "epoch": 7.917261055634808,
      "grad_norm": 0.47422999143600464,
      "learning_rate": 4.423255813953489e-07,
      "loss": 1.0594,
      "step": 5550
    },
    {
      "epoch": 7.988587731811697,
      "grad_norm": 0.5537230968475342,
      "learning_rate": 4.1906976744186046e-07,
      "loss": 1.0578,
      "step": 5600
    },
    {
      "epoch": 8.059914407988588,
      "grad_norm": 0.5376424789428711,
      "learning_rate": 3.9581395348837213e-07,
      "loss": 1.0523,
      "step": 5650
    },
    {
      "epoch": 8.131241084165477,
      "grad_norm": 0.4635964334011078,
      "learning_rate": 3.725581395348837e-07,
      "loss": 1.0559,
      "step": 5700
    },
    {
      "epoch": 8.202567760342369,
      "grad_norm": 0.46095508337020874,
      "learning_rate": 3.493023255813953e-07,
      "loss": 1.0825,
      "step": 5750
    },
    {
      "epoch": 8.273894436519258,
      "grad_norm": 0.48418450355529785,
      "learning_rate": 3.26046511627907e-07,
      "loss": 1.0541,
      "step": 5800
    },
    {
      "epoch": 8.345221112696148,
      "grad_norm": 0.45845749974250793,
      "learning_rate": 3.027906976744186e-07,
      "loss": 1.0731,
      "step": 5850
    },
    {
      "epoch": 8.41654778887304,
      "grad_norm": 0.5616029500961304,
      "learning_rate": 2.795348837209302e-07,
      "loss": 1.055,
      "step": 5900
    },
    {
      "epoch": 8.487874465049929,
      "grad_norm": 0.5239433646202087,
      "learning_rate": 2.5627906976744184e-07,
      "loss": 1.0522,
      "step": 5950
    },
    {
      "epoch": 8.559201141226819,
      "grad_norm": 0.46216270327568054,
      "learning_rate": 2.3302325581395349e-07,
      "loss": 1.0674,
      "step": 6000
    }
  ],
  "logging_steps": 50,
  "max_steps": 6500,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.4510459760115139e+18,
  "train_batch_size": 48,
  "trial_name": null,
  "trial_params": null
}