| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 0.9998523628652342, |
| "eval_steps": 500, |
| "global_step": 4656, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.010737246164789885, |
| "grad_norm": 122.58504486083984, |
| "learning_rate": 3.5714285714285718e-06, |
| "loss": 25.5971, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.02147449232957977, |
| "grad_norm": 92.14105224609375, |
| "learning_rate": 7.1428571428571436e-06, |
| "loss": 15.8095, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.03221173849436966, |
| "grad_norm": 88.2498779296875, |
| "learning_rate": 9.999879015387978e-06, |
| "loss": 14.4751, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.04294898465915954, |
| "grad_norm": 79.90851593017578, |
| "learning_rate": 9.995645168701038e-06, |
| "loss": 10.6395, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.05368623082394943, |
| "grad_norm": 98.06950378417969, |
| "learning_rate": 9.98536794504998e-06, |
| "loss": 11.1393, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.06442347698873932, |
| "grad_norm": 95.09337615966797, |
| "learning_rate": 9.969059777090564e-06, |
| "loss": 11.2403, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.0751607231535292, |
| "grad_norm": 94.5640869140625, |
| "learning_rate": 9.946740393286928e-06, |
| "loss": 10.161, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.08589796931831908, |
| "grad_norm": 45.75733947753906, |
| "learning_rate": 9.918436794045507e-06, |
| "loss": 9.6319, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.09663521548310897, |
| "grad_norm": 44.17033767700195, |
| "learning_rate": 9.884183219051837e-06, |
| "loss": 10.4065, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.10737246164789886, |
| "grad_norm": 41.16874694824219, |
| "learning_rate": 9.844021105849837e-06, |
| "loss": 7.8502, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.11810970781268874, |
| "grad_norm": 84.32959747314453, |
| "learning_rate": 9.797999039713586e-06, |
| "loss": 9.3041, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.12884695397747864, |
| "grad_norm": 39.66738510131836, |
| "learning_rate": 9.746172694872332e-06, |
| "loss": 8.5279, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.13958420014226852, |
| "grad_norm": 39.50006866455078, |
| "learning_rate": 9.688604767159736e-06, |
| "loss": 9.5201, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.1503214463070584, |
| "grad_norm": 38.226261138916016, |
| "learning_rate": 9.62536489816892e-06, |
| "loss": 9.9717, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.16105869247184829, |
| "grad_norm": 35.42646789550781, |
| "learning_rate": 9.556529591005001e-06, |
| "loss": 9.0548, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.17179593863663817, |
| "grad_norm": 44.124263763427734, |
| "learning_rate": 9.482182117737066e-06, |
| "loss": 8.9763, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.18253318480142805, |
| "grad_norm": 43.76786422729492, |
| "learning_rate": 9.402412418661541e-06, |
| "loss": 9.8142, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.19327043096621793, |
| "grad_norm": 34.88469314575195, |
| "learning_rate": 9.317316993498788e-06, |
| "loss": 8.6092, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.20400767713100781, |
| "grad_norm": 38.54001998901367, |
| "learning_rate": 9.226998784654606e-06, |
| "loss": 7.4996, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.21474492329579772, |
| "grad_norm": 74.26197814941406, |
| "learning_rate": 9.131567052687811e-06, |
| "loss": 9.0709, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.2254821694605876, |
| "grad_norm": 41.04670333862305, |
| "learning_rate": 9.03113724413456e-06, |
| "loss": 10.0284, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.2362194156253775, |
| "grad_norm": 42.67012405395508, |
| "learning_rate": 8.925830851849338e-06, |
| "loss": 7.9923, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.24695666179016737, |
| "grad_norm": 49.978736877441406, |
| "learning_rate": 8.815775268031514e-06, |
| "loss": 8.8409, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.2576939079549573, |
| "grad_norm": 80.3055419921875, |
| "learning_rate": 8.701103630115303e-06, |
| "loss": 8.9708, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.26843115411974716, |
| "grad_norm": 31.320579528808594, |
| "learning_rate": 8.581954659709549e-06, |
| "loss": 9.6101, |
| "step": 1250 |
| }, |
| { |
| "epoch": 0.27916840028453704, |
| "grad_norm": 35.097434997558594, |
| "learning_rate": 8.458472494782169e-06, |
| "loss": 9.1278, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.2899056464493269, |
| "grad_norm": 79.29938507080078, |
| "learning_rate": 8.330806515292271e-06, |
| "loss": 8.365, |
| "step": 1350 |
| }, |
| { |
| "epoch": 0.3006428926141168, |
| "grad_norm": 35.84828186035156, |
| "learning_rate": 8.199111162480871e-06, |
| "loss": 9.4828, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.3113801387789067, |
| "grad_norm": 28.13869857788086, |
| "learning_rate": 8.063545752038854e-06, |
| "loss": 7.6841, |
| "step": 1450 |
| }, |
| { |
| "epoch": 0.32211738494369657, |
| "grad_norm": 48.257423400878906, |
| "learning_rate": 7.924274281378153e-06, |
| "loss": 7.3778, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.33285463110848645, |
| "grad_norm": 39.27827835083008, |
| "learning_rate": 7.781465231239318e-06, |
| "loss": 10.1326, |
| "step": 1550 |
| }, |
| { |
| "epoch": 0.34359187727327634, |
| "grad_norm": 29.741004943847656, |
| "learning_rate": 7.635291361875474e-06, |
| "loss": 8.61, |
| "step": 1600 |
| }, |
| { |
| "epoch": 0.3543291234380662, |
| "grad_norm": 39.182769775390625, |
| "learning_rate": 7.485929504059234e-06, |
| "loss": 8.702, |
| "step": 1650 |
| }, |
| { |
| "epoch": 0.3650663696028561, |
| "grad_norm": 32.254554748535156, |
| "learning_rate": 7.333560345165371e-06, |
| "loss": 7.5357, |
| "step": 1700 |
| }, |
| { |
| "epoch": 0.375803615767646, |
| "grad_norm": 28.51645851135254, |
| "learning_rate": 7.178368210588067e-06, |
| "loss": 7.2457, |
| "step": 1750 |
| }, |
| { |
| "epoch": 0.38654086193243586, |
| "grad_norm": 28.341947555541992, |
| "learning_rate": 7.020540840757124e-06, |
| "loss": 9.2959, |
| "step": 1800 |
| }, |
| { |
| "epoch": 0.39727810809722575, |
| "grad_norm": 42.67914581298828, |
| "learning_rate": 6.860269164022921e-06, |
| "loss": 9.1062, |
| "step": 1850 |
| }, |
| { |
| "epoch": 0.40801535426201563, |
| "grad_norm": 31.293323516845703, |
| "learning_rate": 6.697747065684851e-06, |
| "loss": 9.5109, |
| "step": 1900 |
| }, |
| { |
| "epoch": 0.4187526004268055, |
| "grad_norm": 35.099021911621094, |
| "learning_rate": 6.5331711534426326e-06, |
| "loss": 8.1034, |
| "step": 1950 |
| }, |
| { |
| "epoch": 0.42948984659159545, |
| "grad_norm": 35.72314453125, |
| "learning_rate": 6.366740519554286e-06, |
| "loss": 7.9167, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.44022709275638533, |
| "grad_norm": 73.83097076416016, |
| "learning_rate": 6.198656499988444e-06, |
| "loss": 8.5164, |
| "step": 2050 |
| }, |
| { |
| "epoch": 0.4509643389211752, |
| "grad_norm": 30.618099212646484, |
| "learning_rate": 6.029122430862373e-06, |
| "loss": 9.6928, |
| "step": 2100 |
| }, |
| { |
| "epoch": 0.4617015850859651, |
| "grad_norm": 74.77922058105469, |
| "learning_rate": 5.858343402460391e-06, |
| "loss": 8.1368, |
| "step": 2150 |
| }, |
| { |
| "epoch": 0.472438831250755, |
| "grad_norm": 26.513835906982422, |
| "learning_rate": 5.68652601113019e-06, |
| "loss": 7.5154, |
| "step": 2200 |
| }, |
| { |
| "epoch": 0.48317607741554486, |
| "grad_norm": 70.47901916503906, |
| "learning_rate": 5.513878109357228e-06, |
| "loss": 8.6317, |
| "step": 2250 |
| }, |
| { |
| "epoch": 0.49391332358033474, |
| "grad_norm": 33.31165313720703, |
| "learning_rate": 5.3406085543195555e-06, |
| "loss": 9.8199, |
| "step": 2300 |
| }, |
| { |
| "epoch": 0.5046505697451246, |
| "grad_norm": 36.43808364868164, |
| "learning_rate": 5.166926955227224e-06, |
| "loss": 9.8981, |
| "step": 2350 |
| }, |
| { |
| "epoch": 0.5153878159099146, |
| "grad_norm": 35.49153137207031, |
| "learning_rate": 4.993043419751933e-06, |
| "loss": 7.7103, |
| "step": 2400 |
| }, |
| { |
| "epoch": 0.5261250620747044, |
| "grad_norm": 27.446149826049805, |
| "learning_rate": 4.8191682998536905e-06, |
| "loss": 7.0446, |
| "step": 2450 |
| }, |
| { |
| "epoch": 0.5368623082394943, |
| "grad_norm": 35.19398498535156, |
| "learning_rate": 4.645511937311934e-06, |
| "loss": 8.806, |
| "step": 2500 |
| }, |
| { |
| "epoch": 0.5475995544042842, |
| "grad_norm": 77.66211700439453, |
| "learning_rate": 4.472284409268976e-06, |
| "loss": 10.2757, |
| "step": 2550 |
| }, |
| { |
| "epoch": 0.5583368005690741, |
| "grad_norm": 29.25933837890625, |
| "learning_rate": 4.299695274093593e-06, |
| "loss": 9.8498, |
| "step": 2600 |
| }, |
| { |
| "epoch": 0.569074046733864, |
| "grad_norm": 32.54759979248047, |
| "learning_rate": 4.1279533178721755e-06, |
| "loss": 9.312, |
| "step": 2650 |
| }, |
| { |
| "epoch": 0.5798112928986539, |
| "grad_norm": 77.79645538330078, |
| "learning_rate": 3.957266301834145e-06, |
| "loss": 8.5012, |
| "step": 2700 |
| }, |
| { |
| "epoch": 0.5905485390634437, |
| "grad_norm": 30.680482864379883, |
| "learning_rate": 3.7878407110171646e-06, |
| "loss": 8.1592, |
| "step": 2750 |
| }, |
| { |
| "epoch": 0.6012857852282336, |
| "grad_norm": 30.990217208862305, |
| "learning_rate": 3.6198815044761847e-06, |
| "loss": 9.4135, |
| "step": 2800 |
| }, |
| { |
| "epoch": 0.6120230313930235, |
| "grad_norm": 34.311153411865234, |
| "learning_rate": 3.4535918673385456e-06, |
| "loss": 8.7601, |
| "step": 2850 |
| }, |
| { |
| "epoch": 0.6227602775578134, |
| "grad_norm": 34.08210754394531, |
| "learning_rate": 3.2891729650050096e-06, |
| "loss": 8.4742, |
| "step": 2900 |
| }, |
| { |
| "epoch": 0.6334975237226033, |
| "grad_norm": 30.370546340942383, |
| "learning_rate": 3.1268236997941535e-06, |
| "loss": 7.6472, |
| "step": 2950 |
| }, |
| { |
| "epoch": 0.6442347698873931, |
| "grad_norm": 76.85076141357422, |
| "learning_rate": 2.966740470324451e-06, |
| "loss": 10.3334, |
| "step": 3000 |
| }, |
| { |
| "epoch": 0.654972016052183, |
| "grad_norm": 33.31392288208008, |
| "learning_rate": 2.8091169339251644e-06, |
| "loss": 8.1425, |
| "step": 3050 |
| }, |
| { |
| "epoch": 0.6657092622169729, |
| "grad_norm": 28.994285583496094, |
| "learning_rate": 2.654143772363455e-06, |
| "loss": 8.2618, |
| "step": 3100 |
| }, |
| { |
| "epoch": 0.6764465083817628, |
| "grad_norm": 33.96207809448242, |
| "learning_rate": 2.502008461171114e-06, |
| "loss": 7.9787, |
| "step": 3150 |
| }, |
| { |
| "epoch": 0.6871837545465527, |
| "grad_norm": 44.06400680541992, |
| "learning_rate": 2.352895042849965e-06, |
| "loss": 8.0353, |
| "step": 3200 |
| }, |
| { |
| "epoch": 0.6979210007113426, |
| "grad_norm": 35.88554000854492, |
| "learning_rate": 2.20698390423032e-06, |
| "loss": 9.3106, |
| "step": 3250 |
| }, |
| { |
| "epoch": 0.7086582468761324, |
| "grad_norm": 31.703388214111328, |
| "learning_rate": 2.0644515582517803e-06, |
| "loss": 8.5185, |
| "step": 3300 |
| }, |
| { |
| "epoch": 0.7193954930409223, |
| "grad_norm": 34.17527770996094, |
| "learning_rate": 1.9254704304304174e-06, |
| "loss": 9.1151, |
| "step": 3350 |
| }, |
| { |
| "epoch": 0.7301327392057122, |
| "grad_norm": 29.952329635620117, |
| "learning_rate": 1.7902086502706256e-06, |
| "loss": 7.4807, |
| "step": 3400 |
| }, |
| { |
| "epoch": 0.7408699853705021, |
| "grad_norm": 29.618684768676758, |
| "learning_rate": 1.658829847873965e-06, |
| "loss": 8.402, |
| "step": 3450 |
| }, |
| { |
| "epoch": 0.751607231535292, |
| "grad_norm": 35.65748596191406, |
| "learning_rate": 1.5314929559910985e-06, |
| "loss": 8.7392, |
| "step": 3500 |
| }, |
| { |
| "epoch": 0.7623444777000818, |
| "grad_norm": 40.38413619995117, |
| "learning_rate": 1.4083520177562154e-06, |
| "loss": 6.4324, |
| "step": 3550 |
| }, |
| { |
| "epoch": 0.7730817238648717, |
| "grad_norm": 38.58983612060547, |
| "learning_rate": 1.2895560003365837e-06, |
| "loss": 9.4396, |
| "step": 3600 |
| }, |
| { |
| "epoch": 0.7838189700296616, |
| "grad_norm": 33.327510833740234, |
| "learning_rate": 1.1752486147226505e-06, |
| "loss": 7.8763, |
| "step": 3650 |
| }, |
| { |
| "epoch": 0.7945562161944515, |
| "grad_norm": 31.827482223510742, |
| "learning_rate": 1.0655681418766772e-06, |
| "loss": 8.1159, |
| "step": 3700 |
| }, |
| { |
| "epoch": 0.8052934623592414, |
| "grad_norm": 37.83740997314453, |
| "learning_rate": 9.60647265450249e-07, |
| "loss": 7.1248, |
| "step": 3750 |
| }, |
| { |
| "epoch": 0.8160307085240313, |
| "grad_norm": 68.32665252685547, |
| "learning_rate": 8.60612911273011e-07, |
| "loss": 8.8583, |
| "step": 3800 |
| }, |
| { |
| "epoch": 0.8267679546888211, |
| "grad_norm": 28.628252029418945, |
| "learning_rate": 7.655860938068071e-07, |
| "loss": 6.5486, |
| "step": 3850 |
| }, |
| { |
| "epoch": 0.837505200853611, |
| "grad_norm": 35.551815032958984, |
| "learning_rate": 6.756817697509755e-07, |
| "loss": 8.1337, |
| "step": 3900 |
| }, |
| { |
| "epoch": 0.8482424470184009, |
| "grad_norm": 29.21653175354004, |
| "learning_rate": 5.910086989758862e-07, |
| "loss": 7.0004, |
| "step": 3950 |
| }, |
| { |
| "epoch": 0.8589796931831909, |
| "grad_norm": 28.43161392211914, |
| "learning_rate": 5.11669312952977e-07, |
| "loss": 8.2137, |
| "step": 4000 |
| }, |
| { |
| "epoch": 0.8697169393479808, |
| "grad_norm": 27.239362716674805, |
| "learning_rate": 4.377595908404225e-07, |
| "loss": 7.9611, |
| "step": 4050 |
| }, |
| { |
| "epoch": 0.8804541855127707, |
| "grad_norm": 27.20981788635254, |
| "learning_rate": 3.693689433743658e-07, |
| "loss": 8.2071, |
| "step": 4100 |
| }, |
| { |
| "epoch": 0.8911914316775605, |
| "grad_norm": 30.86234474182129, |
| "learning_rate": 3.065801047061517e-07, |
| "loss": 6.6662, |
| "step": 4150 |
| }, |
| { |
| "epoch": 0.9019286778423504, |
| "grad_norm": 72.03483581542969, |
| "learning_rate": 2.4946903231642727e-07, |
| "loss": 9.2256, |
| "step": 4200 |
| }, |
| { |
| "epoch": 0.9126659240071403, |
| "grad_norm": 35.229248046875, |
| "learning_rate": 1.9810481512716638e-07, |
| "loss": 7.8211, |
| "step": 4250 |
| }, |
| { |
| "epoch": 0.9234031701719302, |
| "grad_norm": 29.196678161621094, |
| "learning_rate": 1.5254958992280022e-07, |
| "loss": 7.9577, |
| "step": 4300 |
| }, |
| { |
| "epoch": 0.9341404163367201, |
| "grad_norm": 31.289777755737305, |
| "learning_rate": 1.128584661815435e-07, |
| "loss": 7.572, |
| "step": 4350 |
| }, |
| { |
| "epoch": 0.94487766250151, |
| "grad_norm": 29.68185043334961, |
| "learning_rate": 7.907945940786033e-08, |
| "loss": 7.5517, |
| "step": 4400 |
| }, |
| { |
| "epoch": 0.9556149086662998, |
| "grad_norm": 28.52676773071289, |
| "learning_rate": 5.125343304671459e-08, |
| "loss": 10.374, |
| "step": 4450 |
| }, |
| { |
| "epoch": 0.9663521548310897, |
| "grad_norm": 34.890655517578125, |
| "learning_rate": 2.9414049049872883e-08, |
| "loss": 7.1905, |
| "step": 4500 |
| }, |
| { |
| "epoch": 0.9770894009958796, |
| "grad_norm": 40.517208099365234, |
| "learning_rate": 1.35877271540652e-08, |
| "loss": 8.6259, |
| "step": 4550 |
| }, |
| { |
| "epoch": 0.9878266471606695, |
| "grad_norm": 29.639062881469727, |
| "learning_rate": 3.7936129202648106e-09, |
| "loss": 6.9152, |
| "step": 4600 |
| }, |
| { |
| "epoch": 0.9985638933254594, |
| "grad_norm": 43.505428314208984, |
| "learning_rate": 4.3554572743409463e-11, |
| "loss": 10.0409, |
| "step": 4650 |
| }, |
| { |
| "epoch": 0.9998523628652342, |
| "step": 4656, |
| "total_flos": 3.2168639765195784e+19, |
| "train_loss": 8.979798909315129, |
| "train_runtime": 72260.7648, |
| "train_samples_per_second": 4.124, |
| "train_steps_per_second": 0.064 |
| } |
| ], |
| "logging_steps": 50, |
| "max_steps": 4656, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 1, |
| "save_steps": 1000, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 3.2168639765195784e+19, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |