| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 0.4035965693723908, |
| "eval_steps": 1000000, |
| "global_step": 46211, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0043668885046027, |
| "grad_norm": 1.8038700819015503, |
| "learning_rate": 9.99956331114954e-06, |
| "loss": 3.7275, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.0087337770092054, |
| "grad_norm": 1.882214069366455, |
| "learning_rate": 9.99912662229908e-06, |
| "loss": 3.7247, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.013100665513808101, |
| "grad_norm": 1.934370994567871, |
| "learning_rate": 9.99868993344862e-06, |
| "loss": 3.7176, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.0174675540184108, |
| "grad_norm": 1.8633307218551636, |
| "learning_rate": 9.99825324459816e-06, |
| "loss": 3.7196, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.021834442523013503, |
| "grad_norm": 1.8951599597930908, |
| "learning_rate": 9.9978165557477e-06, |
| "loss": 3.7127, |
| "step": 2500 |
| }, |
| { |
| "epoch": 0.026201331027616202, |
| "grad_norm": 1.875725507736206, |
| "learning_rate": 9.997379866897238e-06, |
| "loss": 3.7033, |
| "step": 3000 |
| }, |
| { |
| "epoch": 0.030568219532218905, |
| "grad_norm": 1.8393280506134033, |
| "learning_rate": 9.996943178046778e-06, |
| "loss": 3.7037, |
| "step": 3500 |
| }, |
| { |
| "epoch": 0.0349351080368216, |
| "grad_norm": 2.0136420726776123, |
| "learning_rate": 9.996506489196318e-06, |
| "loss": 3.7021, |
| "step": 4000 |
| }, |
| { |
| "epoch": 0.03930199654142431, |
| "grad_norm": 1.8446974754333496, |
| "learning_rate": 9.996069800345858e-06, |
| "loss": 3.6928, |
| "step": 4500 |
| }, |
| { |
| "epoch": 0.043668885046027006, |
| "grad_norm": 1.8116663694381714, |
| "learning_rate": 9.995633111495399e-06, |
| "loss": 3.6877, |
| "step": 5000 |
| }, |
| { |
| "epoch": 0.048035773550629705, |
| "grad_norm": 1.8685935735702515, |
| "learning_rate": 9.995196422644939e-06, |
| "loss": 3.6815, |
| "step": 5500 |
| }, |
| { |
| "epoch": 0.052402662055232405, |
| "grad_norm": 1.8733059167861938, |
| "learning_rate": 9.994759733794479e-06, |
| "loss": 3.6753, |
| "step": 6000 |
| }, |
| { |
| "epoch": 0.056769550559835104, |
| "grad_norm": 1.9478683471679688, |
| "learning_rate": 9.994323044944017e-06, |
| "loss": 3.6652, |
| "step": 6500 |
| }, |
| { |
| "epoch": 0.06113643906443781, |
| "grad_norm": 1.9699769020080566, |
| "learning_rate": 9.993886356093557e-06, |
| "loss": 3.6716, |
| "step": 7000 |
| }, |
| { |
| "epoch": 0.06550332756904051, |
| "grad_norm": 1.8126554489135742, |
| "learning_rate": 9.993449667243097e-06, |
| "loss": 3.6619, |
| "step": 7500 |
| }, |
| { |
| "epoch": 0.0698702160736432, |
| "grad_norm": 1.965243935585022, |
| "learning_rate": 9.993012978392636e-06, |
| "loss": 3.6587, |
| "step": 8000 |
| }, |
| { |
| "epoch": 0.07423710457824591, |
| "grad_norm": 1.8856313228607178, |
| "learning_rate": 9.992576289542176e-06, |
| "loss": 3.6573, |
| "step": 8500 |
| }, |
| { |
| "epoch": 0.07860399308284861, |
| "grad_norm": 1.9191926717758179, |
| "learning_rate": 9.992139600691716e-06, |
| "loss": 3.6421, |
| "step": 9000 |
| }, |
| { |
| "epoch": 0.0829708815874513, |
| "grad_norm": 1.9937667846679688, |
| "learning_rate": 9.991702911841256e-06, |
| "loss": 3.6399, |
| "step": 9500 |
| }, |
| { |
| "epoch": 0.08733777009205401, |
| "grad_norm": 1.911783218383789, |
| "learning_rate": 9.991266222990796e-06, |
| "loss": 3.641, |
| "step": 10000 |
| }, |
| { |
| "epoch": 0.0917046585966567, |
| "grad_norm": 1.9108695983886719, |
| "learning_rate": 9.990829534140335e-06, |
| "loss": 3.6448, |
| "step": 10500 |
| }, |
| { |
| "epoch": 0.09607154710125941, |
| "grad_norm": 1.9195294380187988, |
| "learning_rate": 9.990392845289875e-06, |
| "loss": 3.6369, |
| "step": 11000 |
| }, |
| { |
| "epoch": 0.10043843560586212, |
| "grad_norm": 1.8656957149505615, |
| "learning_rate": 9.989956156439415e-06, |
| "loss": 3.6278, |
| "step": 11500 |
| }, |
| { |
| "epoch": 0.10480532411046481, |
| "grad_norm": 1.8072896003723145, |
| "learning_rate": 9.989519467588953e-06, |
| "loss": 3.6106, |
| "step": 12000 |
| }, |
| { |
| "epoch": 0.10917221261506752, |
| "grad_norm": 1.8502795696258545, |
| "learning_rate": 9.989082778738493e-06, |
| "loss": 3.6208, |
| "step": 12500 |
| }, |
| { |
| "epoch": 0.11353910111967021, |
| "grad_norm": 1.9243334531784058, |
| "learning_rate": 9.988646089888033e-06, |
| "loss": 3.6175, |
| "step": 13000 |
| }, |
| { |
| "epoch": 0.11790598962427291, |
| "grad_norm": 1.8611966371536255, |
| "learning_rate": 9.988209401037573e-06, |
| "loss": 3.6144, |
| "step": 13500 |
| }, |
| { |
| "epoch": 0.12227287812887562, |
| "grad_norm": 1.8360419273376465, |
| "learning_rate": 9.987772712187114e-06, |
| "loss": 3.6033, |
| "step": 14000 |
| }, |
| { |
| "epoch": 0.1266397666334783, |
| "grad_norm": 1.9218759536743164, |
| "learning_rate": 9.987336023336654e-06, |
| "loss": 3.6078, |
| "step": 14500 |
| }, |
| { |
| "epoch": 0.13100665513808102, |
| "grad_norm": 1.868091344833374, |
| "learning_rate": 9.986899334486194e-06, |
| "loss": 3.6002, |
| "step": 15000 |
| }, |
| { |
| "epoch": 0.13537354364268372, |
| "grad_norm": 1.8368401527404785, |
| "learning_rate": 9.986462645635732e-06, |
| "loss": 3.6028, |
| "step": 15500 |
| }, |
| { |
| "epoch": 0.1397404321472864, |
| "grad_norm": 1.838019847869873, |
| "learning_rate": 9.986025956785272e-06, |
| "loss": 3.5879, |
| "step": 16000 |
| }, |
| { |
| "epoch": 0.1441073206518891, |
| "grad_norm": 1.816603422164917, |
| "learning_rate": 9.985589267934812e-06, |
| "loss": 3.5938, |
| "step": 16500 |
| }, |
| { |
| "epoch": 0.14847420915649182, |
| "grad_norm": 1.8263438940048218, |
| "learning_rate": 9.985152579084352e-06, |
| "loss": 3.5808, |
| "step": 17000 |
| }, |
| { |
| "epoch": 0.15284109766109452, |
| "grad_norm": 1.8684626817703247, |
| "learning_rate": 9.984715890233891e-06, |
| "loss": 3.578, |
| "step": 17500 |
| }, |
| { |
| "epoch": 0.15720798616569723, |
| "grad_norm": 1.9385733604431152, |
| "learning_rate": 9.984279201383431e-06, |
| "loss": 3.5759, |
| "step": 18000 |
| }, |
| { |
| "epoch": 0.1615748746702999, |
| "grad_norm": 1.8138542175292969, |
| "learning_rate": 9.983842512532971e-06, |
| "loss": 3.5804, |
| "step": 18500 |
| }, |
| { |
| "epoch": 0.1659417631749026, |
| "grad_norm": 1.8231014013290405, |
| "learning_rate": 9.98340582368251e-06, |
| "loss": 3.5728, |
| "step": 19000 |
| }, |
| { |
| "epoch": 0.17030865167950532, |
| "grad_norm": 1.820784568786621, |
| "learning_rate": 9.98296913483205e-06, |
| "loss": 3.5699, |
| "step": 19500 |
| }, |
| { |
| "epoch": 0.17467554018410802, |
| "grad_norm": 1.8315212726593018, |
| "learning_rate": 9.98253244598159e-06, |
| "loss": 3.5727, |
| "step": 20000 |
| }, |
| { |
| "epoch": 0.17904242868871073, |
| "grad_norm": 1.8523602485656738, |
| "learning_rate": 9.982095757131129e-06, |
| "loss": 3.5587, |
| "step": 20500 |
| }, |
| { |
| "epoch": 0.1834093171933134, |
| "grad_norm": 1.8133721351623535, |
| "learning_rate": 9.981659068280668e-06, |
| "loss": 3.5656, |
| "step": 21000 |
| }, |
| { |
| "epoch": 0.18777620569791612, |
| "grad_norm": 1.826695203781128, |
| "learning_rate": 9.981222379430208e-06, |
| "loss": 3.5574, |
| "step": 21500 |
| }, |
| { |
| "epoch": 0.19214309420251882, |
| "grad_norm": 1.7944340705871582, |
| "learning_rate": 9.980785690579748e-06, |
| "loss": 3.5574, |
| "step": 22000 |
| }, |
| { |
| "epoch": 0.19650998270712153, |
| "grad_norm": 1.8452637195587158, |
| "learning_rate": 9.980349001729288e-06, |
| "loss": 3.5502, |
| "step": 22500 |
| }, |
| { |
| "epoch": 0.20087687121172423, |
| "grad_norm": 1.8428194522857666, |
| "learning_rate": 9.97991231287883e-06, |
| "loss": 3.5518, |
| "step": 23000 |
| }, |
| { |
| "epoch": 0.2052437597163269, |
| "grad_norm": 1.873677134513855, |
| "learning_rate": 9.97947562402837e-06, |
| "loss": 3.5488, |
| "step": 23500 |
| }, |
| { |
| "epoch": 0.20961064822092962, |
| "grad_norm": 1.819214940071106, |
| "learning_rate": 9.979038935177907e-06, |
| "loss": 3.5398, |
| "step": 24000 |
| }, |
| { |
| "epoch": 0.21397753672553232, |
| "grad_norm": 1.8920146226882935, |
| "learning_rate": 9.978602246327447e-06, |
| "loss": 3.5324, |
| "step": 24500 |
| }, |
| { |
| "epoch": 0.21834442523013503, |
| "grad_norm": 1.831764817237854, |
| "learning_rate": 9.978165557476987e-06, |
| "loss": 3.5342, |
| "step": 25000 |
| }, |
| { |
| "epoch": 0.22271131373473774, |
| "grad_norm": 1.8074841499328613, |
| "learning_rate": 9.977728868626527e-06, |
| "loss": 3.53, |
| "step": 25500 |
| }, |
| { |
| "epoch": 0.22707820223934042, |
| "grad_norm": 1.9136210680007935, |
| "learning_rate": 9.977292179776067e-06, |
| "loss": 3.5342, |
| "step": 26000 |
| }, |
| { |
| "epoch": 0.23144509074394312, |
| "grad_norm": 1.8257259130477905, |
| "learning_rate": 9.976855490925607e-06, |
| "loss": 3.5245, |
| "step": 26500 |
| }, |
| { |
| "epoch": 0.23581197924854583, |
| "grad_norm": 1.9069098234176636, |
| "learning_rate": 9.976418802075146e-06, |
| "loss": 3.5204, |
| "step": 27000 |
| }, |
| { |
| "epoch": 0.24017886775314853, |
| "grad_norm": 1.7865424156188965, |
| "learning_rate": 9.975982113224686e-06, |
| "loss": 3.5283, |
| "step": 27500 |
| }, |
| { |
| "epoch": 0.24454575625775124, |
| "grad_norm": 1.8387185335159302, |
| "learning_rate": 9.975545424374226e-06, |
| "loss": 3.5199, |
| "step": 28000 |
| }, |
| { |
| "epoch": 0.24891264476235392, |
| "grad_norm": 1.8645451068878174, |
| "learning_rate": 9.975108735523766e-06, |
| "loss": 3.5148, |
| "step": 28500 |
| }, |
| { |
| "epoch": 0.2532795332669566, |
| "grad_norm": 1.7989400625228882, |
| "learning_rate": 9.974672046673306e-06, |
| "loss": 3.5133, |
| "step": 29000 |
| }, |
| { |
| "epoch": 0.25764642177155933, |
| "grad_norm": 1.7804815769195557, |
| "learning_rate": 9.974235357822844e-06, |
| "loss": 3.4984, |
| "step": 29500 |
| }, |
| { |
| "epoch": 0.26201331027616204, |
| "grad_norm": 1.844984531402588, |
| "learning_rate": 9.973798668972384e-06, |
| "loss": 3.5089, |
| "step": 30000 |
| }, |
| { |
| "epoch": 0.26638019878076474, |
| "grad_norm": 1.8139082193374634, |
| "learning_rate": 9.973361980121923e-06, |
| "loss": 3.5089, |
| "step": 30500 |
| }, |
| { |
| "epoch": 0.27074708728536745, |
| "grad_norm": 1.8752747774124146, |
| "learning_rate": 9.972925291271463e-06, |
| "loss": 3.5011, |
| "step": 31000 |
| }, |
| { |
| "epoch": 0.27511397578997016, |
| "grad_norm": 1.7818485498428345, |
| "learning_rate": 9.972488602421003e-06, |
| "loss": 3.4952, |
| "step": 31500 |
| }, |
| { |
| "epoch": 0.2794808642945728, |
| "grad_norm": 1.87028169631958, |
| "learning_rate": 9.972051913570545e-06, |
| "loss": 3.4904, |
| "step": 32000 |
| }, |
| { |
| "epoch": 0.2838477527991755, |
| "grad_norm": 1.7847636938095093, |
| "learning_rate": 9.971615224720084e-06, |
| "loss": 3.4925, |
| "step": 32500 |
| }, |
| { |
| "epoch": 0.2882146413037782, |
| "grad_norm": 1.7927122116088867, |
| "learning_rate": 9.971178535869623e-06, |
| "loss": 3.4823, |
| "step": 33000 |
| }, |
| { |
| "epoch": 0.2925815298083809, |
| "grad_norm": 1.7564101219177246, |
| "learning_rate": 9.970741847019162e-06, |
| "loss": 3.483, |
| "step": 33500 |
| }, |
| { |
| "epoch": 0.29694841831298363, |
| "grad_norm": 1.8266777992248535, |
| "learning_rate": 9.970305158168702e-06, |
| "loss": 3.4805, |
| "step": 34000 |
| }, |
| { |
| "epoch": 0.30131530681758634, |
| "grad_norm": 1.7539730072021484, |
| "learning_rate": 9.969868469318242e-06, |
| "loss": 3.4876, |
| "step": 34500 |
| }, |
| { |
| "epoch": 0.30568219532218904, |
| "grad_norm": 1.838841199874878, |
| "learning_rate": 9.969431780467782e-06, |
| "loss": 3.4802, |
| "step": 35000 |
| }, |
| { |
| "epoch": 0.31004908382679175, |
| "grad_norm": 1.7870306968688965, |
| "learning_rate": 9.968995091617322e-06, |
| "loss": 3.4738, |
| "step": 35500 |
| }, |
| { |
| "epoch": 0.31441597233139446, |
| "grad_norm": 1.8143889904022217, |
| "learning_rate": 9.968558402766862e-06, |
| "loss": 3.4714, |
| "step": 36000 |
| }, |
| { |
| "epoch": 0.31878286083599716, |
| "grad_norm": 1.9000575542449951, |
| "learning_rate": 9.968121713916401e-06, |
| "loss": 3.4694, |
| "step": 36500 |
| }, |
| { |
| "epoch": 0.3231497493405998, |
| "grad_norm": 1.8620721101760864, |
| "learning_rate": 9.967685025065941e-06, |
| "loss": 3.4683, |
| "step": 37000 |
| }, |
| { |
| "epoch": 0.3275166378452025, |
| "grad_norm": 1.7763477563858032, |
| "learning_rate": 9.967248336215481e-06, |
| "loss": 3.4687, |
| "step": 37500 |
| }, |
| { |
| "epoch": 0.3318835263498052, |
| "grad_norm": 1.730004906654358, |
| "learning_rate": 9.96681164736502e-06, |
| "loss": 3.4653, |
| "step": 38000 |
| }, |
| { |
| "epoch": 0.33625041485440793, |
| "grad_norm": 1.7360706329345703, |
| "learning_rate": 9.966374958514559e-06, |
| "loss": 3.4659, |
| "step": 38500 |
| }, |
| { |
| "epoch": 0.34061730335901064, |
| "grad_norm": 1.7843414545059204, |
| "learning_rate": 9.965938269664099e-06, |
| "loss": 3.4596, |
| "step": 39000 |
| }, |
| { |
| "epoch": 0.34498419186361334, |
| "grad_norm": 1.7715613842010498, |
| "learning_rate": 9.965501580813639e-06, |
| "loss": 3.4598, |
| "step": 39500 |
| }, |
| { |
| "epoch": 0.34935108036821605, |
| "grad_norm": 1.8175365924835205, |
| "learning_rate": 9.965064891963178e-06, |
| "loss": 3.4525, |
| "step": 40000 |
| }, |
| { |
| "epoch": 0.35371796887281876, |
| "grad_norm": 1.783474326133728, |
| "learning_rate": 9.964628203112718e-06, |
| "loss": 3.4526, |
| "step": 40500 |
| }, |
| { |
| "epoch": 0.35808485737742146, |
| "grad_norm": 1.8786745071411133, |
| "learning_rate": 9.96419151426226e-06, |
| "loss": 3.4535, |
| "step": 41000 |
| }, |
| { |
| "epoch": 0.36245174588202417, |
| "grad_norm": 1.7226436138153076, |
| "learning_rate": 9.9637548254118e-06, |
| "loss": 3.4476, |
| "step": 41500 |
| }, |
| { |
| "epoch": 0.3668186343866268, |
| "grad_norm": 1.805066704750061, |
| "learning_rate": 9.963318136561338e-06, |
| "loss": 3.4484, |
| "step": 42000 |
| }, |
| { |
| "epoch": 0.3711855228912295, |
| "grad_norm": 1.737598180770874, |
| "learning_rate": 9.962881447710878e-06, |
| "loss": 3.4411, |
| "step": 42500 |
| }, |
| { |
| "epoch": 0.37555241139583223, |
| "grad_norm": 1.7667919397354126, |
| "learning_rate": 9.962444758860417e-06, |
| "loss": 3.4428, |
| "step": 43000 |
| }, |
| { |
| "epoch": 0.37991929990043494, |
| "grad_norm": 1.8246058225631714, |
| "learning_rate": 9.962008070009957e-06, |
| "loss": 3.4436, |
| "step": 43500 |
| }, |
| { |
| "epoch": 0.38428618840503764, |
| "grad_norm": 1.8644582033157349, |
| "learning_rate": 9.961571381159497e-06, |
| "loss": 3.4355, |
| "step": 44000 |
| }, |
| { |
| "epoch": 0.38865307690964035, |
| "grad_norm": 1.7610771656036377, |
| "learning_rate": 9.961134692309037e-06, |
| "loss": 3.4286, |
| "step": 44500 |
| }, |
| { |
| "epoch": 0.39301996541424306, |
| "grad_norm": 1.7471799850463867, |
| "learning_rate": 9.960698003458577e-06, |
| "loss": 3.4359, |
| "step": 45000 |
| }, |
| { |
| "epoch": 0.39738685391884576, |
| "grad_norm": 1.730660319328308, |
| "learning_rate": 9.960261314608117e-06, |
| "loss": 3.4329, |
| "step": 45500 |
| }, |
| { |
| "epoch": 0.40175374242344847, |
| "grad_norm": 1.7533937692642212, |
| "learning_rate": 9.959824625757656e-06, |
| "loss": 3.4346, |
| "step": 46000 |
| }, |
| { |
| "epoch": 0.4035965693723908, |
| "step": 46211, |
| "total_flos": 1.1591583382865183e+18, |
| "train_loss": 3.5562555452165068, |
| "train_runtime": 46797.4598, |
| "train_samples_per_second": 11743.941, |
| "train_steps_per_second": 244.667 |
| } |
| ], |
| "logging_steps": 500, |
| "max_steps": 11449800, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 100, |
| "save_steps": 1000000, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": false, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.1591583382865183e+18, |
| "train_batch_size": 48, |
| "trial_name": null, |
| "trial_params": null |
| } |