{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 68592,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 4.963552600886401e-05,
      "loss": 1.8747,
      "step": 500
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.927105201772802e-05,
      "loss": 1.7998,
      "step": 1000
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.890657802659203e-05,
      "loss": 1.7788,
      "step": 1500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.8542104035456035e-05,
      "loss": 1.7681,
      "step": 2000
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.8177630044320036e-05,
      "loss": 1.7504,
      "step": 2500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.781315605318405e-05,
      "loss": 1.7426,
      "step": 3000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.744868206204805e-05,
      "loss": 1.7515,
      "step": 3500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.708420807091206e-05,
      "loss": 1.7297,
      "step": 4000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.671973407977607e-05,
      "loss": 1.7266,
      "step": 4500
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.635526008864008e-05,
      "loss": 1.716,
      "step": 5000
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.5990786097504085e-05,
      "loss": 1.7168,
      "step": 5500
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.5626312106368094e-05,
      "loss": 1.7136,
      "step": 6000
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.5261838115232095e-05,
      "loss": 1.7018,
      "step": 6500
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.489736412409611e-05,
      "loss": 1.6961,
      "step": 7000
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.453289013296011e-05,
      "loss": 1.7138,
      "step": 7500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.416841614182412e-05,
      "loss": 1.7039,
      "step": 8000
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.380394215068813e-05,
      "loss": 1.698,
      "step": 8500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.3439468159552136e-05,
      "loss": 1.6897,
      "step": 9000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.3074994168416144e-05,
      "loss": 1.6895,
      "step": 9500
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.271052017728015e-05,
      "loss": 1.6914,
      "step": 10000
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.234604618614416e-05,
      "loss": 1.6772,
      "step": 10500
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.198157219500817e-05,
      "loss": 1.6738,
      "step": 11000
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.161709820387218e-05,
      "loss": 1.6739,
      "step": 11500
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.125262421273618e-05,
      "loss": 1.6645,
      "step": 12000
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.088815022160019e-05,
      "loss": 1.6601,
      "step": 12500
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.0523676230464194e-05,
      "loss": 1.6523,
      "step": 13000
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.01592022393282e-05,
      "loss": 1.6626,
      "step": 13500
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.979472824819221e-05,
      "loss": 1.6574,
      "step": 14000
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.943025425705622e-05,
      "loss": 1.6748,
      "step": 14500
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.906578026592023e-05,
      "loss": 1.668,
      "step": 15000
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.8701306274784235e-05,
      "loss": 1.656,
      "step": 15500
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.833683228364824e-05,
      "loss": 1.6487,
      "step": 16000
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.797235829251225e-05,
      "loss": 1.6537,
      "step": 16500
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.760788430137625e-05,
      "loss": 1.655,
      "step": 17000
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.724341031024026e-05,
      "loss": 1.6492,
      "step": 17500
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.687893631910427e-05,
      "loss": 1.6477,
      "step": 18000
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.651446232796828e-05,
      "loss": 1.642,
      "step": 18500
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.6149988336832286e-05,
      "loss": 1.6285,
      "step": 19000
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.5785514345696294e-05,
      "loss": 1.6415,
      "step": 19500
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.5421040354560295e-05,
      "loss": 1.6349,
      "step": 20000
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.505656636342431e-05,
      "loss": 1.6272,
      "step": 20500
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.469209237228832e-05,
      "loss": 1.6268,
      "step": 21000
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.432761838115232e-05,
      "loss": 1.6244,
      "step": 21500
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.3963144390016335e-05,
      "loss": 1.6253,
      "step": 22000
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.3598670398880336e-05,
      "loss": 1.6337,
      "step": 22500
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.3234196407744344e-05,
      "loss": 1.6131,
      "step": 23000
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.286972241660835e-05,
      "loss": 1.6085,
      "step": 23500
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.250524842547236e-05,
      "loss": 1.6141,
      "step": 24000
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.214077443433637e-05,
      "loss": 1.6097,
      "step": 24500
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.177630044320038e-05,
      "loss": 1.6068,
      "step": 25000
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.141182645206438e-05,
      "loss": 1.5991,
      "step": 25500
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.104735246092839e-05,
      "loss": 1.6001,
      "step": 26000
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.0682878469792395e-05,
      "loss": 1.5873,
      "step": 26500
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.0318404478656403e-05,
      "loss": 1.5979,
      "step": 27000
    },
    {
      "epoch": 1.2,
      "learning_rate": 2.995393048752041e-05,
      "loss": 1.5989,
      "step": 27500
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.9589456496384422e-05,
      "loss": 1.5973,
      "step": 28000
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.9224982505248427e-05,
      "loss": 1.5945,
      "step": 28500
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.8860508514112432e-05,
      "loss": 1.581,
      "step": 29000
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.849603452297644e-05,
      "loss": 1.5826,
      "step": 29500
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.8131560531840452e-05,
      "loss": 1.594,
      "step": 30000
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.7767086540704457e-05,
      "loss": 1.5812,
      "step": 30500
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.740261254956846e-05,
      "loss": 1.5779,
      "step": 31000
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.7038138558432473e-05,
      "loss": 1.5807,
      "step": 31500
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.667366456729648e-05,
      "loss": 1.5815,
      "step": 32000
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.6309190576160486e-05,
      "loss": 1.5877,
      "step": 32500
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.594471658502449e-05,
      "loss": 1.5694,
      "step": 33000
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.5580242593888502e-05,
      "loss": 1.5807,
      "step": 33500
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.521576860275251e-05,
      "loss": 1.5794,
      "step": 34000
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.4851294611616515e-05,
      "loss": 1.5798,
      "step": 34500
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.4486820620480523e-05,
      "loss": 1.5702,
      "step": 35000
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.412234662934453e-05,
      "loss": 1.5757,
      "step": 35500
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.375787263820854e-05,
      "loss": 1.5699,
      "step": 36000
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.3393398647072544e-05,
      "loss": 1.5691,
      "step": 36500
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.3028924655936553e-05,
      "loss": 1.5566,
      "step": 37000
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.266445066480056e-05,
      "loss": 1.562,
      "step": 37500
    },
    {
      "epoch": 1.66,
      "learning_rate": 2.229997667366457e-05,
      "loss": 1.5541,
      "step": 38000
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.1935502682528574e-05,
      "loss": 1.5554,
      "step": 38500
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.1571028691392585e-05,
      "loss": 1.5597,
      "step": 39000
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.120655470025659e-05,
      "loss": 1.5508,
      "step": 39500
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.0842080709120598e-05,
      "loss": 1.5551,
      "step": 40000
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.0477606717984603e-05,
      "loss": 1.5637,
      "step": 40500
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.0113132726848614e-05,
      "loss": 1.5442,
      "step": 41000
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.9748658735712623e-05,
      "loss": 1.5521,
      "step": 41500
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.9384184744576627e-05,
      "loss": 1.5468,
      "step": 42000
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.9019710753440636e-05,
      "loss": 1.5492,
      "step": 42500
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.8655236762304644e-05,
      "loss": 1.551,
      "step": 43000
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.8290762771168652e-05,
      "loss": 1.5483,
      "step": 43500
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.7926288780032657e-05,
      "loss": 1.5499,
      "step": 44000
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.7561814788896665e-05,
      "loss": 1.5273,
      "step": 44500
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.7197340797760673e-05,
      "loss": 1.5441,
      "step": 45000
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.683286680662468e-05,
      "loss": 1.5401,
      "step": 45500
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.6468392815488686e-05,
      "loss": 1.5262,
      "step": 46000
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.6103918824352694e-05,
      "loss": 1.5231,
      "step": 46500
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.5739444833216702e-05,
      "loss": 1.5282,
      "step": 47000
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.537497084208071e-05,
      "loss": 1.5209,
      "step": 47500
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.5010496850944717e-05,
      "loss": 1.5224,
      "step": 48000
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4646022859808725e-05,
      "loss": 1.5072,
      "step": 48500
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.4281548868672732e-05,
      "loss": 1.5153,
      "step": 49000
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.391707487753674e-05,
      "loss": 1.5189,
      "step": 49500
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.3552600886400746e-05,
      "loss": 1.5185,
      "step": 50000
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.3188126895264754e-05,
      "loss": 1.5073,
      "step": 50500
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.282365290412876e-05,
      "loss": 1.522,
      "step": 51000
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.2459178912992769e-05,
      "loss": 1.5158,
      "step": 51500
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.2094704921856777e-05,
      "loss": 1.5102,
      "step": 52000
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.1730230930720784e-05,
      "loss": 1.5093,
      "step": 52500
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.1365756939584792e-05,
      "loss": 1.5101,
      "step": 53000
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.1001282948448798e-05,
      "loss": 1.5062,
      "step": 53500
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.0636808957312806e-05,
      "loss": 1.5039,
      "step": 54000
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.0272334966176813e-05,
      "loss": 1.494,
      "step": 54500
    },
    {
      "epoch": 2.41,
      "learning_rate": 9.907860975040821e-06,
      "loss": 1.5095,
      "step": 55000
    },
    {
      "epoch": 2.43,
      "learning_rate": 9.54338698390483e-06,
      "loss": 1.5021,
      "step": 55500
    },
    {
      "epoch": 2.45,
      "learning_rate": 9.178912992768836e-06,
      "loss": 1.5046,
      "step": 56000
    },
    {
      "epoch": 2.47,
      "learning_rate": 8.814439001632844e-06,
      "loss": 1.4966,
      "step": 56500
    },
    {
      "epoch": 2.49,
      "learning_rate": 8.449965010496852e-06,
      "loss": 1.4875,
      "step": 57000
    },
    {
      "epoch": 2.51,
      "learning_rate": 8.085491019360859e-06,
      "loss": 1.5078,
      "step": 57500
    },
    {
      "epoch": 2.54,
      "learning_rate": 7.721017028224867e-06,
      "loss": 1.4892,
      "step": 58000
    },
    {
      "epoch": 2.56,
      "learning_rate": 7.356543037088874e-06,
      "loss": 1.4984,
      "step": 58500
    },
    {
      "epoch": 2.58,
      "learning_rate": 6.992069045952881e-06,
      "loss": 1.4963,
      "step": 59000
    },
    {
      "epoch": 2.6,
      "learning_rate": 6.627595054816889e-06,
      "loss": 1.4895,
      "step": 59500
    },
    {
      "epoch": 2.62,
      "learning_rate": 6.263121063680896e-06,
      "loss": 1.5005,
      "step": 60000
    },
    {
      "epoch": 2.65,
      "learning_rate": 5.898647072544903e-06,
      "loss": 1.4999,
      "step": 60500
    },
    {
      "epoch": 2.67,
      "learning_rate": 5.534173081408911e-06,
      "loss": 1.4832,
      "step": 61000
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.169699090272919e-06,
      "loss": 1.495,
      "step": 61500
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.805225099136926e-06,
      "loss": 1.499,
      "step": 62000
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.4407511080009334e-06,
      "loss": 1.4831,
      "step": 62500
    },
    {
      "epoch": 2.76,
      "learning_rate": 4.076277116864941e-06,
      "loss": 1.4854,
      "step": 63000
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.711803125728948e-06,
      "loss": 1.4918,
      "step": 63500
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.3473291345929554e-06,
      "loss": 1.4914,
      "step": 64000
    },
    {
      "epoch": 2.82,
      "learning_rate": 2.982855143456963e-06,
      "loss": 1.4836,
      "step": 64500
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.6183811523209705e-06,
      "loss": 1.4829,
      "step": 65000
    },
    {
      "epoch": 2.86,
      "learning_rate": 2.253907161184978e-06,
      "loss": 1.4824,
      "step": 65500
    },
    {
      "epoch": 2.89,
      "learning_rate": 1.8894331700489855e-06,
      "loss": 1.4856,
      "step": 66000
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.5249591789129928e-06,
      "loss": 1.487,
      "step": 66500
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.1604851877770002e-06,
      "loss": 1.4791,
      "step": 67000
    },
    {
      "epoch": 2.95,
      "learning_rate": 7.960111966410078e-07,
      "loss": 1.4779,
      "step": 67500
    },
    {
      "epoch": 2.97,
      "learning_rate": 4.3153720550501516e-07,
      "loss": 1.4794,
      "step": 68000
    },
    {
      "epoch": 3.0,
      "learning_rate": 6.706321436902263e-08,
      "loss": 1.476,
      "step": 68500
    },
    {
      "epoch": 3.0,
      "step": 68592,
      "total_flos": 1.805366555813376e+17,
      "train_loss": 1.5861018672255278,
      "train_runtime": 49915.9647,
      "train_samples_per_second": 13.741,
      "train_steps_per_second": 1.374
    }
  ],
  "max_steps": 68592,
  "num_train_epochs": 3,
  "total_flos": 1.805366555813376e+17,
  "trial_name": null,
  "trial_params": null
}