{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9990680335507922,
  "eval_steps": 500,
  "global_step": 268,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01863932898415657,
      "grad_norm": 1.7573972317970177,
      "learning_rate": 1.4285714285714285e-05,
      "loss": 0.8497,
      "num_tokens": 10465392.0,
      "step": 5
    },
    {
      "epoch": 0.03727865796831314,
      "grad_norm": 0.7814164461382908,
      "learning_rate": 3.2142857142857144e-05,
      "loss": 0.7788,
      "num_tokens": 20951152.0,
      "step": 10
    },
    {
      "epoch": 0.05591798695246971,
      "grad_norm": 0.4971840947687821,
      "learning_rate": 5e-05,
      "loss": 0.7096,
      "num_tokens": 31436912.0,
      "step": 15
    },
    {
      "epoch": 0.07455731593662628,
      "grad_norm": 0.4104663829493778,
      "learning_rate": 4.995698831799242e-05,
      "loss": 0.6707,
      "num_tokens": 41909269.0,
      "step": 20
    },
    {
      "epoch": 0.09319664492078285,
      "grad_norm": 0.378224237110894,
      "learning_rate": 4.982811771683982e-05,
      "loss": 0.6517,
      "num_tokens": 52353719.0,
      "step": 25
    },
    {
      "epoch": 0.11183597390493942,
      "grad_norm": 0.27264995346014925,
      "learning_rate": 4.9613880902437035e-05,
      "loss": 0.6363,
      "num_tokens": 62792124.0,
      "step": 30
    },
    {
      "epoch": 0.13047530288909598,
      "grad_norm": 0.25488038499432786,
      "learning_rate": 4.931509695796055e-05,
      "loss": 0.6217,
      "num_tokens": 73260442.0,
      "step": 35
    },
    {
      "epoch": 0.14911463187325255,
      "grad_norm": 0.2475293398276613,
      "learning_rate": 4.8932908212300035e-05,
      "loss": 0.6053,
      "num_tokens": 83727556.0,
      "step": 40
    },
    {
      "epoch": 0.16775396085740912,
      "grad_norm": 0.2763844980958693,
      "learning_rate": 4.846877587263728e-05,
      "loss": 0.6041,
      "num_tokens": 94201938.0,
      "step": 45
    },
    {
      "epoch": 0.1863932898415657,
      "grad_norm": 0.3232566590307947,
      "learning_rate": 4.792447443787034e-05,
      "loss": 0.5967,
      "num_tokens": 104670740.0,
      "step": 50
    },
    {
      "epoch": 0.20503261882572227,
      "grad_norm": 0.3499333864597863,
      "learning_rate": 4.730208491424174e-05,
      "loss": 0.5998,
      "num_tokens": 115151933.0,
      "step": 55
    },
    {
      "epoch": 0.22367194780987884,
      "grad_norm": 0.3933865267179061,
      "learning_rate": 4.660398685910928e-05,
      "loss": 0.5906,
      "num_tokens": 125607410.0,
      "step": 60
    },
    {
      "epoch": 0.2423112767940354,
      "grad_norm": 0.3915672585619426,
      "learning_rate": 4.583284928327805e-05,
      "loss": 0.5884,
      "num_tokens": 136036466.0,
      "step": 65
    },
    {
      "epoch": 0.26095060577819196,
      "grad_norm": 0.4195837025490587,
      "learning_rate": 4.499162044667658e-05,
      "loss": 0.5849,
      "num_tokens": 146502251.0,
      "step": 70
    },
    {
      "epoch": 0.27958993476234856,
      "grad_norm": 0.3381612617240059,
      "learning_rate": 4.4083516586390694e-05,
      "loss": 0.5821,
      "num_tokens": 156977308.0,
      "step": 75
    },
    {
      "epoch": 0.2982292637465051,
      "grad_norm": 0.2551239560014803,
      "learning_rate": 4.3112009620150904e-05,
      "loss": 0.576,
      "num_tokens": 167423794.0,
      "step": 80
    },
    {
      "epoch": 0.3168685927306617,
      "grad_norm": 0.30592698170937993,
      "learning_rate": 4.208081387228612e-05,
      "loss": 0.5685,
      "num_tokens": 177900466.0,
      "step": 85
    },
    {
      "epoch": 0.33550792171481825,
      "grad_norm": 0.281492013111172,
      "learning_rate": 4.099387187289365e-05,
      "loss": 0.5724,
      "num_tokens": 188373813.0,
      "step": 90
    },
    {
      "epoch": 0.35414725069897485,
      "grad_norm": 0.26672187272505754,
      "learning_rate": 3.985533928451914e-05,
      "loss": 0.5706,
      "num_tokens": 198859382.0,
      "step": 95
    },
    {
      "epoch": 0.3727865796831314,
      "grad_norm": 0.2923857402012376,
      "learning_rate": 3.866956901397559e-05,
      "loss": 0.5676,
      "num_tokens": 209345142.0,
      "step": 100
    },
    {
      "epoch": 0.391425908667288,
      "grad_norm": 0.2780198681024584,
      "learning_rate": 3.74410945700462e-05,
      "loss": 0.5668,
      "num_tokens": 219820636.0,
      "step": 105
    },
    {
      "epoch": 0.41006523765144454,
      "grad_norm": 0.3103372433812494,
      "learning_rate": 3.617461273069867e-05,
      "loss": 0.559,
      "num_tokens": 230306396.0,
      "step": 110
    },
    {
      "epoch": 0.42870456663560114,
      "grad_norm": 0.32943677203389876,
      "learning_rate": 3.487496558607898e-05,
      "loss": 0.5627,
      "num_tokens": 240792156.0,
      "step": 115
    },
    {
      "epoch": 0.4473438956197577,
      "grad_norm": 0.2903244983178175,
      "learning_rate": 3.354712202593882e-05,
      "loss": 0.5611,
      "num_tokens": 251259365.0,
      "step": 120
    },
    {
      "epoch": 0.4659832246039143,
      "grad_norm": 0.27649466725394467,
      "learning_rate": 3.219615874227504e-05,
      "loss": 0.5644,
      "num_tokens": 261736444.0,
      "step": 125
    },
    {
      "epoch": 0.4846225535880708,
      "grad_norm": 0.28234984702886096,
      "learning_rate": 3.082724081981306e-05,
      "loss": 0.563,
      "num_tokens": 272205866.0,
      "step": 130
    },
    {
      "epoch": 0.5032618825722274,
      "grad_norm": 0.2766528639933286,
      "learning_rate": 2.9445601988541782e-05,
      "loss": 0.5587,
      "num_tokens": 282683327.0,
      "step": 135
    },
    {
      "epoch": 0.5219012115563839,
      "grad_norm": 0.2685787031695611,
      "learning_rate": 2.8056524613799888e-05,
      "loss": 0.5612,
      "num_tokens": 293122806.0,
      "step": 140
    },
    {
      "epoch": 0.5405405405405406,
      "grad_norm": 0.2101892722509089,
      "learning_rate": 2.6665319500416385e-05,
      "loss": 0.5606,
      "num_tokens": 303598947.0,
      "step": 145
    },
    {
      "epoch": 0.5591798695246971,
      "grad_norm": 0.2589895951146755,
      "learning_rate": 2.527730558811963e-05,
      "loss": 0.5579,
      "num_tokens": 314077641.0,
      "step": 150
    },
    {
      "epoch": 0.5778191985088537,
      "grad_norm": 0.21518960674848625,
      "learning_rate": 2.3897789615844557e-05,
      "loss": 0.5488,
      "num_tokens": 324545386.0,
      "step": 155
    },
    {
      "epoch": 0.5964585274930102,
      "grad_norm": 0.18657627443004557,
      "learning_rate": 2.2532045832686615e-05,
      "loss": 0.5609,
      "num_tokens": 335031146.0,
      "step": 160
    },
    {
      "epoch": 0.6150978564771669,
      "grad_norm": 0.20944107327672593,
      "learning_rate": 2.1185295833072914e-05,
      "loss": 0.5412,
      "num_tokens": 345504904.0,
      "step": 165
    },
    {
      "epoch": 0.6337371854613234,
      "grad_norm": 0.18523084293093703,
      "learning_rate": 1.9862688593245853e-05,
      "loss": 0.5508,
      "num_tokens": 355956520.0,
      "step": 170
    },
    {
      "epoch": 0.65237651444548,
      "grad_norm": 0.16744206090364774,
      "learning_rate": 1.8569280785385046e-05,
      "loss": 0.5515,
      "num_tokens": 366434348.0,
      "step": 175
    },
    {
      "epoch": 0.6710158434296365,
      "grad_norm": 0.19529463948190923,
      "learning_rate": 1.731001744463161e-05,
      "loss": 0.5543,
      "num_tokens": 376908695.0,
      "step": 180
    },
    {
      "epoch": 0.6896551724137931,
      "grad_norm": 0.18569828701168353,
      "learning_rate": 1.6089713062930108e-05,
      "loss": 0.5571,
      "num_tokens": 387384301.0,
      "step": 185
    },
    {
      "epoch": 0.7082945013979497,
      "grad_norm": 0.1713285501371151,
      "learning_rate": 1.4913033181971056e-05,
      "loss": 0.548,
      "num_tokens": 397835323.0,
      "step": 190
    },
    {
      "epoch": 0.7269338303821062,
      "grad_norm": 0.16294932839233475,
      "learning_rate": 1.3784476555609077e-05,
      "loss": 0.5467,
      "num_tokens": 408298322.0,
      "step": 195
    },
    {
      "epoch": 0.7455731593662628,
      "grad_norm": 0.16147697817221135,
      "learning_rate": 1.2708357949954441e-05,
      "loss": 0.5526,
      "num_tokens": 418781566.0,
      "step": 200
    },
    {
      "epoch": 0.7642124883504194,
      "grad_norm": 0.17353407502301713,
      "learning_rate": 1.1688791646897726e-05,
      "loss": 0.5484,
      "num_tokens": 429246222.0,
      "step": 205
    },
    {
      "epoch": 0.782851817334576,
      "grad_norm": 0.16962668393783945,
      "learning_rate": 1.0729675714137831e-05,
      "loss": 0.5426,
      "num_tokens": 439717644.0,
      "step": 210
    },
    {
      "epoch": 0.8014911463187325,
      "grad_norm": 0.15859375239694778,
      "learning_rate": 9.834677101853265e-06,
      "loss": 0.5517,
      "num_tokens": 450170885.0,
      "step": 215
    },
    {
      "epoch": 0.8201304753028891,
      "grad_norm": 0.1772198182052593,
      "learning_rate": 9.007217622995933e-06,
      "loss": 0.5478,
      "num_tokens": 460656645.0,
      "step": 220
    },
    {
      "epoch": 0.8387698042870456,
      "grad_norm": 0.18021101189312388,
      "learning_rate": 8.250460870808394e-06,
      "loss": 0.5401,
      "num_tokens": 471142405.0,
      "step": 225
    },
    {
      "epoch": 0.8574091332712023,
      "grad_norm": 0.14833153298429916,
      "learning_rate": 7.567300123582388e-06,
      "loss": 0.5444,
      "num_tokens": 481616409.0,
      "step": 230
    },
    {
      "epoch": 0.8760484622553588,
      "grad_norm": 0.1675362976580094,
      "learning_rate": 6.960347282901894e-06,
      "loss": 0.5437,
      "num_tokens": 492097674.0,
      "step": 235
    },
    {
      "epoch": 0.8946877912395154,
      "grad_norm": 0.14741844256042477,
      "learning_rate": 6.431922887662643e-06,
      "loss": 0.5414,
      "num_tokens": 502555042.0,
      "step": 240
    },
    {
      "epoch": 0.9133271202236719,
      "grad_norm": 0.1496978314112162,
      "learning_rate": 5.984047242047134e-06,
      "loss": 0.5394,
      "num_tokens": 513031521.0,
      "step": 245
    },
    {
      "epoch": 0.9319664492078286,
      "grad_norm": 0.14094412621251864,
      "learning_rate": 5.6184326913751945e-06,
      "loss": 0.5477,
      "num_tokens": 523517281.0,
      "step": 250
    },
    {
      "epoch": 0.9506057781919851,
      "grad_norm": 0.1478294431106662,
      "learning_rate": 5.336477075361577e-06,
      "loss": 0.544,
      "num_tokens": 533998146.0,
      "step": 255
    },
    {
      "epoch": 0.9692451071761417,
      "grad_norm": 0.13882538385053997,
      "learning_rate": 5.139258383810381e-06,
      "loss": 0.54,
      "num_tokens": 544470561.0,
      "step": 260
    },
    {
      "epoch": 0.9878844361602982,
      "grad_norm": 0.14673471750521358,
      "learning_rate": 5.027530635179121e-06,
      "loss": 0.5362,
      "num_tokens": 554934577.0,
      "step": 265
    },
    {
      "epoch": 0.9990680335507922,
      "num_tokens": 561226033.0,
      "step": 268,
      "total_flos": 488165445992448.0,
      "train_loss": 0.5790879526245061,
      "train_runtime": 25119.8538,
      "train_samples_per_second": 1.366,
      "train_steps_per_second": 0.011
    }
  ],
  "logging_steps": 5,
  "max_steps": 268,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 488165445992448.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}