{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 380,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.026385224274406333,
      "grad_norm": 107.23584747314453,
      "learning_rate": 4.210526315789474e-07,
      "loss": 5.622,
      "step": 5
    },
    {
      "epoch": 0.052770448548812667,
      "grad_norm": 77.56803894042969,
      "learning_rate": 9.473684210526317e-07,
      "loss": 5.3488,
      "step": 10
    },
    {
      "epoch": 0.079155672823219,
      "grad_norm": 60.997257232666016,
      "learning_rate": 1.4736842105263159e-06,
      "loss": 4.9812,
      "step": 15
    },
    {
      "epoch": 0.10554089709762533,
      "grad_norm": 33.36190414428711,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 4.4552,
      "step": 20
    },
    {
      "epoch": 0.13192612137203166,
      "grad_norm": 18.245769500732422,
      "learning_rate": 2.5263157894736844e-06,
      "loss": 4.0648,
      "step": 25
    },
    {
      "epoch": 0.158311345646438,
      "grad_norm": 15.86693000793457,
      "learning_rate": 3.052631578947369e-06,
      "loss": 3.6069,
      "step": 30
    },
    {
      "epoch": 0.18469656992084432,
      "grad_norm": 11.502686500549316,
      "learning_rate": 3.578947368421053e-06,
      "loss": 3.6728,
      "step": 35
    },
    {
      "epoch": 0.21108179419525067,
      "grad_norm": 16.517114639282227,
      "learning_rate": 4.105263157894737e-06,
      "loss": 3.7924,
      "step": 40
    },
    {
      "epoch": 0.23746701846965698,
      "grad_norm": 12.67491340637207,
      "learning_rate": 4.631578947368421e-06,
      "loss": 3.3256,
      "step": 45
    },
    {
      "epoch": 0.2638522427440633,
      "grad_norm": 11.452499389648438,
      "learning_rate": 5.157894736842106e-06,
      "loss": 3.2939,
      "step": 50
    },
    {
      "epoch": 0.29023746701846964,
      "grad_norm": 13.192882537841797,
      "learning_rate": 5.68421052631579e-06,
      "loss": 3.0344,
      "step": 55
    },
    {
      "epoch": 0.316622691292876,
      "grad_norm": 11.595597267150879,
      "learning_rate": 6.2105263157894745e-06,
      "loss": 2.7082,
      "step": 60
    },
    {
      "epoch": 0.34300791556728233,
      "grad_norm": 12.450468063354492,
      "learning_rate": 6.736842105263158e-06,
      "loss": 2.9807,
      "step": 65
    },
    {
      "epoch": 0.36939313984168864,
      "grad_norm": 13.777202606201172,
      "learning_rate": 7.263157894736843e-06,
      "loss": 2.8359,
      "step": 70
    },
    {
      "epoch": 0.39577836411609496,
      "grad_norm": 13.03334903717041,
      "learning_rate": 7.789473684210526e-06,
      "loss": 2.5446,
      "step": 75
    },
    {
      "epoch": 0.42216358839050133,
      "grad_norm": 17.208057403564453,
      "learning_rate": 8.315789473684212e-06,
      "loss": 2.5668,
      "step": 80
    },
    {
      "epoch": 0.44854881266490765,
      "grad_norm": 15.075675010681152,
      "learning_rate": 8.842105263157895e-06,
      "loss": 2.3511,
      "step": 85
    },
    {
      "epoch": 0.47493403693931396,
      "grad_norm": 14.769970893859863,
      "learning_rate": 9.36842105263158e-06,
      "loss": 2.5317,
      "step": 90
    },
    {
      "epoch": 0.5013192612137203,
      "grad_norm": 11.59249210357666,
      "learning_rate": 9.894736842105264e-06,
      "loss": 2.2466,
      "step": 95
    },
    {
      "epoch": 0.5277044854881267,
      "grad_norm": 15.899335861206055,
      "learning_rate": 9.999459967758384e-06,
      "loss": 2.1577,
      "step": 100
    },
    {
      "epoch": 0.554089709762533,
      "grad_norm": 18.471529006958008,
      "learning_rate": 9.99726628670463e-06,
      "loss": 1.9901,
      "step": 105
    },
    {
      "epoch": 0.5804749340369393,
      "grad_norm": 13.399049758911133,
      "learning_rate": 9.993385944658086e-06,
      "loss": 1.9487,
      "step": 110
    },
    {
      "epoch": 0.6068601583113457,
      "grad_norm": 11.711898803710938,
      "learning_rate": 9.987820251299121e-06,
      "loss": 1.6927,
      "step": 115
    },
    {
      "epoch": 0.633245382585752,
      "grad_norm": 20.446998596191406,
      "learning_rate": 9.980571085142381e-06,
      "loss": 1.7541,
      "step": 120
    },
    {
      "epoch": 0.6596306068601583,
      "grad_norm": 10.907441139221191,
      "learning_rate": 9.971640892902742e-06,
      "loss": 1.7187,
      "step": 125
    },
    {
      "epoch": 0.6860158311345647,
      "grad_norm": 13.693713188171387,
      "learning_rate": 9.961032688669519e-06,
      "loss": 1.6546,
      "step": 130
    },
    {
      "epoch": 0.712401055408971,
      "grad_norm": 9.666728019714355,
      "learning_rate": 9.94875005288915e-06,
      "loss": 1.6036,
      "step": 135
    },
    {
      "epoch": 0.7387862796833773,
      "grad_norm": 12.30069637298584,
      "learning_rate": 9.934797131156745e-06,
      "loss": 1.3259,
      "step": 140
    },
    {
      "epoch": 0.7651715039577837,
      "grad_norm": 10.972068786621094,
      "learning_rate": 9.919178632816864e-06,
      "loss": 1.3776,
      "step": 145
    },
    {
      "epoch": 0.7915567282321899,
      "grad_norm": 13.099720001220703,
      "learning_rate": 9.901899829374048e-06,
      "loss": 1.2886,
      "step": 150
    },
    {
      "epoch": 0.8179419525065963,
      "grad_norm": 9.663094520568848,
      "learning_rate": 9.88296655271359e-06,
      "loss": 1.133,
      "step": 155
    },
    {
      "epoch": 0.8443271767810027,
      "grad_norm": 12.018184661865234,
      "learning_rate": 9.862385193133181e-06,
      "loss": 1.0579,
      "step": 160
    },
    {
      "epoch": 0.8707124010554089,
      "grad_norm": 14.370702743530273,
      "learning_rate": 9.840162697186075e-06,
      "loss": 0.8807,
      "step": 165
    },
    {
      "epoch": 0.8970976253298153,
      "grad_norm": 10.248346328735352,
      "learning_rate": 9.81630656533651e-06,
      "loss": 0.8824,
      "step": 170
    },
    {
      "epoch": 0.9234828496042217,
      "grad_norm": 12.581779479980469,
      "learning_rate": 9.79082484942818e-06,
      "loss": 0.8748,
      "step": 175
    },
    {
      "epoch": 0.9498680738786279,
      "grad_norm": 13.555765151977539,
      "learning_rate": 9.763726149966596e-06,
      "loss": 0.8773,
      "step": 180
    },
    {
      "epoch": 0.9762532981530343,
      "grad_norm": 10.211019515991211,
      "learning_rate": 9.735019613216281e-06,
      "loss": 0.6417,
      "step": 185
    },
    {
      "epoch": 1.0,
      "grad_norm": 7.966390609741211,
      "learning_rate": 9.704714928113743e-06,
      "loss": 0.7403,
      "step": 190
    },
    {
      "epoch": 1.0263852242744063,
      "grad_norm": 6.398619174957275,
      "learning_rate": 9.672822322997305e-06,
      "loss": 0.4215,
      "step": 195
    },
    {
      "epoch": 1.0527704485488127,
      "grad_norm": 35.487972259521484,
      "learning_rate": 9.639352562154862e-06,
      "loss": 0.4103,
      "step": 200
    },
    {
      "epoch": 1.079155672823219,
      "grad_norm": 10.25015926361084,
      "learning_rate": 9.604316942190764e-06,
      "loss": 0.4638,
      "step": 205
    },
    {
      "epoch": 1.1055408970976253,
      "grad_norm": 7.6288533210754395,
      "learning_rate": 9.567727288213005e-06,
      "loss": 0.3847,
      "step": 210
    },
    {
      "epoch": 1.1319261213720317,
      "grad_norm": 8.111388206481934,
      "learning_rate": 9.529595949842077e-06,
      "loss": 0.3595,
      "step": 215
    },
    {
      "epoch": 1.158311345646438,
      "grad_norm": 12.408935546875,
      "learning_rate": 9.489935797042741e-06,
      "loss": 0.3109,
      "step": 220
    },
    {
      "epoch": 1.1846965699208443,
      "grad_norm": 8.753190994262695,
      "learning_rate": 9.448760215780218e-06,
      "loss": 0.3323,
      "step": 225
    },
    {
      "epoch": 1.2110817941952507,
      "grad_norm": 6.783555030822754,
      "learning_rate": 9.406083103502184e-06,
      "loss": 0.2708,
      "step": 230
    },
    {
      "epoch": 1.237467018469657,
      "grad_norm": 5.595340728759766,
      "learning_rate": 9.36191886444817e-06,
      "loss": 0.2853,
      "step": 235
    },
    {
      "epoch": 1.2638522427440633,
      "grad_norm": 7.698513031005859,
      "learning_rate": 9.31628240478787e-06,
      "loss": 0.2702,
      "step": 240
    },
    {
      "epoch": 1.2902374670184695,
      "grad_norm": 12.008724212646484,
      "learning_rate": 9.26918912759007e-06,
      "loss": 0.3292,
      "step": 245
    },
    {
      "epoch": 1.316622691292876,
      "grad_norm": 9.176492691040039,
      "learning_rate": 9.220654927623855e-06,
      "loss": 0.3311,
      "step": 250
    },
    {
      "epoch": 1.3430079155672823,
      "grad_norm": 6.505499362945557,
      "learning_rate": 9.17069618599385e-06,
      "loss": 0.3127,
      "step": 255
    },
    {
      "epoch": 1.3693931398416885,
      "grad_norm": 6.490231513977051,
      "learning_rate": 9.119329764611336e-06,
      "loss": 0.2215,
      "step": 260
    },
    {
      "epoch": 1.395778364116095,
      "grad_norm": 6.836956977844238,
      "learning_rate": 9.06657300050306e-06,
      "loss": 0.2988,
      "step": 265
    },
    {
      "epoch": 1.4221635883905013,
      "grad_norm": 7.263693332672119,
      "learning_rate": 9.012443699959706e-06,
      "loss": 0.2536,
      "step": 270
    },
    {
      "epoch": 1.4485488126649075,
      "grad_norm": 7.557298183441162,
      "learning_rate": 8.956960132525973e-06,
      "loss": 0.2264,
      "step": 275
    },
    {
      "epoch": 1.474934036939314,
      "grad_norm": 6.503325462341309,
      "learning_rate": 8.900141024834296e-06,
      "loss": 0.2262,
      "step": 280
    },
    {
      "epoch": 1.5013192612137203,
      "grad_norm": 6.369277477264404,
      "learning_rate": 8.842005554284296e-06,
      "loss": 0.1801,
      "step": 285
    },
    {
      "epoch": 1.5277044854881265,
      "grad_norm": 7.811641693115234,
      "learning_rate": 8.782573342570084e-06,
      "loss": 0.178,
      "step": 290
    },
    {
      "epoch": 1.554089709762533,
      "grad_norm": 6.305844783782959,
      "learning_rate": 8.721864449057614e-06,
      "loss": 0.1599,
      "step": 295
    },
    {
      "epoch": 1.5804749340369393,
      "grad_norm": 4.9031877517700195,
      "learning_rate": 8.659899364014309e-06,
      "loss": 0.154,
      "step": 300
    },
    {
      "epoch": 1.6068601583113455,
      "grad_norm": 7.324002265930176,
      "learning_rate": 8.596699001693257e-06,
      "loss": 0.2359,
      "step": 305
    },
    {
      "epoch": 1.633245382585752,
      "grad_norm": 6.51141881942749,
      "learning_rate": 8.532284693274293e-06,
      "loss": 0.2,
      "step": 310
    },
    {
      "epoch": 1.6596306068601583,
      "grad_norm": 7.332818031311035,
      "learning_rate": 8.466678179664378e-06,
      "loss": 0.1767,
      "step": 315
    },
    {
      "epoch": 1.6860158311345645,
      "grad_norm": 6.019137859344482,
      "learning_rate": 8.39990160415967e-06,
      "loss": 0.1794,
      "step": 320
    },
    {
      "epoch": 1.712401055408971,
      "grad_norm": 6.589385986328125,
      "learning_rate": 8.331977504971801e-06,
      "loss": 0.1579,
      "step": 325
    },
    {
      "epoch": 1.7387862796833773,
      "grad_norm": 9.24649429321289,
      "learning_rate": 8.262928807620843e-06,
      "loss": 0.1548,
      "step": 330
    },
    {
      "epoch": 1.7651715039577835,
      "grad_norm": 6.103082180023193,
      "learning_rate": 8.192778817197569e-06,
      "loss": 0.1608,
      "step": 335
    },
    {
      "epoch": 1.79155672823219,
      "grad_norm": 6.764271259307861,
      "learning_rate": 8.12155121049759e-06,
      "loss": 0.1456,
      "step": 340
    },
    {
      "epoch": 1.8179419525065963,
      "grad_norm": 6.516699314117432,
      "learning_rate": 8.049270028030045e-06,
      "loss": 0.1675,
      "step": 345
    },
    {
      "epoch": 1.8443271767810026,
      "grad_norm": 4.027854919433594,
      "learning_rate": 7.975959665903526e-06,
      "loss": 0.1311,
      "step": 350
    },
    {
      "epoch": 1.870712401055409,
      "grad_norm": 4.875086784362793,
      "learning_rate": 7.901644867591976e-06,
      "loss": 0.1174,
      "step": 355
    },
    {
      "epoch": 1.8970976253298153,
      "grad_norm": 5.186245441436768,
      "learning_rate": 7.82635071558336e-06,
      "loss": 0.0908,
      "step": 360
    },
    {
      "epoch": 1.9234828496042216,
      "grad_norm": 4.586931228637695,
      "learning_rate": 7.750102622913907e-06,
      "loss": 0.1033,
      "step": 365
    },
    {
      "epoch": 1.949868073878628,
      "grad_norm": 5.11731481552124,
      "learning_rate": 7.672926324590778e-06,
      "loss": 0.0793,
      "step": 370
    },
    {
      "epoch": 1.9762532981530343,
      "grad_norm": 4.95916223526001,
      "learning_rate": 7.594847868906076e-06,
      "loss": 0.0849,
      "step": 375
    },
    {
      "epoch": 2.0,
      "grad_norm": 4.0288472175598145,
      "learning_rate": 7.51589360864511e-06,
      "loss": 0.0836,
      "step": 380
    }
  ],
  "logging_steps": 5,
  "max_steps": 950,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 5.180251151961948e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}