{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9908256880733946,
  "eval_steps": 500,
  "global_step": 72,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.013761467889908258,
      "grad_norm": 38.02450942993164,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 3.125,
      "step": 1
    },
    {
      "epoch": 0.027522935779816515,
      "grad_norm": 37.864768981933594,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 3.0998,
      "step": 2
    },
    {
      "epoch": 0.04128440366972477,
      "grad_norm": 38.34700012207031,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 3.1533,
      "step": 3
    },
    {
      "epoch": 0.05504587155963303,
      "grad_norm": 38.33641815185547,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 3.1542,
      "step": 4
    },
    {
      "epoch": 0.06880733944954129,
      "grad_norm": 38.064449310302734,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 3.1153,
      "step": 5
    },
    {
      "epoch": 0.08256880733944955,
      "grad_norm": 37.92089080810547,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 3.0867,
      "step": 6
    },
    {
      "epoch": 0.0963302752293578,
      "grad_norm": 38.120323181152344,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 3.093,
      "step": 7
    },
    {
      "epoch": 0.11009174311926606,
      "grad_norm": 38.47222900390625,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 3.1056,
      "step": 8
    },
    {
      "epoch": 0.12385321100917432,
      "grad_norm": 38.013702392578125,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 3.0474,
      "step": 9
    },
    {
      "epoch": 0.13761467889908258,
      "grad_norm": 38.17593002319336,
      "learning_rate": 5.000000000000001e-07,
      "loss": 3.0264,
      "step": 10
    },
    {
      "epoch": 0.15137614678899083,
      "grad_norm": 38.60066604614258,
      "learning_rate": 5.5e-07,
      "loss": 2.9404,
      "step": 11
    },
    {
      "epoch": 0.1651376146788991,
      "grad_norm": 38.83498764038086,
      "learning_rate": 6.000000000000001e-07,
      "loss": 2.9571,
      "step": 12
    },
    {
      "epoch": 0.17889908256880735,
      "grad_norm": 38.942047119140625,
      "learning_rate": 6.5e-07,
      "loss": 2.8849,
      "step": 13
    },
    {
      "epoch": 0.1926605504587156,
      "grad_norm": 38.0286865234375,
      "learning_rate": 7.000000000000001e-07,
      "loss": 2.7486,
      "step": 14
    },
    {
      "epoch": 0.20642201834862386,
      "grad_norm": 38.31856155395508,
      "learning_rate": 7.5e-07,
      "loss": 2.6876,
      "step": 15
    },
    {
      "epoch": 0.22018348623853212,
      "grad_norm": 38.124759674072266,
      "learning_rate": 8.000000000000001e-07,
      "loss": 2.5992,
      "step": 16
    },
    {
      "epoch": 0.23394495412844038,
      "grad_norm": 36.59762191772461,
      "learning_rate": 8.500000000000001e-07,
      "loss": 2.4063,
      "step": 17
    },
    {
      "epoch": 0.24770642201834864,
      "grad_norm": 36.63874435424805,
      "learning_rate": 9.000000000000001e-07,
      "loss": 2.3109,
      "step": 18
    },
    {
      "epoch": 0.26146788990825687,
      "grad_norm": 36.768577575683594,
      "learning_rate": 9.500000000000001e-07,
      "loss": 2.1677,
      "step": 19
    },
    {
      "epoch": 0.27522935779816515,
      "grad_norm": 36.187137603759766,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.9551,
      "step": 20
    },
    {
      "epoch": 0.2889908256880734,
      "grad_norm": 35.55617141723633,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 1.8053,
      "step": 21
    },
    {
      "epoch": 0.30275229357798167,
      "grad_norm": 34.60952377319336,
      "learning_rate": 1.1e-06,
      "loss": 1.5654,
      "step": 22
    },
    {
      "epoch": 0.3165137614678899,
      "grad_norm": 33.69620895385742,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 1.3454,
      "step": 23
    },
    {
      "epoch": 0.3302752293577982,
      "grad_norm": 34.33642578125,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 1.2417,
      "step": 24
    },
    {
      "epoch": 0.3440366972477064,
      "grad_norm": 31.23066520690918,
      "learning_rate": 1.25e-06,
      "loss": 0.9839,
      "step": 25
    },
    {
      "epoch": 0.3577981651376147,
      "grad_norm": 25.810237884521484,
      "learning_rate": 1.3e-06,
      "loss": 0.793,
      "step": 26
    },
    {
      "epoch": 0.37155963302752293,
      "grad_norm": 23.06692886352539,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 0.6082,
      "step": 27
    },
    {
      "epoch": 0.3853211009174312,
      "grad_norm": 19.828439712524414,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.4845,
      "step": 28
    },
    {
      "epoch": 0.39908256880733944,
      "grad_norm": 14.150300025939941,
      "learning_rate": 1.45e-06,
      "loss": 0.348,
      "step": 29
    },
    {
      "epoch": 0.41284403669724773,
      "grad_norm": 9.044266700744629,
      "learning_rate": 1.5e-06,
      "loss": 0.2516,
      "step": 30
    },
    {
      "epoch": 0.42660550458715596,
      "grad_norm": 5.704404354095459,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 0.177,
      "step": 31
    },
    {
      "epoch": 0.44036697247706424,
      "grad_norm": 3.2953503131866455,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.1391,
      "step": 32
    },
    {
      "epoch": 0.4541284403669725,
      "grad_norm": 2.453219413757324,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 0.0982,
      "step": 33
    },
    {
      "epoch": 0.46788990825688076,
      "grad_norm": 2.0325512886047363,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.0807,
      "step": 34
    },
    {
      "epoch": 0.481651376146789,
      "grad_norm": 1.6322681903839111,
      "learning_rate": 1.75e-06,
      "loss": 0.0725,
      "step": 35
    },
    {
      "epoch": 0.4954128440366973,
      "grad_norm": 0.9713364839553833,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.067,
      "step": 36
    },
    {
      "epoch": 0.5091743119266054,
      "grad_norm": 0.7980225682258606,
      "learning_rate": 1.85e-06,
      "loss": 0.0582,
      "step": 37
    },
    {
      "epoch": 0.5229357798165137,
      "grad_norm": 1.0616590976715088,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.0562,
      "step": 38
    },
    {
      "epoch": 0.536697247706422,
      "grad_norm": 1.053462028503418,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 0.0537,
      "step": 39
    },
    {
      "epoch": 0.5504587155963303,
      "grad_norm": 0.9452660083770752,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.0602,
      "step": 40
    },
    {
      "epoch": 0.5642201834862385,
      "grad_norm": 0.830368161201477,
      "learning_rate": 2.05e-06,
      "loss": 0.0549,
      "step": 41
    },
    {
      "epoch": 0.5779816513761468,
      "grad_norm": 0.5791187882423401,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.0479,
      "step": 42
    },
    {
      "epoch": 0.591743119266055,
      "grad_norm": 0.44175243377685547,
      "learning_rate": 2.15e-06,
      "loss": 0.0461,
      "step": 43
    },
    {
      "epoch": 0.6055045871559633,
      "grad_norm": 0.37655699253082275,
      "learning_rate": 2.2e-06,
      "loss": 0.043,
      "step": 44
    },
    {
      "epoch": 0.6192660550458715,
      "grad_norm": 0.34382495284080505,
      "learning_rate": 2.25e-06,
      "loss": 0.0454,
      "step": 45
    },
    {
      "epoch": 0.6330275229357798,
      "grad_norm": 0.5047216415405273,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.0437,
      "step": 46
    },
    {
      "epoch": 0.6467889908256881,
      "grad_norm": 0.6318779587745667,
      "learning_rate": 2.35e-06,
      "loss": 0.0468,
      "step": 47
    },
    {
      "epoch": 0.6605504587155964,
      "grad_norm": 0.5135455131530762,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.0494,
      "step": 48
    },
    {
      "epoch": 0.6743119266055045,
      "grad_norm": 0.4802612066268921,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 0.0441,
      "step": 49
    },
    {
      "epoch": 0.6880733944954128,
      "grad_norm": 0.6157718300819397,
      "learning_rate": 2.5e-06,
      "loss": 0.0398,
      "step": 50
    },
    {
      "epoch": 0.7018348623853211,
      "grad_norm": 0.4327130913734436,
      "learning_rate": 2.55e-06,
      "loss": 0.0438,
      "step": 51
    },
    {
      "epoch": 0.7155963302752294,
      "grad_norm": 0.46133658289909363,
      "learning_rate": 2.6e-06,
      "loss": 0.041,
      "step": 52
    },
    {
      "epoch": 0.7293577981651376,
      "grad_norm": 0.5729146003723145,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.0406,
      "step": 53
    },
    {
      "epoch": 0.7431192660550459,
      "grad_norm": 0.32373812794685364,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.0419,
      "step": 54
    },
    {
      "epoch": 0.7568807339449541,
      "grad_norm": 0.29006752371788025,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.0415,
      "step": 55
    },
    {
      "epoch": 0.7706422018348624,
      "grad_norm": 0.31038960814476013,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.0344,
      "step": 56
    },
    {
      "epoch": 0.7844036697247706,
      "grad_norm": 0.2324836701154709,
      "learning_rate": 2.85e-06,
      "loss": 0.0374,
      "step": 57
    },
    {
      "epoch": 0.7981651376146789,
      "grad_norm": 0.5083625912666321,
      "learning_rate": 2.9e-06,
      "loss": 0.0324,
      "step": 58
    },
    {
      "epoch": 0.8119266055045872,
      "grad_norm": 0.2873130142688751,
      "learning_rate": 2.95e-06,
      "loss": 0.0403,
      "step": 59
    },
    {
      "epoch": 0.8256880733944955,
      "grad_norm": 0.437663197517395,
      "learning_rate": 3e-06,
      "loss": 0.0368,
      "step": 60
    },
    {
      "epoch": 0.8394495412844036,
      "grad_norm": 0.5645247101783752,
      "learning_rate": 3.05e-06,
      "loss": 0.0386,
      "step": 61
    },
    {
      "epoch": 0.8532110091743119,
      "grad_norm": 0.40374210476875305,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.0425,
      "step": 62
    },
    {
      "epoch": 0.8669724770642202,
      "grad_norm": 0.46468955278396606,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 0.0323,
      "step": 63
    },
    {
      "epoch": 0.8807339449541285,
      "grad_norm": 0.29952895641326904,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.0325,
      "step": 64
    },
    {
      "epoch": 0.8944954128440367,
      "grad_norm": 0.3678436279296875,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.036,
      "step": 65
    },
    {
      "epoch": 0.908256880733945,
      "grad_norm": 0.5068934559822083,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.0357,
      "step": 66
    },
    {
      "epoch": 0.9220183486238532,
      "grad_norm": 0.2723177671432495,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.0333,
      "step": 67
    },
    {
      "epoch": 0.9357798165137615,
      "grad_norm": 0.41696834564208984,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.0347,
      "step": 68
    },
    {
      "epoch": 0.9495412844036697,
      "grad_norm": 0.2582981288433075,
      "learning_rate": 3.45e-06,
      "loss": 0.0283,
      "step": 69
    },
    {
      "epoch": 0.963302752293578,
      "grad_norm": 0.40648311376571655,
      "learning_rate": 3.5e-06,
      "loss": 0.0293,
      "step": 70
    },
    {
      "epoch": 0.9770642201834863,
      "grad_norm": 0.4149394631385803,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.0311,
      "step": 71
    },
    {
      "epoch": 0.9908256880733946,
      "grad_norm": 0.3800952136516571,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.0346,
      "step": 72
    }
  ],
  "logging_steps": 1,
  "max_steps": 432,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 72,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 4.631891949769458e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}