{
  "best_global_step": 229,
  "best_metric": 0.22058944404125214,
  "best_model_checkpoint": "./lora_qwen32b_python_abdiff_v1/checkpoint-229",
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 687,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.043835616438356165,
      "grad_norm": 0.40441757440567017,
      "learning_rate": 3.913043478260869e-06,
      "loss": 1.0102,
      "step": 10
    },
    {
      "epoch": 0.08767123287671233,
      "grad_norm": 0.533530056476593,
      "learning_rate": 8.260869565217392e-06,
      "loss": 0.9822,
      "step": 20
    },
    {
      "epoch": 0.13150684931506848,
      "grad_norm": 0.4025503993034363,
      "learning_rate": 1.2608695652173912e-05,
      "loss": 0.8889,
      "step": 30
    },
    {
      "epoch": 0.17534246575342466,
      "grad_norm": 0.4503121078014374,
      "learning_rate": 1.6956521739130433e-05,
      "loss": 0.7945,
      "step": 40
    },
    {
      "epoch": 0.2191780821917808,
      "grad_norm": 0.49986398220062256,
      "learning_rate": 2.1304347826086958e-05,
      "loss": 0.5944,
      "step": 50
    },
    {
      "epoch": 0.26301369863013696,
      "grad_norm": 0.2905503809452057,
      "learning_rate": 2.565217391304348e-05,
      "loss": 0.3898,
      "step": 60
    },
    {
      "epoch": 0.30684931506849317,
      "grad_norm": 0.2511512339115143,
      "learning_rate": 3e-05,
      "loss": 0.3061,
      "step": 70
    },
    {
      "epoch": 0.3506849315068493,
      "grad_norm": 0.1779242306947708,
      "learning_rate": 2.9514563106796115e-05,
      "loss": 0.2615,
      "step": 80
    },
    {
      "epoch": 0.39452054794520547,
      "grad_norm": 0.1652904450893402,
      "learning_rate": 2.9029126213592237e-05,
      "loss": 0.2475,
      "step": 90
    },
    {
      "epoch": 0.4383561643835616,
      "grad_norm": 0.20759467780590057,
      "learning_rate": 2.854368932038835e-05,
      "loss": 0.2251,
      "step": 100
    },
    {
      "epoch": 0.4821917808219178,
      "grad_norm": 0.1608869731426239,
      "learning_rate": 2.8058252427184466e-05,
      "loss": 0.2169,
      "step": 110
    },
    {
      "epoch": 0.5260273972602739,
      "grad_norm": 0.26152652502059937,
      "learning_rate": 2.757281553398058e-05,
      "loss": 0.2109,
      "step": 120
    },
    {
      "epoch": 0.5698630136986301,
      "grad_norm": 0.18110939860343933,
      "learning_rate": 2.7087378640776702e-05,
      "loss": 0.2007,
      "step": 130
    },
    {
      "epoch": 0.6136986301369863,
      "grad_norm": 0.16133858263492584,
      "learning_rate": 2.6601941747572816e-05,
      "loss": 0.1955,
      "step": 140
    },
    {
      "epoch": 0.6575342465753424,
      "grad_norm": 0.19972704350948334,
      "learning_rate": 2.611650485436893e-05,
      "loss": 0.187,
      "step": 150
    },
    {
      "epoch": 0.7013698630136986,
      "grad_norm": 0.16258665919303894,
      "learning_rate": 2.563106796116505e-05,
      "loss": 0.1772,
      "step": 160
    },
    {
      "epoch": 0.7452054794520548,
      "grad_norm": 0.16635586321353912,
      "learning_rate": 2.5145631067961167e-05,
      "loss": 0.1758,
      "step": 170
    },
    {
      "epoch": 0.7890410958904109,
      "grad_norm": 0.19430042803287506,
      "learning_rate": 2.466019417475728e-05,
      "loss": 0.1567,
      "step": 180
    },
    {
      "epoch": 0.8328767123287671,
      "grad_norm": 0.19939476251602173,
      "learning_rate": 2.41747572815534e-05,
      "loss": 0.1588,
      "step": 190
    },
    {
      "epoch": 0.8767123287671232,
      "grad_norm": 0.19492687284946442,
      "learning_rate": 2.3689320388349514e-05,
      "loss": 0.1609,
      "step": 200
    },
    {
      "epoch": 0.9205479452054794,
      "grad_norm": 0.21758480370044708,
      "learning_rate": 2.3203883495145632e-05,
      "loss": 0.1458,
      "step": 210
    },
    {
      "epoch": 0.9643835616438357,
      "grad_norm": 0.19706270098686218,
      "learning_rate": 2.2718446601941746e-05,
      "loss": 0.1365,
      "step": 220
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.22058944404125214,
      "eval_runtime": 238.0297,
      "eval_samples_per_second": 5.184,
      "eval_steps_per_second": 2.592,
      "step": 229
    },
    {
      "epoch": 1.0043835616438357,
      "grad_norm": 0.26862189173698425,
      "learning_rate": 2.2233009708737864e-05,
      "loss": 0.1412,
      "step": 230
    },
    {
      "epoch": 1.0482191780821917,
      "grad_norm": 0.20033638179302216,
      "learning_rate": 2.1747572815533982e-05,
      "loss": 0.1294,
      "step": 240
    },
    {
      "epoch": 1.0920547945205479,
      "grad_norm": 0.23465846478939056,
      "learning_rate": 2.1262135922330097e-05,
      "loss": 0.1126,
      "step": 250
    },
    {
      "epoch": 1.135890410958904,
      "grad_norm": 0.2290247231721878,
      "learning_rate": 2.0776699029126215e-05,
      "loss": 0.1218,
      "step": 260
    },
    {
      "epoch": 1.1797260273972603,
      "grad_norm": 0.20325443148612976,
      "learning_rate": 2.029126213592233e-05,
      "loss": 0.1118,
      "step": 270
    },
    {
      "epoch": 1.2235616438356165,
      "grad_norm": 1.6609997749328613,
      "learning_rate": 1.9805825242718447e-05,
      "loss": 0.1145,
      "step": 280
    },
    {
      "epoch": 1.2673972602739725,
      "grad_norm": 0.2203844040632248,
      "learning_rate": 1.9320388349514565e-05,
      "loss": 0.1061,
      "step": 290
    },
    {
      "epoch": 1.3112328767123287,
      "grad_norm": 0.2386603206396103,
      "learning_rate": 1.883495145631068e-05,
      "loss": 0.0905,
      "step": 300
    },
    {
      "epoch": 1.355068493150685,
      "grad_norm": 0.20047076046466827,
      "learning_rate": 1.8349514563106795e-05,
      "loss": 0.0999,
      "step": 310
    },
    {
      "epoch": 1.398904109589041,
      "grad_norm": 0.3278600871562958,
      "learning_rate": 1.7864077669902913e-05,
      "loss": 0.0905,
      "step": 320
    },
    {
      "epoch": 1.4427397260273973,
      "grad_norm": 0.33833301067352295,
      "learning_rate": 1.737864077669903e-05,
      "loss": 0.0984,
      "step": 330
    },
    {
      "epoch": 1.4865753424657533,
      "grad_norm": 0.3847464621067047,
      "learning_rate": 1.6893203883495145e-05,
      "loss": 0.0879,
      "step": 340
    },
    {
      "epoch": 1.5304109589041097,
      "grad_norm": 0.3624022305011749,
      "learning_rate": 1.6407766990291263e-05,
      "loss": 0.0861,
      "step": 350
    },
    {
      "epoch": 1.5742465753424657,
      "grad_norm": 0.24037344753742218,
      "learning_rate": 1.592233009708738e-05,
      "loss": 0.0867,
      "step": 360
    },
    {
      "epoch": 1.618082191780822,
      "grad_norm": 0.33310437202453613,
      "learning_rate": 1.5436893203883496e-05,
      "loss": 0.0768,
      "step": 370
    },
    {
      "epoch": 1.6619178082191781,
      "grad_norm": 0.25728777050971985,
      "learning_rate": 1.4951456310679612e-05,
      "loss": 0.0793,
      "step": 380
    },
    {
      "epoch": 1.705753424657534,
      "grad_norm": 0.2507905662059784,
      "learning_rate": 1.4466019417475728e-05,
      "loss": 0.0764,
      "step": 390
    },
    {
      "epoch": 1.7495890410958905,
      "grad_norm": 0.32498112320899963,
      "learning_rate": 1.3980582524271846e-05,
      "loss": 0.0756,
      "step": 400
    },
    {
      "epoch": 1.7934246575342465,
      "grad_norm": 0.2852451801300049,
      "learning_rate": 1.349514563106796e-05,
      "loss": 0.0659,
      "step": 410
    },
    {
      "epoch": 1.8372602739726027,
      "grad_norm": 0.3044949769973755,
      "learning_rate": 1.3009708737864079e-05,
      "loss": 0.0756,
      "step": 420
    },
    {
      "epoch": 1.881095890410959,
      "grad_norm": 0.27822017669677734,
      "learning_rate": 1.2524271844660193e-05,
      "loss": 0.0729,
      "step": 430
    },
    {
      "epoch": 1.924931506849315,
      "grad_norm": 0.2542266547679901,
      "learning_rate": 1.2038834951456311e-05,
      "loss": 0.066,
      "step": 440
    },
    {
      "epoch": 1.9687671232876713,
      "grad_norm": 0.28479745984077454,
      "learning_rate": 1.1553398058252427e-05,
      "loss": 0.0721,
      "step": 450
    },
    {
      "epoch": 2.0,
      "eval_loss": 0.22873879969120026,
      "eval_runtime": 237.2802,
      "eval_samples_per_second": 5.201,
      "eval_steps_per_second": 2.6,
      "step": 458
    },
    {
      "epoch": 2.0087671232876714,
      "grad_norm": 0.2700972557067871,
      "learning_rate": 1.1067961165048544e-05,
      "loss": 0.0541,
      "step": 460
    },
    {
      "epoch": 2.0526027397260274,
      "grad_norm": 0.26581254601478577,
      "learning_rate": 1.058252427184466e-05,
      "loss": 0.0602,
      "step": 470
    },
    {
      "epoch": 2.0964383561643833,
      "grad_norm": 0.2851812243461609,
      "learning_rate": 1.0097087378640776e-05,
      "loss": 0.0475,
      "step": 480
    },
    {
      "epoch": 2.1402739726027398,
      "grad_norm": 0.24946200847625732,
      "learning_rate": 9.611650485436894e-06,
      "loss": 0.0519,
      "step": 490
    },
    {
      "epoch": 2.1841095890410958,
      "grad_norm": 0.28942641615867615,
      "learning_rate": 9.12621359223301e-06,
      "loss": 0.0545,
      "step": 500
    },
    {
      "epoch": 2.227945205479452,
      "grad_norm": 0.30540767312049866,
      "learning_rate": 8.640776699029127e-06,
      "loss": 0.0523,
      "step": 510
    },
    {
      "epoch": 2.271780821917808,
      "grad_norm": 0.33800721168518066,
      "learning_rate": 8.155339805825243e-06,
      "loss": 0.0593,
      "step": 520
    },
    {
      "epoch": 2.315616438356164,
      "grad_norm": 0.2955901324748993,
      "learning_rate": 7.66990291262136e-06,
      "loss": 0.052,
      "step": 530
    },
    {
      "epoch": 2.3594520547945206,
      "grad_norm": 0.3215559720993042,
      "learning_rate": 7.1844660194174755e-06,
      "loss": 0.0476,
      "step": 540
    },
    {
      "epoch": 2.4032876712328766,
      "grad_norm": 0.2469370812177658,
      "learning_rate": 6.699029126213593e-06,
      "loss": 0.0478,
      "step": 550
    },
    {
      "epoch": 2.447123287671233,
      "grad_norm": 0.3024570643901825,
      "learning_rate": 6.213592233009709e-06,
      "loss": 0.0449,
      "step": 560
    },
    {
      "epoch": 2.490958904109589,
      "grad_norm": 0.37865644693374634,
      "learning_rate": 5.728155339805826e-06,
      "loss": 0.0458,
      "step": 570
    },
    {
      "epoch": 2.534794520547945,
      "grad_norm": 0.23588547110557556,
      "learning_rate": 5.242718446601942e-06,
      "loss": 0.052,
      "step": 580
    },
    {
      "epoch": 2.5786301369863014,
      "grad_norm": 0.3039775788784027,
      "learning_rate": 4.7572815533980585e-06,
      "loss": 0.0582,
      "step": 590
    },
    {
      "epoch": 2.6224657534246574,
      "grad_norm": 0.29674655199050903,
      "learning_rate": 4.271844660194175e-06,
      "loss": 0.0512,
      "step": 600
    },
    {
      "epoch": 2.666301369863014,
      "grad_norm": 0.3092315196990967,
      "learning_rate": 3.7864077669902915e-06,
      "loss": 0.0469,
      "step": 610
    },
    {
      "epoch": 2.71013698630137,
      "grad_norm": 0.23795856535434723,
      "learning_rate": 3.3009708737864078e-06,
      "loss": 0.0489,
      "step": 620
    },
    {
      "epoch": 2.7539726027397258,
      "grad_norm": 0.31328457593917847,
      "learning_rate": 2.815533980582524e-06,
      "loss": 0.0501,
      "step": 630
    },
    {
      "epoch": 2.797808219178082,
      "grad_norm": 0.27806833386421204,
      "learning_rate": 2.3300970873786407e-06,
      "loss": 0.0421,
      "step": 640
    },
    {
      "epoch": 2.8416438356164386,
      "grad_norm": 0.29145801067352295,
      "learning_rate": 1.8446601941747572e-06,
      "loss": 0.0394,
      "step": 650
    },
    {
      "epoch": 2.8854794520547946,
      "grad_norm": 0.20057809352874756,
      "learning_rate": 1.359223300970874e-06,
      "loss": 0.0526,
      "step": 660
    },
    {
      "epoch": 2.9293150684931506,
      "grad_norm": 0.3271646499633789,
      "learning_rate": 8.737864077669904e-07,
      "loss": 0.0503,
      "step": 670
    },
    {
      "epoch": 2.9731506849315066,
      "grad_norm": 0.343369722366333,
      "learning_rate": 3.883495145631068e-07,
      "loss": 0.0482,
      "step": 680
    },
    {
      "epoch": 3.0,
      "eval_loss": 0.2385365217924118,
      "eval_runtime": 237.4718,
      "eval_samples_per_second": 5.196,
      "eval_steps_per_second": 2.598,
      "step": 687
    }
  ],
  "logging_steps": 10,
  "max_steps": 687,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.339343907394097e+18,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}