{
  "best_global_step": 244,
  "best_metric": 0.241361603140831,
  "best_model_checkpoint": "lora_qwen7b_java_abdiff_v2/checkpoint-244",
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 732,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.041109969167523124,
      "grad_norm": 0.17571452260017395,
      "learning_rate": 3.648648648648649e-06,
      "loss": 0.5537,
      "step": 10
    },
    {
      "epoch": 0.08221993833504625,
      "grad_norm": 0.12462153285741806,
      "learning_rate": 7.702702702702703e-06,
      "loss": 0.5427,
      "step": 20
    },
    {
      "epoch": 0.12332990750256938,
      "grad_norm": 0.13460873067378998,
      "learning_rate": 1.1756756756756757e-05,
      "loss": 0.5255,
      "step": 30
    },
    {
      "epoch": 0.1644398766700925,
      "grad_norm": 0.2371131032705307,
      "learning_rate": 1.5810810810810808e-05,
      "loss": 0.4849,
      "step": 40
    },
    {
      "epoch": 0.20554984583761562,
      "grad_norm": 0.2611907720565796,
      "learning_rate": 1.9864864864864866e-05,
      "loss": 0.3981,
      "step": 50
    },
    {
      "epoch": 0.24665981500513876,
      "grad_norm": 0.10762301832437515,
      "learning_rate": 2.3918918918918917e-05,
      "loss": 0.3275,
      "step": 60
    },
    {
      "epoch": 0.28776978417266186,
      "grad_norm": 0.06910783052444458,
      "learning_rate": 2.7972972972972975e-05,
      "loss": 0.2976,
      "step": 70
    },
    {
      "epoch": 0.328879753340185,
      "grad_norm": 0.10020674765110016,
      "learning_rate": 2.9772036474164135e-05,
      "loss": 0.2958,
      "step": 80
    },
    {
      "epoch": 0.3699897225077081,
      "grad_norm": 0.06338030099868774,
      "learning_rate": 2.9316109422492404e-05,
      "loss": 0.2775,
      "step": 90
    },
    {
      "epoch": 0.41109969167523125,
      "grad_norm": 0.06356582045555115,
      "learning_rate": 2.886018237082067e-05,
      "loss": 0.2671,
      "step": 100
    },
    {
      "epoch": 0.4522096608427544,
      "grad_norm": 0.06176166608929634,
      "learning_rate": 2.8404255319148935e-05,
      "loss": 0.273,
      "step": 110
    },
    {
      "epoch": 0.4933196300102775,
      "grad_norm": 0.0641980841755867,
      "learning_rate": 2.7948328267477204e-05,
      "loss": 0.2708,
      "step": 120
    },
    {
      "epoch": 0.5344295991778006,
      "grad_norm": 0.060578711330890656,
      "learning_rate": 2.7492401215805473e-05,
      "loss": 0.2492,
      "step": 130
    },
    {
      "epoch": 0.5755395683453237,
      "grad_norm": 0.07500634342432022,
      "learning_rate": 2.7036474164133738e-05,
      "loss": 0.2516,
      "step": 140
    },
    {
      "epoch": 0.6166495375128469,
      "grad_norm": 0.06346568465232849,
      "learning_rate": 2.6580547112462007e-05,
      "loss": 0.2423,
      "step": 150
    },
    {
      "epoch": 0.65775950668037,
      "grad_norm": 0.214496448636055,
      "learning_rate": 2.6124620060790272e-05,
      "loss": 0.245,
      "step": 160
    },
    {
      "epoch": 0.6988694758478932,
      "grad_norm": 0.07425080984830856,
      "learning_rate": 2.566869300911854e-05,
      "loss": 0.2455,
      "step": 170
    },
    {
      "epoch": 0.7399794450154162,
      "grad_norm": 0.08862721920013428,
      "learning_rate": 2.521276595744681e-05,
      "loss": 0.2377,
      "step": 180
    },
    {
      "epoch": 0.7810894141829393,
      "grad_norm": 0.09345080703496933,
      "learning_rate": 2.4756838905775076e-05,
      "loss": 0.2493,
      "step": 190
    },
    {
      "epoch": 0.8221993833504625,
      "grad_norm": 0.12085441499948502,
      "learning_rate": 2.4300911854103345e-05,
      "loss": 0.243,
      "step": 200
    },
    {
      "epoch": 0.8633093525179856,
      "grad_norm": 0.09944415092468262,
      "learning_rate": 2.3844984802431613e-05,
      "loss": 0.2276,
      "step": 210
    },
    {
      "epoch": 0.9044193216855088,
      "grad_norm": 0.11600342392921448,
      "learning_rate": 2.338905775075988e-05,
      "loss": 0.2288,
      "step": 220
    },
    {
      "epoch": 0.9455292908530318,
      "grad_norm": 0.11351948231458664,
      "learning_rate": 2.2933130699088144e-05,
      "loss": 0.2136,
      "step": 230
    },
    {
      "epoch": 0.986639260020555,
      "grad_norm": 0.12985852360725403,
      "learning_rate": 2.2477203647416413e-05,
      "loss": 0.2166,
      "step": 240
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.241361603140831,
      "eval_runtime": 48.3711,
      "eval_samples_per_second": 13.479,
      "eval_steps_per_second": 3.37,
      "step": 244
    },
    {
      "epoch": 1.024665981500514,
      "grad_norm": 0.12141554057598114,
      "learning_rate": 2.2021276595744682e-05,
      "loss": 0.2116,
      "step": 250
    },
    {
      "epoch": 1.065775950668037,
      "grad_norm": 0.1510457694530487,
      "learning_rate": 2.156534954407295e-05,
      "loss": 0.2004,
      "step": 260
    },
    {
      "epoch": 1.10688591983556,
      "grad_norm": 0.18226052820682526,
      "learning_rate": 2.1109422492401216e-05,
      "loss": 0.2089,
      "step": 270
    },
    {
      "epoch": 1.1479958890030832,
      "grad_norm": 0.1632615327835083,
      "learning_rate": 2.0653495440729482e-05,
      "loss": 0.1878,
      "step": 280
    },
    {
      "epoch": 1.1891058581706064,
      "grad_norm": 0.2921697199344635,
      "learning_rate": 2.019756838905775e-05,
      "loss": 0.1798,
      "step": 290
    },
    {
      "epoch": 1.2302158273381294,
      "grad_norm": 0.19414232671260834,
      "learning_rate": 1.974164133738602e-05,
      "loss": 0.1851,
      "step": 300
    },
    {
      "epoch": 1.2713257965056526,
      "grad_norm": 0.2113204449415207,
      "learning_rate": 1.928571428571429e-05,
      "loss": 0.1884,
      "step": 310
    },
    {
      "epoch": 1.3124357656731758,
      "grad_norm": 0.25703924894332886,
      "learning_rate": 1.8829787234042554e-05,
      "loss": 0.1666,
      "step": 320
    },
    {
      "epoch": 1.353545734840699,
      "grad_norm": 0.32676252722740173,
      "learning_rate": 1.837386018237082e-05,
      "loss": 0.1673,
      "step": 330
    },
    {
      "epoch": 1.394655704008222,
      "grad_norm": 0.2629919946193695,
      "learning_rate": 1.7917933130699088e-05,
      "loss": 0.1656,
      "step": 340
    },
    {
      "epoch": 1.435765673175745,
      "grad_norm": 0.2691596448421478,
      "learning_rate": 1.7462006079027357e-05,
      "loss": 0.1513,
      "step": 350
    },
    {
      "epoch": 1.4768756423432683,
      "grad_norm": 0.2598915994167328,
      "learning_rate": 1.7006079027355622e-05,
      "loss": 0.15,
      "step": 360
    },
    {
      "epoch": 1.5179856115107913,
      "grad_norm": 0.2840367257595062,
      "learning_rate": 1.655015197568389e-05,
      "loss": 0.1393,
      "step": 370
    },
    {
      "epoch": 1.5590955806783144,
      "grad_norm": 0.27585548162460327,
      "learning_rate": 1.609422492401216e-05,
      "loss": 0.1326,
      "step": 380
    },
    {
      "epoch": 1.6002055498458376,
      "grad_norm": 0.3558744490146637,
      "learning_rate": 1.5638297872340426e-05,
      "loss": 0.1386,
      "step": 390
    },
    {
      "epoch": 1.6413155190133608,
      "grad_norm": 0.3336169719696045,
      "learning_rate": 1.5182370820668691e-05,
      "loss": 0.135,
      "step": 400
    },
    {
      "epoch": 1.682425488180884,
      "grad_norm": 0.4432605504989624,
      "learning_rate": 1.4726443768996962e-05,
      "loss": 0.1246,
      "step": 410
    },
    {
      "epoch": 1.723535457348407,
      "grad_norm": 0.3988304138183594,
      "learning_rate": 1.4270516717325229e-05,
      "loss": 0.129,
      "step": 420
    },
    {
      "epoch": 1.7646454265159301,
      "grad_norm": 0.3611379563808441,
      "learning_rate": 1.3814589665653496e-05,
      "loss": 0.1079,
      "step": 430
    },
    {
      "epoch": 1.8057553956834531,
      "grad_norm": 0.4067041277885437,
      "learning_rate": 1.3358662613981763e-05,
      "loss": 0.1275,
      "step": 440
    },
    {
      "epoch": 1.8468653648509763,
      "grad_norm": 0.36210691928863525,
      "learning_rate": 1.2902735562310032e-05,
      "loss": 0.1122,
      "step": 450
    },
    {
      "epoch": 1.8879753340184995,
      "grad_norm": 0.3510965406894684,
      "learning_rate": 1.2446808510638298e-05,
      "loss": 0.0966,
      "step": 460
    },
    {
      "epoch": 1.9290853031860227,
      "grad_norm": 0.38730406761169434,
      "learning_rate": 1.1990881458966566e-05,
      "loss": 0.1089,
      "step": 470
    },
    {
      "epoch": 1.9701952723535459,
      "grad_norm": 0.39076584577560425,
      "learning_rate": 1.1534954407294832e-05,
      "loss": 0.1038,
      "step": 480
    },
    {
      "epoch": 2.0,
      "eval_loss": 0.2861343026161194,
      "eval_runtime": 48.2041,
      "eval_samples_per_second": 13.526,
      "eval_steps_per_second": 3.381,
      "step": 488
    },
    {
      "epoch": 2.0082219938335046,
      "grad_norm": 0.4124261438846588,
      "learning_rate": 1.10790273556231e-05,
      "loss": 0.0975,
      "step": 490
    },
    {
      "epoch": 2.049331963001028,
      "grad_norm": 0.4260999262332916,
      "learning_rate": 1.0623100303951368e-05,
      "loss": 0.0973,
      "step": 500
    },
    {
      "epoch": 2.090441932168551,
      "grad_norm": 0.4411214292049408,
      "learning_rate": 1.0167173252279635e-05,
      "loss": 0.0803,
      "step": 510
    },
    {
      "epoch": 2.131551901336074,
      "grad_norm": 0.5388243794441223,
      "learning_rate": 9.711246200607902e-06,
      "loss": 0.089,
      "step": 520
    },
    {
      "epoch": 2.172661870503597,
      "grad_norm": 0.36110010743141174,
      "learning_rate": 9.255319148936171e-06,
      "loss": 0.0955,
      "step": 530
    },
    {
      "epoch": 2.21377183967112,
      "grad_norm": 0.30860260128974915,
      "learning_rate": 8.799392097264438e-06,
      "loss": 0.0824,
      "step": 540
    },
    {
      "epoch": 2.2548818088386433,
      "grad_norm": 0.42028531432151794,
      "learning_rate": 8.343465045592705e-06,
      "loss": 0.0936,
      "step": 550
    },
    {
      "epoch": 2.2959917780061665,
      "grad_norm": 0.36835429072380066,
      "learning_rate": 7.887537993920974e-06,
      "loss": 0.074,
      "step": 560
    },
    {
      "epoch": 2.3371017471736897,
      "grad_norm": 0.43269503116607666,
      "learning_rate": 7.43161094224924e-06,
      "loss": 0.0778,
      "step": 570
    },
    {
      "epoch": 2.378211716341213,
      "grad_norm": 0.4608248770236969,
      "learning_rate": 6.975683890577508e-06,
      "loss": 0.0731,
      "step": 580
    },
    {
      "epoch": 2.419321685508736,
      "grad_norm": 0.36565056443214417,
      "learning_rate": 6.519756838905775e-06,
      "loss": 0.0812,
      "step": 590
    },
    {
      "epoch": 2.460431654676259,
      "grad_norm": 0.37470170855522156,
      "learning_rate": 6.063829787234042e-06,
      "loss": 0.071,
      "step": 600
    },
    {
      "epoch": 2.501541623843782,
      "grad_norm": 0.47044429183006287,
      "learning_rate": 5.607902735562311e-06,
      "loss": 0.075,
      "step": 610
    },
    {
      "epoch": 2.542651593011305,
      "grad_norm": 0.3516956865787506,
      "learning_rate": 5.151975683890578e-06,
      "loss": 0.0684,
      "step": 620
    },
    {
      "epoch": 2.5837615621788284,
      "grad_norm": 0.4521888196468353,
      "learning_rate": 4.696048632218845e-06,
      "loss": 0.076,
      "step": 630
    },
    {
      "epoch": 2.6248715313463515,
      "grad_norm": 0.45891526341438293,
      "learning_rate": 4.240121580547112e-06,
      "loss": 0.0701,
      "step": 640
    },
    {
      "epoch": 2.6659815005138747,
      "grad_norm": 0.33456680178642273,
      "learning_rate": 3.7841945288753804e-06,
      "loss": 0.0682,
      "step": 650
    },
    {
      "epoch": 2.707091469681398,
      "grad_norm": 0.3150039613246918,
      "learning_rate": 3.3282674772036475e-06,
      "loss": 0.0785,
      "step": 660
    },
    {
      "epoch": 2.7482014388489207,
      "grad_norm": 0.3293338716030121,
      "learning_rate": 2.872340425531915e-06,
      "loss": 0.0634,
      "step": 670
    },
    {
      "epoch": 2.789311408016444,
      "grad_norm": 0.37423741817474365,
      "learning_rate": 2.4164133738601823e-06,
      "loss": 0.0715,
      "step": 680
    },
    {
      "epoch": 2.830421377183967,
      "grad_norm": 0.32468822598457336,
      "learning_rate": 1.96048632218845e-06,
      "loss": 0.0755,
      "step": 690
    },
    {
      "epoch": 2.87153134635149,
      "grad_norm": 0.388679563999176,
      "learning_rate": 1.5045592705167174e-06,
      "loss": 0.0688,
      "step": 700
    },
    {
      "epoch": 2.9126413155190134,
      "grad_norm": 0.36400821805000305,
      "learning_rate": 1.0486322188449848e-06,
      "loss": 0.0705,
      "step": 710
    },
    {
      "epoch": 2.9537512846865366,
      "grad_norm": 0.34037601947784424,
      "learning_rate": 5.927051671732523e-07,
      "loss": 0.0647,
      "step": 720
    },
    {
      "epoch": 2.9948612538540598,
      "grad_norm": 0.30876415967941284,
      "learning_rate": 1.3677811550151974e-07,
      "loss": 0.075,
      "step": 730
    },
    {
      "epoch": 3.0,
      "eval_loss": 0.3031086027622223,
      "eval_runtime": 48.1817,
      "eval_samples_per_second": 13.532,
      "eval_steps_per_second": 3.383,
      "step": 732
    }
  ],
  "logging_steps": 10,
  "max_steps": 732,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.0098660093853901e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}