{
  "best_metric": 0.019755465909838676,
  "best_model_checkpoint": "influence-longformer/checkpoint-1995",
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 1995,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03759398496240601,
      "grad_norm": 1.6022701263427734,
      "learning_rate": 4.229323308270677e-06,
      "loss": 0.1482,
      "step": 25
    },
    {
      "epoch": 0.07518796992481203,
      "grad_norm": 2.4601035118103027,
      "learning_rate": 8.458646616541353e-06,
      "loss": 0.0555,
      "step": 50
    },
    {
      "epoch": 0.11278195488721804,
      "grad_norm": 0.42991551756858826,
      "learning_rate": 1.2687969924812032e-05,
      "loss": 0.038,
      "step": 75
    },
    {
      "epoch": 0.15037593984962405,
      "grad_norm": 1.5615288019180298,
      "learning_rate": 1.6917293233082707e-05,
      "loss": 0.0332,
      "step": 100
    },
    {
      "epoch": 0.18796992481203006,
      "grad_norm": 1.0025038719177246,
      "learning_rate": 2.1146616541353385e-05,
      "loss": 0.0334,
      "step": 125
    },
    {
      "epoch": 0.22556390977443608,
      "grad_norm": 0.8407266736030579,
      "learning_rate": 2.5375939849624064e-05,
      "loss": 0.0291,
      "step": 150
    },
    {
      "epoch": 0.2631578947368421,
      "grad_norm": 2.030592441558838,
      "learning_rate": 2.9605263157894742e-05,
      "loss": 0.031,
      "step": 175
    },
    {
      "epoch": 0.3007518796992481,
      "grad_norm": 0.8694236278533936,
      "learning_rate": 3.3834586466165414e-05,
      "loss": 0.0312,
      "step": 200
    },
    {
      "epoch": 0.3383458646616541,
      "grad_norm": 0.6539453268051147,
      "learning_rate": 3.806390977443609e-05,
      "loss": 0.0282,
      "step": 225
    },
    {
      "epoch": 0.37593984962406013,
      "grad_norm": 1.2007642984390259,
      "learning_rate": 4.229323308270677e-05,
      "loss": 0.0281,
      "step": 250
    },
    {
      "epoch": 0.41353383458646614,
      "grad_norm": 0.33443132042884827,
      "learning_rate": 4.4830827067669176e-05,
      "loss": 0.0295,
      "step": 275
    },
    {
      "epoch": 0.45112781954887216,
      "grad_norm": 1.085522174835205,
      "learning_rate": 4.43609022556391e-05,
      "loss": 0.027,
      "step": 300
    },
    {
      "epoch": 0.48872180451127817,
      "grad_norm": 0.7158887982368469,
      "learning_rate": 4.3890977443609024e-05,
      "loss": 0.0264,
      "step": 325
    },
    {
      "epoch": 0.5263157894736842,
      "grad_norm": 0.23328174650669098,
      "learning_rate": 4.342105263157895e-05,
      "loss": 0.0259,
      "step": 350
    },
    {
      "epoch": 0.5639097744360902,
      "grad_norm": 0.4180866777896881,
      "learning_rate": 4.295112781954887e-05,
      "loss": 0.0265,
      "step": 375
    },
    {
      "epoch": 0.6015037593984962,
      "grad_norm": 0.2783370018005371,
      "learning_rate": 4.24812030075188e-05,
      "loss": 0.0264,
      "step": 400
    },
    {
      "epoch": 0.6390977443609023,
      "grad_norm": 1.226720929145813,
      "learning_rate": 4.201127819548873e-05,
      "loss": 0.0252,
      "step": 425
    },
    {
      "epoch": 0.6766917293233082,
      "grad_norm": 0.7401314973831177,
      "learning_rate": 4.154135338345865e-05,
      "loss": 0.0275,
      "step": 450
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 0.986824631690979,
      "learning_rate": 4.107142857142857e-05,
      "loss": 0.0279,
      "step": 475
    },
    {
      "epoch": 0.7518796992481203,
      "grad_norm": 0.8208933472633362,
      "learning_rate": 4.0601503759398494e-05,
      "loss": 0.0268,
      "step": 500
    },
    {
      "epoch": 0.7894736842105263,
      "grad_norm": 0.4981847405433655,
      "learning_rate": 4.0131578947368425e-05,
      "loss": 0.025,
      "step": 525
    },
    {
      "epoch": 0.8270676691729323,
      "grad_norm": 0.2331264764070511,
      "learning_rate": 3.966165413533835e-05,
      "loss": 0.0245,
      "step": 550
    },
    {
      "epoch": 0.8646616541353384,
      "grad_norm": 0.8994643092155457,
      "learning_rate": 3.9191729323308274e-05,
      "loss": 0.0258,
      "step": 575
    },
    {
      "epoch": 0.9022556390977443,
      "grad_norm": 0.1817372441291809,
      "learning_rate": 3.87218045112782e-05,
      "loss": 0.0251,
      "step": 600
    },
    {
      "epoch": 0.9398496240601504,
      "grad_norm": 0.5260267853736877,
      "learning_rate": 3.825187969924812e-05,
      "loss": 0.0251,
      "step": 625
    },
    {
      "epoch": 0.9774436090225563,
      "grad_norm": 0.4542248547077179,
      "learning_rate": 3.780075187969925e-05,
      "loss": 0.0239,
      "step": 650
    },
    {
      "epoch": 1.0,
      "eval_explained_variance": 0.28540539741516113,
      "eval_loss": 0.02087027207016945,
      "eval_mae": 0.11132633686065674,
      "eval_mse": 0.020866233855485916,
      "eval_r2": 0.2827957272529602,
      "eval_rmse": 0.14445149308846175,
      "eval_runtime": 333.1624,
      "eval_samples_per_second": 63.858,
      "eval_steps_per_second": 1.0,
      "step": 665
    },
    {
      "epoch": 1.0150375939849625,
      "grad_norm": 0.3731153607368469,
      "learning_rate": 3.7330827067669176e-05,
      "loss": 0.0238,
      "step": 675
    },
    {
      "epoch": 1.0526315789473684,
      "grad_norm": 0.5566776990890503,
      "learning_rate": 3.68609022556391e-05,
      "loss": 0.0237,
      "step": 700
    },
    {
      "epoch": 1.0902255639097744,
      "grad_norm": 0.7956374287605286,
      "learning_rate": 3.6390977443609025e-05,
      "loss": 0.0231,
      "step": 725
    },
    {
      "epoch": 1.1278195488721805,
      "grad_norm": 0.40257030725479126,
      "learning_rate": 3.592105263157895e-05,
      "loss": 0.0231,
      "step": 750
    },
    {
      "epoch": 1.1654135338345863,
      "grad_norm": 0.20914725959300995,
      "learning_rate": 3.5451127819548873e-05,
      "loss": 0.0245,
      "step": 775
    },
    {
      "epoch": 1.2030075187969924,
      "grad_norm": 0.20414456725120544,
      "learning_rate": 3.49812030075188e-05,
      "loss": 0.0235,
      "step": 800
    },
    {
      "epoch": 1.2406015037593985,
      "grad_norm": 0.5779170393943787,
      "learning_rate": 3.451127819548872e-05,
      "loss": 0.0238,
      "step": 825
    },
    {
      "epoch": 1.2781954887218046,
      "grad_norm": 0.9034671783447266,
      "learning_rate": 3.404135338345865e-05,
      "loss": 0.0237,
      "step": 850
    },
    {
      "epoch": 1.3157894736842106,
      "grad_norm": 0.2771829068660736,
      "learning_rate": 3.357142857142858e-05,
      "loss": 0.0225,
      "step": 875
    },
    {
      "epoch": 1.3533834586466165,
      "grad_norm": 0.5990172028541565,
      "learning_rate": 3.3101503759398495e-05,
      "loss": 0.024,
      "step": 900
    },
    {
      "epoch": 1.3909774436090225,
      "grad_norm": 0.6648531556129456,
      "learning_rate": 3.263157894736842e-05,
      "loss": 0.0229,
      "step": 925
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 0.2648594379425049,
      "learning_rate": 3.216165413533835e-05,
      "loss": 0.0224,
      "step": 950
    },
    {
      "epoch": 1.4661654135338344,
      "grad_norm": 0.26627054810523987,
      "learning_rate": 3.1691729323308274e-05,
      "loss": 0.0242,
      "step": 975
    },
    {
      "epoch": 1.5037593984962405,
      "grad_norm": 0.5062589645385742,
      "learning_rate": 3.12218045112782e-05,
      "loss": 0.0223,
      "step": 1000
    },
    {
      "epoch": 1.5413533834586466,
      "grad_norm": 0.21417106688022614,
      "learning_rate": 3.075187969924812e-05,
      "loss": 0.0218,
      "step": 1025
    },
    {
      "epoch": 1.5789473684210527,
      "grad_norm": 0.9766432046890259,
      "learning_rate": 3.028195488721805e-05,
      "loss": 0.023,
      "step": 1050
    },
    {
      "epoch": 1.6165413533834587,
      "grad_norm": 0.17392820119857788,
      "learning_rate": 2.981203007518797e-05,
      "loss": 0.023,
      "step": 1075
    },
    {
      "epoch": 1.6541353383458648,
      "grad_norm": 0.23946884274482727,
      "learning_rate": 2.9342105263157895e-05,
      "loss": 0.0218,
      "step": 1100
    },
    {
      "epoch": 1.6917293233082706,
      "grad_norm": 0.3760274648666382,
      "learning_rate": 2.887218045112782e-05,
      "loss": 0.0226,
      "step": 1125
    },
    {
      "epoch": 1.7293233082706767,
      "grad_norm": 0.3618028461933136,
      "learning_rate": 2.8402255639097747e-05,
      "loss": 0.0215,
      "step": 1150
    },
    {
      "epoch": 1.7669172932330826,
      "grad_norm": 0.24896082282066345,
      "learning_rate": 2.793233082706767e-05,
      "loss": 0.0218,
      "step": 1175
    },
    {
      "epoch": 1.8045112781954886,
      "grad_norm": 0.9801834225654602,
      "learning_rate": 2.7462406015037596e-05,
      "loss": 0.0222,
      "step": 1200
    },
    {
      "epoch": 1.8421052631578947,
      "grad_norm": 0.2100023478269577,
      "learning_rate": 2.699248120300752e-05,
      "loss": 0.024,
      "step": 1225
    },
    {
      "epoch": 1.8796992481203008,
      "grad_norm": 0.4694213569164276,
      "learning_rate": 2.6522556390977448e-05,
      "loss": 0.0221,
      "step": 1250
    },
    {
      "epoch": 1.9172932330827068,
      "grad_norm": 0.5287728309631348,
      "learning_rate": 2.6052631578947372e-05,
      "loss": 0.0223,
      "step": 1275
    },
    {
      "epoch": 1.954887218045113,
      "grad_norm": 1.2186554670333862,
      "learning_rate": 2.5582706766917296e-05,
      "loss": 0.0221,
      "step": 1300
    },
    {
      "epoch": 1.9924812030075187,
      "grad_norm": 0.18354646861553192,
      "learning_rate": 2.5112781954887217e-05,
      "loss": 0.0222,
      "step": 1325
    },
    {
      "epoch": 2.0,
      "eval_explained_variance": 0.3022691607475281,
      "eval_loss": 0.02030428871512413,
      "eval_mae": 0.11059005558490753,
      "eval_mse": 0.020300239324569702,
      "eval_r2": 0.3022497892379761,
      "eval_rmse": 0.14247890834986665,
      "eval_runtime": 332.2379,
      "eval_samples_per_second": 64.035,
      "eval_steps_per_second": 1.002,
      "step": 1330
    },
    {
      "epoch": 2.030075187969925,
      "grad_norm": 0.13428334891796112,
      "learning_rate": 2.4642857142857148e-05,
      "loss": 0.0212,
      "step": 1350
    },
    {
      "epoch": 2.0676691729323307,
      "grad_norm": 0.31226059794425964,
      "learning_rate": 2.417293233082707e-05,
      "loss": 0.0211,
      "step": 1375
    },
    {
      "epoch": 2.1052631578947367,
      "grad_norm": 0.3287919759750366,
      "learning_rate": 2.3703007518796993e-05,
      "loss": 0.022,
      "step": 1400
    },
    {
      "epoch": 2.142857142857143,
      "grad_norm": 0.43540850281715393,
      "learning_rate": 2.3233082706766917e-05,
      "loss": 0.0213,
      "step": 1425
    },
    {
      "epoch": 2.180451127819549,
      "grad_norm": 0.25581884384155273,
      "learning_rate": 2.2763157894736845e-05,
      "loss": 0.0205,
      "step": 1450
    },
    {
      "epoch": 2.218045112781955,
      "grad_norm": 0.45723602175712585,
      "learning_rate": 2.2293233082706766e-05,
      "loss": 0.021,
      "step": 1475
    },
    {
      "epoch": 2.255639097744361,
      "grad_norm": 0.5476940274238586,
      "learning_rate": 2.1823308270676693e-05,
      "loss": 0.0212,
      "step": 1500
    },
    {
      "epoch": 2.293233082706767,
      "grad_norm": 0.1626814901828766,
      "learning_rate": 2.1353383458646618e-05,
      "loss": 0.021,
      "step": 1525
    },
    {
      "epoch": 2.3308270676691727,
      "grad_norm": 0.29517662525177,
      "learning_rate": 2.0883458646616542e-05,
      "loss": 0.0204,
      "step": 1550
    },
    {
      "epoch": 2.3684210526315788,
      "grad_norm": 0.6911824345588684,
      "learning_rate": 2.0413533834586466e-05,
      "loss": 0.0203,
      "step": 1575
    },
    {
      "epoch": 2.406015037593985,
      "grad_norm": 0.42551133036613464,
      "learning_rate": 1.9943609022556394e-05,
      "loss": 0.02,
      "step": 1600
    },
    {
      "epoch": 2.443609022556391,
      "grad_norm": 0.44191497564315796,
      "learning_rate": 1.9473684210526315e-05,
      "loss": 0.0206,
      "step": 1625
    },
    {
      "epoch": 2.481203007518797,
      "grad_norm": 0.853766143321991,
      "learning_rate": 1.9003759398496242e-05,
      "loss": 0.0209,
      "step": 1650
    },
    {
      "epoch": 2.518796992481203,
      "grad_norm": 1.0731028318405151,
      "learning_rate": 1.8533834586466166e-05,
      "loss": 0.0212,
      "step": 1675
    },
    {
      "epoch": 2.556390977443609,
      "grad_norm": 0.3144964575767517,
      "learning_rate": 1.806390977443609e-05,
      "loss": 0.021,
      "step": 1700
    },
    {
      "epoch": 2.593984962406015,
      "grad_norm": 0.3022163212299347,
      "learning_rate": 1.7593984962406015e-05,
      "loss": 0.0216,
      "step": 1725
    },
    {
      "epoch": 2.6315789473684212,
      "grad_norm": 0.45970025658607483,
      "learning_rate": 1.7124060150375943e-05,
      "loss": 0.0204,
      "step": 1750
    },
    {
      "epoch": 2.6691729323308273,
      "grad_norm": 0.2722722291946411,
      "learning_rate": 1.6654135338345863e-05,
      "loss": 0.021,
      "step": 1775
    },
    {
      "epoch": 2.706766917293233,
      "grad_norm": 0.24549676477909088,
      "learning_rate": 1.618421052631579e-05,
      "loss": 0.0207,
      "step": 1800
    },
    {
      "epoch": 2.744360902255639,
      "grad_norm": 0.28373944759368896,
      "learning_rate": 1.5714285714285715e-05,
      "loss": 0.0204,
      "step": 1825
    },
    {
      "epoch": 2.781954887218045,
      "grad_norm": 0.47003814578056335,
      "learning_rate": 1.5244360902255641e-05,
      "loss": 0.0207,
      "step": 1850
    },
    {
      "epoch": 2.819548872180451,
      "grad_norm": 0.967040479183197,
      "learning_rate": 1.4774436090225564e-05,
      "loss": 0.0215,
      "step": 1875
    },
    {
      "epoch": 2.857142857142857,
      "grad_norm": 0.46782004833221436,
      "learning_rate": 1.430451127819549e-05,
      "loss": 0.0217,
      "step": 1900
    },
    {
      "epoch": 2.8947368421052633,
      "grad_norm": 0.5362545251846313,
      "learning_rate": 1.3834586466165414e-05,
      "loss": 0.0211,
      "step": 1925
    },
    {
      "epoch": 2.932330827067669,
      "grad_norm": 0.25926506519317627,
      "learning_rate": 1.336466165413534e-05,
      "loss": 0.0204,
      "step": 1950
    },
    {
      "epoch": 2.969924812030075,
      "grad_norm": 0.44383227825164795,
      "learning_rate": 1.2894736842105262e-05,
      "loss": 0.0204,
      "step": 1975
    },
    {
      "epoch": 3.0,
      "eval_explained_variance": 0.32502323389053345,
      "eval_loss": 0.019755465909838676,
      "eval_mae": 0.11072802543640137,
      "eval_mse": 0.019751867279410362,
      "eval_r2": 0.3210982084274292,
      "eval_rmse": 0.14054133655053364,
      "eval_runtime": 333.2303,
      "eval_samples_per_second": 63.845,
      "eval_steps_per_second": 0.999,
      "step": 1995
    }
  ],
  "logging_steps": 25,
  "max_steps": 2660,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 500,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.01
      },
      "attributes": {
        "early_stopping_patience_counter": 2
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 6.709296778916659e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}