{
  "best_metric": 2.0116560459136963,
  "best_model_checkpoint": "./checkpoints/ultrafeedback_binarized/phi-2-ultrafeedback_binarized-lambda0.15-ORPO-1-14-26/checkpoint-4491",
  "epoch": 1.5,
  "eval_steps": 500,
  "global_step": 4491,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.016700066800267203,
      "grad_norm": 120.0,
      "learning_rate": 1e-07,
      "loss": 3.6046,
      "step": 50
    },
    {
      "epoch": 0.033400133600534405,
      "grad_norm": 42.5,
      "learning_rate": 2e-07,
      "loss": 3.5223,
      "step": 100
    },
    {
      "epoch": 0.050100200400801605,
      "grad_norm": 34.0,
      "learning_rate": 3e-07,
      "loss": 3.4339,
      "step": 150
    },
    {
      "epoch": 0.06680026720106881,
      "grad_norm": 38.0,
      "learning_rate": 4e-07,
      "loss": 3.4468,
      "step": 200
    },
    {
      "epoch": 0.08350033400133601,
      "grad_norm": 39.25,
      "learning_rate": 5e-07,
      "loss": 3.3893,
      "step": 250
    },
    {
      "epoch": 0.10020040080160321,
      "grad_norm": 49.75,
      "learning_rate": 6e-07,
      "loss": 3.3252,
      "step": 300
    },
    {
      "epoch": 0.11690046760187041,
      "grad_norm": 111.5,
      "learning_rate": 7e-07,
      "loss": 3.2598,
      "step": 350
    },
    {
      "epoch": 0.13360053440213762,
      "grad_norm": 31.0,
      "learning_rate": 8e-07,
      "loss": 3.2579,
      "step": 400
    },
    {
      "epoch": 0.15030060120240482,
      "grad_norm": 111.0,
      "learning_rate": 9e-07,
      "loss": 3.2124,
      "step": 450
    },
    {
      "epoch": 0.16700066800267202,
      "grad_norm": 356.0,
      "learning_rate": 1e-06,
      "loss": 3.1667,
      "step": 500
    },
    {
      "epoch": 0.18370073480293922,
      "grad_norm": 22.875,
      "learning_rate": 9.996127778044774e-07,
      "loss": 3.1074,
      "step": 550
    },
    {
      "epoch": 0.20040080160320642,
      "grad_norm": 154.0,
      "learning_rate": 9.984517109820244e-07,
      "loss": 3.0856,
      "step": 600
    },
    {
      "epoch": 0.21710086840347362,
      "grad_norm": 197.0,
      "learning_rate": 9.965185978960174e-07,
      "loss": 3.0459,
      "step": 650
    },
    {
      "epoch": 0.23380093520374082,
      "grad_norm": 83.0,
      "learning_rate": 9.9381643272363e-07,
      "loss": 2.9253,
      "step": 700
    },
    {
      "epoch": 0.250501002004008,
      "grad_norm": 125.0,
      "learning_rate": 9.90349400818185e-07,
      "loss": 2.9436,
      "step": 750
    },
    {
      "epoch": 0.26720106880427524,
      "grad_norm": 64.5,
      "learning_rate": 9.86122872226508e-07,
      "loss": 2.9872,
      "step": 800
    },
    {
      "epoch": 0.28390113560454244,
      "grad_norm": 37.5,
      "learning_rate": 9.811433933713217e-07,
      "loss": 2.9256,
      "step": 850
    },
    {
      "epoch": 0.30060120240480964,
      "grad_norm": 51.0,
      "learning_rate": 9.754186769115654e-07,
      "loss": 2.8895,
      "step": 900
    },
    {
      "epoch": 0.31730126920507684,
      "grad_norm": 46.0,
      "learning_rate": 9.689575897963448e-07,
      "loss": 2.8348,
      "step": 950
    },
    {
      "epoch": 0.33400133600534404,
      "grad_norm": 115.0,
      "learning_rate": 9.617701395310122e-07,
      "loss": 2.83,
      "step": 1000
    },
    {
      "epoch": 0.35070140280561124,
      "grad_norm": 29.625,
      "learning_rate": 9.538674586766557e-07,
      "loss": 2.7793,
      "step": 1050
    },
    {
      "epoch": 0.36740146960587844,
      "grad_norm": 286.0,
      "learning_rate": 9.45261787606999e-07,
      "loss": 2.7292,
      "step": 1100
    },
    {
      "epoch": 0.38410153640614564,
      "grad_norm": 64.5,
      "learning_rate": 9.359664555494242e-07,
      "loss": 2.7686,
      "step": 1150
    },
    {
      "epoch": 0.40080160320641284,
      "grad_norm": 117.5,
      "learning_rate": 9.259958599394812e-07,
      "loss": 2.7036,
      "step": 1200
    },
    {
      "epoch": 0.41750167000668004,
      "grad_norm": 27.25,
      "learning_rate": 9.153654441208611e-07,
      "loss": 2.6712,
      "step": 1250
    },
    {
      "epoch": 0.43420173680694724,
      "grad_norm": 99.5,
      "learning_rate": 9.040916734253739e-07,
      "loss": 2.6642,
      "step": 1300
    },
    {
      "epoch": 0.45090180360721444,
      "grad_norm": 255.0,
      "learning_rate": 8.921920096699821e-07,
      "loss": 2.6795,
      "step": 1350
    },
    {
      "epoch": 0.46760187040748163,
      "grad_norm": 143.0,
      "learning_rate": 8.796848841103867e-07,
      "loss": 2.603,
      "step": 1400
    },
    {
      "epoch": 0.48430193720774883,
      "grad_norm": 286.0,
      "learning_rate": 8.665896688930638e-07,
      "loss": 2.6033,
      "step": 1450
    },
    {
      "epoch": 0.501002004008016,
      "grad_norm": 25.25,
      "learning_rate": 8.529266470499618e-07,
      "loss": 2.5379,
      "step": 1500
    },
    {
      "epoch": 0.5177020708082832,
      "grad_norm": 564.0,
      "learning_rate": 8.387169810823436e-07,
      "loss": 2.5032,
      "step": 1550
    },
    {
      "epoch": 0.5344021376085505,
      "grad_norm": 45.25,
      "learning_rate": 8.239826801824232e-07,
      "loss": 2.4736,
      "step": 1600
    },
    {
      "epoch": 0.5511022044088176,
      "grad_norm": 53.5,
      "learning_rate": 8.087465661435767e-07,
      "loss": 2.5829,
      "step": 1650
    },
    {
      "epoch": 0.5678022712090849,
      "grad_norm": 66.0,
      "learning_rate": 7.930322380119213e-07,
      "loss": 2.4656,
      "step": 1700
    },
    {
      "epoch": 0.584502338009352,
      "grad_norm": 22.125,
      "learning_rate": 7.768640355340184e-07,
      "loss": 2.4649,
      "step": 1750
    },
    {
      "epoch": 0.6012024048096193,
      "grad_norm": 84.0,
      "learning_rate": 7.602670014573127e-07,
      "loss": 2.3869,
      "step": 1800
    },
    {
      "epoch": 0.6179024716098864,
      "grad_norm": 26.5,
      "learning_rate": 7.432668427417013e-07,
      "loss": 2.4197,
      "step": 1850
    },
    {
      "epoch": 0.6346025384101537,
      "grad_norm": 68.0,
      "learning_rate": 7.258898907423128e-07,
      "loss": 2.38,
      "step": 1900
    },
    {
      "epoch": 0.6513026052104208,
      "grad_norm": 75.0,
      "learning_rate": 7.081630604251657e-07,
      "loss": 2.4333,
      "step": 1950
    },
    {
      "epoch": 0.6680026720106881,
      "grad_norm": 29.5,
      "learning_rate": 6.901138086788807e-07,
      "loss": 2.3658,
      "step": 2000
    },
    {
      "epoch": 0.6847027388109552,
      "grad_norm": 27.375,
      "learning_rate": 6.717700917870123e-07,
      "loss": 2.3418,
      "step": 2050
    },
    {
      "epoch": 0.7014028056112225,
      "grad_norm": 20.125,
      "learning_rate": 6.531603221268764e-07,
      "loss": 2.3305,
      "step": 2100
    },
    {
      "epoch": 0.7181028724114896,
      "grad_norm": 82.0,
      "learning_rate": 6.343133241619368e-07,
      "loss": 2.2667,
      "step": 2150
    },
    {
      "epoch": 0.7348029392117569,
      "grad_norm": 63.25,
      "learning_rate": 6.152582897959174e-07,
      "loss": 2.2144,
      "step": 2200
    },
    {
      "epoch": 0.751503006012024,
      "grad_norm": 91.0,
      "learning_rate": 5.960247331577903e-07,
      "loss": 2.3461,
      "step": 2250
    },
    {
      "epoch": 0.7682030728122913,
      "grad_norm": 40.5,
      "learning_rate": 5.766424448876717e-07,
      "loss": 2.3372,
      "step": 2300
    },
    {
      "epoch": 0.7849031396125584,
      "grad_norm": 112.5,
      "learning_rate": 5.571414459944348e-07,
      "loss": 2.2002,
      "step": 2350
    },
    {
      "epoch": 0.8016032064128257,
      "grad_norm": 30.375,
      "learning_rate": 5.375519413565045e-07,
      "loss": 2.3136,
      "step": 2400
    },
    {
      "epoch": 0.8183032732130928,
      "grad_norm": 98.0,
      "learning_rate": 5.179042729378615e-07,
      "loss": 2.2905,
      "step": 2450
    },
    {
      "epoch": 0.8350033400133601,
      "grad_norm": 34.0,
      "learning_rate": 4.982288727917136e-07,
      "loss": 2.2557,
      "step": 2500
    },
    {
      "epoch": 0.8517034068136272,
      "grad_norm": 35.25,
      "learning_rate": 4.785562159246301e-07,
      "loss": 2.2342,
      "step": 2550
    },
    {
      "epoch": 0.8684034736138945,
      "grad_norm": 80.0,
      "learning_rate": 4.5891677309414674e-07,
      "loss": 2.2967,
      "step": 2600
    },
    {
      "epoch": 0.8851035404141616,
      "grad_norm": 162.0,
      "learning_rate": 4.3934096361294974e-07,
      "loss": 2.2667,
      "step": 2650
    },
    {
      "epoch": 0.9018036072144289,
      "grad_norm": 208.0,
      "learning_rate": 4.198591082327453e-07,
      "loss": 2.2909,
      "step": 2700
    },
    {
      "epoch": 0.918503674014696,
      "grad_norm": 149.0,
      "learning_rate": 4.0050138218078553e-07,
      "loss": 2.2519,
      "step": 2750
    },
    {
      "epoch": 0.9352037408149633,
      "grad_norm": 53.25,
      "learning_rate": 3.812977684217997e-07,
      "loss": 2.1819,
      "step": 2800
    },
    {
      "epoch": 0.9519038076152304,
      "grad_norm": 21.875,
      "learning_rate": 3.622780112177145e-07,
      "loss": 2.1872,
      "step": 2850
    },
    {
      "epoch": 0.9686038744154977,
      "grad_norm": 79.5,
      "learning_rate": 3.4347157005710127e-07,
      "loss": 2.254,
      "step": 2900
    },
    {
      "epoch": 0.9853039412157648,
      "grad_norm": 73.5,
      "learning_rate": 3.249075740257048e-07,
      "loss": 2.1445,
      "step": 2950
    },
    {
      "epoch": 1.0,
      "eval_loss": 2.1521201133728027,
      "eval_runtime": 175.2067,
      "eval_samples_per_second": 9.201,
      "eval_steps_per_second": 1.153,
      "step": 2994
    },
    {
      "epoch": 1.002004008016032,
      "grad_norm": 115.5,
      "learning_rate": 3.066147766887289e-07,
      "loss": 2.1825,
      "step": 3000
    },
    {
      "epoch": 1.0187040748162992,
      "grad_norm": 52.5,
      "learning_rate": 2.886215115547621e-07,
      "loss": 2.1662,
      "step": 3050
    },
    {
      "epoch": 1.0354041416165665,
      "grad_norm": 34.75,
      "learning_rate": 2.7095564819032335e-07,
      "loss": 2.2043,
      "step": 3100
    },
    {
      "epoch": 1.0521042084168337,
      "grad_norm": 87.5,
      "learning_rate": 2.5364454905300353e-07,
      "loss": 2.1481,
      "step": 3150
    },
    {
      "epoch": 1.0688042752171008,
      "grad_norm": 76.5,
      "learning_rate": 2.3671502711006264e-07,
      "loss": 2.0757,
      "step": 3200
    },
    {
      "epoch": 1.085504342017368,
      "grad_norm": 176.0,
      "learning_rate": 2.201933043081239e-07,
      "loss": 2.1454,
      "step": 3250
    },
    {
      "epoch": 1.1022044088176353,
      "grad_norm": 32.75,
      "learning_rate": 2.0410497095829616e-07,
      "loss": 2.1603,
      "step": 3300
    },
    {
      "epoch": 1.1189044756179025,
      "grad_norm": 576.0,
      "learning_rate": 1.8847494609962726e-07,
      "loss": 2.1754,
      "step": 3350
    },
    {
      "epoch": 1.1356045424181698,
      "grad_norm": 154.0,
      "learning_rate": 1.7332743890228485e-07,
      "loss": 2.2309,
      "step": 3400
    },
    {
      "epoch": 1.1523046092184368,
      "grad_norm": 23.5,
      "learning_rate": 1.586859111702436e-07,
      "loss": 2.0781,
      "step": 3450
    },
    {
      "epoch": 1.169004676018704,
      "grad_norm": 43.0,
      "learning_rate": 1.445730410015602e-07,
      "loss": 2.0762,
      "step": 3500
    },
    {
      "epoch": 1.1857047428189713,
      "grad_norm": 28.375,
      "learning_rate": 1.310106876625218e-07,
      "loss": 2.0587,
      "step": 3550
    },
    {
      "epoch": 1.2024048096192386,
      "grad_norm": 70.5,
      "learning_rate": 1.180198577300745e-07,
      "loss": 2.0736,
      "step": 3600
    },
    {
      "epoch": 1.2191048764195056,
      "grad_norm": 104.5,
      "learning_rate": 1.0562067255497037e-07,
      "loss": 2.0893,
      "step": 3650
    },
    {
      "epoch": 1.2358049432197729,
      "grad_norm": 181.0,
      "learning_rate": 9.383233709603422e-08,
      "loss": 2.1176,
      "step": 3700
    },
    {
      "epoch": 1.25250501002004,
      "grad_norm": 114.0,
      "learning_rate": 8.267311017381778e-08,
      "loss": 2.1095,
      "step": 3750
    },
    {
      "epoch": 1.2692050768203074,
      "grad_norm": 217.0,
      "learning_rate": 7.216027618971804e-08,
      "loss": 2.0956,
      "step": 3800
    },
    {
      "epoch": 1.2859051436205746,
      "grad_norm": 404.0,
      "learning_rate": 6.231011835436073e-08,
      "loss": 2.0772,
      "step": 3850
    },
    {
      "epoch": 1.3026052104208417,
      "grad_norm": 144.0,
      "learning_rate": 5.3137893466718706e-08,
      "loss": 2.1183,
      "step": 3900
    },
    {
      "epoch": 1.319305277221109,
      "grad_norm": 29.625,
      "learning_rate": 4.465780828302756e-08,
      "loss": 2.0784,
      "step": 3950
    },
    {
      "epoch": 1.3360053440213762,
      "grad_norm": 278.0,
      "learning_rate": 3.6882997512099334e-08,
      "loss": 2.0097,
      "step": 4000
    },
    {
      "epoch": 1.3527054108216432,
      "grad_norm": 39.0,
      "learning_rate": 2.982550347111995e-08,
      "loss": 2.0364,
      "step": 4050
    },
    {
      "epoch": 1.3694054776219104,
      "grad_norm": 124.0,
      "learning_rate": 2.3496257433439193e-08,
      "loss": 2.007,
      "step": 4100
    },
    {
      "epoch": 1.3861055444221777,
      "grad_norm": 44.5,
      "learning_rate": 1.7905062697243956e-08,
      "loss": 2.0192,
      "step": 4150
    },
    {
      "epoch": 1.402805611222445,
      "grad_norm": 29.25,
      "learning_rate": 1.3060579401339644e-08,
      "loss": 2.0723,
      "step": 4200
    },
    {
      "epoch": 1.4195056780227122,
      "grad_norm": 103.0,
      "learning_rate": 8.970311111558215e-09,
      "loss": 2.0223,
      "step": 4250
    },
    {
      "epoch": 1.4362057448229792,
      "grad_norm": 246.0,
      "learning_rate": 5.64059319856941e-09,
      "loss": 2.0725,
      "step": 4300
    },
    {
      "epoch": 1.4529058116232465,
      "grad_norm": 43.75,
      "learning_rate": 3.0765830250963354e-09,
      "loss": 2.0345,
      "step": 4350
    },
    {
      "epoch": 1.4696058784235138,
      "grad_norm": 136.0,
      "learning_rate": 1.2822519577337932e-09,
      "loss": 2.0416,
      "step": 4400
    },
    {
      "epoch": 1.4863059452237808,
      "grad_norm": 126.5,
      "learning_rate": 2.6037921574328935e-10,
      "loss": 1.9981,
      "step": 4450
    },
    {
      "epoch": 1.5,
      "eval_loss": 2.0116560459136963,
      "eval_runtime": 175.2242,
      "eval_samples_per_second": 9.2,
      "eval_steps_per_second": 1.153,
      "step": 4491
    }
  ],
  "logging_steps": 50,
  "max_steps": 4491,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.1692852540499558e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}