{
  "best_metric": 1.627042293548584,
  "best_model_checkpoint": "./checkpoints/ultrafeedback_binarized/phi-2-ultrafeedback_binarized-lambda0.22-ORPO-2-5-54/checkpoint-2994",
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 2994,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.016700066800267203,
      "grad_norm": 170.0,
      "learning_rate": 4e-07,
      "loss": 2.9498,
      "step": 50
    },
    {
      "epoch": 0.033400133600534405,
      "grad_norm": 44.25,
      "learning_rate": 8e-07,
      "loss": 2.9012,
      "step": 100
    },
    {
      "epoch": 0.050100200400801605,
      "grad_norm": 113.5,
      "learning_rate": 1.2e-06,
      "loss": 2.8343,
      "step": 150
    },
    {
      "epoch": 0.06680026720106881,
      "grad_norm": 25.0,
      "learning_rate": 1.6e-06,
      "loss": 2.9081,
      "step": 200
    },
    {
      "epoch": 0.08350033400133601,
      "grad_norm": 31.125,
      "learning_rate": 2e-06,
      "loss": 2.8496,
      "step": 250
    },
    {
      "epoch": 0.10020040080160321,
      "grad_norm": 47.5,
      "learning_rate": 2.4e-06,
      "loss": 2.8085,
      "step": 300
    },
    {
      "epoch": 0.11690046760187041,
      "grad_norm": 87.5,
      "learning_rate": 2.8e-06,
      "loss": 2.7414,
      "step": 350
    },
    {
      "epoch": 0.13360053440213762,
      "grad_norm": 33.5,
      "learning_rate": 3.2e-06,
      "loss": 2.7483,
      "step": 400
    },
    {
      "epoch": 0.15030060120240482,
      "grad_norm": 116.5,
      "learning_rate": 3.6e-06,
      "loss": 2.6925,
      "step": 450
    },
    {
      "epoch": 0.16700066800267202,
      "grad_norm": 123.5,
      "learning_rate": 4e-06,
      "loss": 2.6456,
      "step": 500
    },
    {
      "epoch": 0.18370073480293922,
      "grad_norm": 21.75,
      "learning_rate": 4.4e-06,
      "loss": 2.5764,
      "step": 550
    },
    {
      "epoch": 0.20040080160320642,
      "grad_norm": 120.0,
      "learning_rate": 4.8e-06,
      "loss": 2.5388,
      "step": 600
    },
    {
      "epoch": 0.21710086840347362,
      "grad_norm": 168.0,
      "learning_rate": 5.2e-06,
      "loss": 2.48,
      "step": 650
    },
    {
      "epoch": 0.23380093520374082,
      "grad_norm": 54.0,
      "learning_rate": 5.6e-06,
      "loss": 2.3606,
      "step": 700
    },
    {
      "epoch": 0.250501002004008,
      "grad_norm": 98.5,
      "learning_rate": 6e-06,
      "loss": 2.2963,
      "step": 750
    },
    {
      "epoch": 0.26720106880427524,
      "grad_norm": 125.5,
      "learning_rate": 6.4e-06,
      "loss": 2.2326,
      "step": 800
    },
    {
      "epoch": 0.28390113560454244,
      "grad_norm": 24.375,
      "learning_rate": 6.799999999999999e-06,
      "loss": 2.1845,
      "step": 850
    },
    {
      "epoch": 0.30060120240480964,
      "grad_norm": 34.5,
      "learning_rate": 7.2e-06,
      "loss": 2.1695,
      "step": 900
    },
    {
      "epoch": 0.31730126920507684,
      "grad_norm": 39.75,
      "learning_rate": 7.599999999999999e-06,
      "loss": 2.1012,
      "step": 950
    },
    {
      "epoch": 0.33400133600534404,
      "grad_norm": 50.5,
      "learning_rate": 8e-06,
      "loss": 2.0348,
      "step": 1000
    },
    {
      "epoch": 0.35070140280561124,
      "grad_norm": 103.5,
      "learning_rate": 7.995951475442089e-06,
      "loss": 2.0471,
      "step": 1050
    },
    {
      "epoch": 0.36740146960587844,
      "grad_norm": 161.0,
      "learning_rate": 7.983814097043909e-06,
      "loss": 2.0154,
      "step": 1100
    },
    {
      "epoch": 0.38410153640614564,
      "grad_norm": 27.375,
      "learning_rate": 7.963612434042712e-06,
      "loss": 1.9808,
      "step": 1150
    },
    {
      "epoch": 0.40080160320641284,
      "grad_norm": 45.75,
      "learning_rate": 7.935387379902886e-06,
      "loss": 1.9637,
      "step": 1200
    },
    {
      "epoch": 0.41750167000668004,
      "grad_norm": 24.875,
      "learning_rate": 7.899196069536848e-06,
      "loss": 1.9415,
      "step": 1250
    },
    {
      "epoch": 0.43420173680694724,
      "grad_norm": 48.75,
      "learning_rate": 7.855111763648997e-06,
      "loss": 1.9427,
      "step": 1300
    },
    {
      "epoch": 0.45090180360721444,
      "grad_norm": 74.5,
      "learning_rate": 7.803223700436834e-06,
      "loss": 1.9173,
      "step": 1350
    },
    {
      "epoch": 0.46760187040748163,
      "grad_norm": 57.0,
      "learning_rate": 7.743636914949452e-06,
      "loss": 1.8808,
      "step": 1400
    },
    {
      "epoch": 0.48430193720774883,
      "grad_norm": 33.5,
      "learning_rate": 7.676472026469033e-06,
      "loss": 1.8879,
      "step": 1450
    },
    {
      "epoch": 0.501002004008016,
      "grad_norm": 16.5,
      "learning_rate": 7.6018649943458e-06,
      "loss": 1.8772,
      "step": 1500
    },
    {
      "epoch": 0.5177020708082832,
      "grad_norm": 552.0,
      "learning_rate": 7.519966842780625e-06,
      "loss": 1.8883,
      "step": 1550
    },
    {
      "epoch": 0.5344021376085505,
      "grad_norm": 147.0,
      "learning_rate": 7.430943355112437e-06,
      "loss": 1.8713,
      "step": 1600
    },
    {
      "epoch": 0.5511022044088176,
      "grad_norm": 35.25,
      "learning_rate": 7.334974738229263e-06,
      "loss": 1.8667,
      "step": 1650
    },
    {
      "epoch": 0.5678022712090849,
      "grad_norm": 38.5,
      "learning_rate": 7.232255257782226e-06,
      "loss": 1.813,
      "step": 1700
    },
    {
      "epoch": 0.584502338009352,
      "grad_norm": 20.5,
      "learning_rate": 7.122992844940905e-06,
      "loss": 1.8531,
      "step": 1750
    },
    {
      "epoch": 0.6012024048096193,
      "grad_norm": 52.0,
      "learning_rate": 7.0074086754861235e-06,
      "loss": 1.7999,
      "step": 1800
    },
    {
      "epoch": 0.6179024716098864,
      "grad_norm": 21.125,
      "learning_rate": 6.885736722092155e-06,
      "loss": 1.8156,
      "step": 1850
    },
    {
      "epoch": 0.6346025384101537,
      "grad_norm": 42.25,
      "learning_rate": 6.75822328070466e-06,
      "loss": 1.8131,
      "step": 1900
    },
    {
      "epoch": 0.6513026052104208,
      "grad_norm": 120.5,
      "learning_rate": 6.625126471973098e-06,
      "loss": 1.7886,
      "step": 1950
    },
    {
      "epoch": 0.6680026720106881,
      "grad_norm": 46.5,
      "learning_rate": 6.486715718746836e-06,
      "loss": 1.7696,
      "step": 2000
    },
    {
      "epoch": 0.6847027388109552,
      "grad_norm": 29.375,
      "learning_rate": 6.343271200692631e-06,
      "loss": 1.775,
      "step": 2050
    },
    {
      "epoch": 0.7014028056112225,
      "grad_norm": 16.875,
      "learning_rate": 6.195083287137502e-06,
      "loss": 1.7571,
      "step": 2100
    },
    {
      "epoch": 0.7181028724114896,
      "grad_norm": 56.25,
      "learning_rate": 6.042451949285056e-06,
      "loss": 1.7446,
      "step": 2150
    },
    {
      "epoch": 0.7348029392117569,
      "grad_norm": 75.5,
      "learning_rate": 5.8856861529950934e-06,
      "loss": 1.7819,
      "step": 2200
    },
    {
      "epoch": 0.751503006012024,
      "grad_norm": 48.0,
      "learning_rate": 5.725103233355676e-06,
      "loss": 1.7428,
      "step": 2250
    },
    {
      "epoch": 0.7682030728122913,
      "grad_norm": 24.5,
      "learning_rate": 5.5610282523136734e-06,
      "loss": 1.7615,
      "step": 2300
    },
    {
      "epoch": 0.7849031396125584,
      "grad_norm": 89.5,
      "learning_rate": 5.393793340664129e-06,
      "loss": 1.7216,
      "step": 2350
    },
    {
      "epoch": 0.8016032064128257,
      "grad_norm": 29.125,
      "learning_rate": 5.22373702573042e-06,
      "loss": 1.737,
      "step": 2400
    },
    {
      "epoch": 0.8183032732130928,
      "grad_norm": 47.75,
      "learning_rate": 5.0512035460961645e-06,
      "loss": 1.7184,
      "step": 2450
    },
    {
      "epoch": 0.8350033400133601,
      "grad_norm": 34.5,
      "learning_rate": 4.876542154776043e-06,
      "loss": 1.7214,
      "step": 2500
    },
    {
      "epoch": 0.8517034068136272,
      "grad_norm": 33.0,
      "learning_rate": 4.7001064122360936e-06,
      "loss": 1.7212,
      "step": 2550
    },
    {
      "epoch": 0.8684034736138945,
      "grad_norm": 33.0,
      "learning_rate": 4.522253470694602e-06,
      "loss": 1.7121,
      "step": 2600
    },
    {
      "epoch": 0.8851035404141616,
      "grad_norm": 41.25,
      "learning_rate": 4.3433433511523285e-06,
      "loss": 1.7444,
      "step": 2650
    },
    {
      "epoch": 0.9018036072144289,
      "grad_norm": 107.5,
      "learning_rate": 4.1637382146155875e-06,
      "loss": 1.7298,
      "step": 2700
    },
    {
      "epoch": 0.918503674014696,
      "grad_norm": 130.0,
      "learning_rate": 3.983801628987376e-06,
      "loss": 1.669,
      "step": 2750
    },
    {
      "epoch": 0.9352037408149633,
      "grad_norm": 39.25,
      "learning_rate": 3.803897833110589e-06,
      "loss": 1.7082,
      "step": 2800
    },
    {
      "epoch": 0.9519038076152304,
      "grad_norm": 22.125,
      "learning_rate": 3.6243909994530542e-06,
      "loss": 1.6648,
      "step": 2850
    },
    {
      "epoch": 0.9686038744154977,
      "grad_norm": 122.5,
      "learning_rate": 3.445644496926965e-06,
      "loss": 1.7098,
      "step": 2900
    },
    {
      "epoch": 0.9853039412157648,
      "grad_norm": 65.0,
      "learning_rate": 3.2680201553348756e-06,
      "loss": 1.6523,
      "step": 2950
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.627042293548584,
      "eval_runtime": 174.6574,
      "eval_samples_per_second": 9.229,
      "eval_steps_per_second": 1.157,
      "step": 2994
    }
  ],
  "logging_steps": 50,
  "max_steps": 4491,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 7.795126539858739e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}