{
"best_metric": 1.5239022970199585,
"best_model_checkpoint": "./checkpoints/ultrafeedback_binarized/phi-2-ultrafeedback_binarized-lambda0.22-ORPO-2-5-54/checkpoint-4491",
"epoch": 1.5,
"eval_steps": 500,
"global_step": 4491,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.016700066800267203,
"grad_norm": 170.0,
"learning_rate": 4e-07,
"loss": 2.9498,
"step": 50
},
{
"epoch": 0.033400133600534405,
"grad_norm": 44.25,
"learning_rate": 8e-07,
"loss": 2.9012,
"step": 100
},
{
"epoch": 0.050100200400801605,
"grad_norm": 113.5,
"learning_rate": 1.2e-06,
"loss": 2.8343,
"step": 150
},
{
"epoch": 0.06680026720106881,
"grad_norm": 25.0,
"learning_rate": 1.6e-06,
"loss": 2.9081,
"step": 200
},
{
"epoch": 0.08350033400133601,
"grad_norm": 31.125,
"learning_rate": 2e-06,
"loss": 2.8496,
"step": 250
},
{
"epoch": 0.10020040080160321,
"grad_norm": 47.5,
"learning_rate": 2.4e-06,
"loss": 2.8085,
"step": 300
},
{
"epoch": 0.11690046760187041,
"grad_norm": 87.5,
"learning_rate": 2.8e-06,
"loss": 2.7414,
"step": 350
},
{
"epoch": 0.13360053440213762,
"grad_norm": 33.5,
"learning_rate": 3.2e-06,
"loss": 2.7483,
"step": 400
},
{
"epoch": 0.15030060120240482,
"grad_norm": 116.5,
"learning_rate": 3.6e-06,
"loss": 2.6925,
"step": 450
},
{
"epoch": 0.16700066800267202,
"grad_norm": 123.5,
"learning_rate": 4e-06,
"loss": 2.6456,
"step": 500
},
{
"epoch": 0.18370073480293922,
"grad_norm": 21.75,
"learning_rate": 4.4e-06,
"loss": 2.5764,
"step": 550
},
{
"epoch": 0.20040080160320642,
"grad_norm": 120.0,
"learning_rate": 4.8e-06,
"loss": 2.5388,
"step": 600
},
{
"epoch": 0.21710086840347362,
"grad_norm": 168.0,
"learning_rate": 5.2e-06,
"loss": 2.48,
"step": 650
},
{
"epoch": 0.23380093520374082,
"grad_norm": 54.0,
"learning_rate": 5.6e-06,
"loss": 2.3606,
"step": 700
},
{
"epoch": 0.250501002004008,
"grad_norm": 98.5,
"learning_rate": 6e-06,
"loss": 2.2963,
"step": 750
},
{
"epoch": 0.26720106880427524,
"grad_norm": 125.5,
"learning_rate": 6.4e-06,
"loss": 2.2326,
"step": 800
},
{
"epoch": 0.28390113560454244,
"grad_norm": 24.375,
"learning_rate": 6.799999999999999e-06,
"loss": 2.1845,
"step": 850
},
{
"epoch": 0.30060120240480964,
"grad_norm": 34.5,
"learning_rate": 7.2e-06,
"loss": 2.1695,
"step": 900
},
{
"epoch": 0.31730126920507684,
"grad_norm": 39.75,
"learning_rate": 7.599999999999999e-06,
"loss": 2.1012,
"step": 950
},
{
"epoch": 0.33400133600534404,
"grad_norm": 50.5,
"learning_rate": 8e-06,
"loss": 2.0348,
"step": 1000
},
{
"epoch": 0.35070140280561124,
"grad_norm": 103.5,
"learning_rate": 7.995951475442089e-06,
"loss": 2.0471,
"step": 1050
},
{
"epoch": 0.36740146960587844,
"grad_norm": 161.0,
"learning_rate": 7.983814097043909e-06,
"loss": 2.0154,
"step": 1100
},
{
"epoch": 0.38410153640614564,
"grad_norm": 27.375,
"learning_rate": 7.963612434042712e-06,
"loss": 1.9808,
"step": 1150
},
{
"epoch": 0.40080160320641284,
"grad_norm": 45.75,
"learning_rate": 7.935387379902886e-06,
"loss": 1.9637,
"step": 1200
},
{
"epoch": 0.41750167000668004,
"grad_norm": 24.875,
"learning_rate": 7.899196069536848e-06,
"loss": 1.9415,
"step": 1250
},
{
"epoch": 0.43420173680694724,
"grad_norm": 48.75,
"learning_rate": 7.855111763648997e-06,
"loss": 1.9427,
"step": 1300
},
{
"epoch": 0.45090180360721444,
"grad_norm": 74.5,
"learning_rate": 7.803223700436834e-06,
"loss": 1.9173,
"step": 1350
},
{
"epoch": 0.46760187040748163,
"grad_norm": 57.0,
"learning_rate": 7.743636914949452e-06,
"loss": 1.8808,
"step": 1400
},
{
"epoch": 0.48430193720774883,
"grad_norm": 33.5,
"learning_rate": 7.676472026469033e-06,
"loss": 1.8879,
"step": 1450
},
{
"epoch": 0.501002004008016,
"grad_norm": 16.5,
"learning_rate": 7.6018649943458e-06,
"loss": 1.8772,
"step": 1500
},
{
"epoch": 0.5177020708082832,
"grad_norm": 552.0,
"learning_rate": 7.519966842780625e-06,
"loss": 1.8883,
"step": 1550
},
{
"epoch": 0.5344021376085505,
"grad_norm": 147.0,
"learning_rate": 7.430943355112437e-06,
"loss": 1.8713,
"step": 1600
},
{
"epoch": 0.5511022044088176,
"grad_norm": 35.25,
"learning_rate": 7.334974738229263e-06,
"loss": 1.8667,
"step": 1650
},
{
"epoch": 0.5678022712090849,
"grad_norm": 38.5,
"learning_rate": 7.232255257782226e-06,
"loss": 1.813,
"step": 1700
},
{
"epoch": 0.584502338009352,
"grad_norm": 20.5,
"learning_rate": 7.122992844940905e-06,
"loss": 1.8531,
"step": 1750
},
{
"epoch": 0.6012024048096193,
"grad_norm": 52.0,
"learning_rate": 7.0074086754861235e-06,
"loss": 1.7999,
"step": 1800
},
{
"epoch": 0.6179024716098864,
"grad_norm": 21.125,
"learning_rate": 6.885736722092155e-06,
"loss": 1.8156,
"step": 1850
},
{
"epoch": 0.6346025384101537,
"grad_norm": 42.25,
"learning_rate": 6.75822328070466e-06,
"loss": 1.8131,
"step": 1900
},
{
"epoch": 0.6513026052104208,
"grad_norm": 120.5,
"learning_rate": 6.625126471973098e-06,
"loss": 1.7886,
"step": 1950
},
{
"epoch": 0.6680026720106881,
"grad_norm": 46.5,
"learning_rate": 6.486715718746836e-06,
"loss": 1.7696,
"step": 2000
},
{
"epoch": 0.6847027388109552,
"grad_norm": 29.375,
"learning_rate": 6.343271200692631e-06,
"loss": 1.775,
"step": 2050
},
{
"epoch": 0.7014028056112225,
"grad_norm": 16.875,
"learning_rate": 6.195083287137502e-06,
"loss": 1.7571,
"step": 2100
},
{
"epoch": 0.7181028724114896,
"grad_norm": 56.25,
"learning_rate": 6.042451949285056e-06,
"loss": 1.7446,
"step": 2150
},
{
"epoch": 0.7348029392117569,
"grad_norm": 75.5,
"learning_rate": 5.8856861529950934e-06,
"loss": 1.7819,
"step": 2200
},
{
"epoch": 0.751503006012024,
"grad_norm": 48.0,
"learning_rate": 5.725103233355676e-06,
"loss": 1.7428,
"step": 2250
},
{
"epoch": 0.7682030728122913,
"grad_norm": 24.5,
"learning_rate": 5.5610282523136734e-06,
"loss": 1.7615,
"step": 2300
},
{
"epoch": 0.7849031396125584,
"grad_norm": 89.5,
"learning_rate": 5.393793340664129e-06,
"loss": 1.7216,
"step": 2350
},
{
"epoch": 0.8016032064128257,
"grad_norm": 29.125,
"learning_rate": 5.22373702573042e-06,
"loss": 1.737,
"step": 2400
},
{
"epoch": 0.8183032732130928,
"grad_norm": 47.75,
"learning_rate": 5.0512035460961645e-06,
"loss": 1.7184,
"step": 2450
},
{
"epoch": 0.8350033400133601,
"grad_norm": 34.5,
"learning_rate": 4.876542154776043e-06,
"loss": 1.7214,
"step": 2500
},
{
"epoch": 0.8517034068136272,
"grad_norm": 33.0,
"learning_rate": 4.7001064122360936e-06,
"loss": 1.7212,
"step": 2550
},
{
"epoch": 0.8684034736138945,
"grad_norm": 33.0,
"learning_rate": 4.522253470694602e-06,
"loss": 1.7121,
"step": 2600
},
{
"epoch": 0.8851035404141616,
"grad_norm": 41.25,
"learning_rate": 4.3433433511523285e-06,
"loss": 1.7444,
"step": 2650
},
{
"epoch": 0.9018036072144289,
"grad_norm": 107.5,
"learning_rate": 4.1637382146155875e-06,
"loss": 1.7298,
"step": 2700
},
{
"epoch": 0.918503674014696,
"grad_norm": 130.0,
"learning_rate": 3.983801628987376e-06,
"loss": 1.669,
"step": 2750
},
{
"epoch": 0.9352037408149633,
"grad_norm": 39.25,
"learning_rate": 3.803897833110589e-06,
"loss": 1.7082,
"step": 2800
},
{
"epoch": 0.9519038076152304,
"grad_norm": 22.125,
"learning_rate": 3.6243909994530542e-06,
"loss": 1.6648,
"step": 2850
},
{
"epoch": 0.9686038744154977,
"grad_norm": 122.5,
"learning_rate": 3.445644496926965e-06,
"loss": 1.7098,
"step": 2900
},
{
"epoch": 0.9853039412157648,
"grad_norm": 65.0,
"learning_rate": 3.2680201553348756e-06,
"loss": 1.6523,
"step": 2950
},
{
"epoch": 1.0,
"eval_loss": 1.627042293548584,
"eval_runtime": 174.6574,
"eval_samples_per_second": 9.229,
"eval_steps_per_second": 1.157,
"step": 2994
},
{
"epoch": 1.002004008016032,
"grad_norm": 132.0,
"learning_rate": 3.091877532931298e-06,
"loss": 1.669,
"step": 3000
},
{
"epoch": 1.0187040748162992,
"grad_norm": 84.0,
"learning_rate": 2.9175731885824783e-06,
"loss": 1.679,
"step": 3050
},
{
"epoch": 1.0354041416165665,
"grad_norm": 24.75,
"learning_rate": 2.745459959997742e-06,
"loss": 1.6495,
"step": 3100
},
{
"epoch": 1.0521042084168337,
"grad_norm": 152.0,
"learning_rate": 2.5758862494934193e-06,
"loss": 1.6473,
"step": 3150
},
{
"epoch": 1.0688042752171008,
"grad_norm": 33.0,
"learning_rate": 2.4091953187351898e-06,
"loss": 1.6427,
"step": 3200
},
{
"epoch": 1.085504342017368,
"grad_norm": 72.0,
"learning_rate": 2.245724593886428e-06,
"loss": 1.6354,
"step": 3250
},
{
"epoch": 1.1022044088176353,
"grad_norm": 20.75,
"learning_rate": 2.0858049825691605e-06,
"loss": 1.6054,
"step": 3300
},
{
"epoch": 1.1189044756179025,
"grad_norm": 82.0,
"learning_rate": 1.929760204020242e-06,
"loss": 1.6499,
"step": 3350
},
{
"epoch": 1.1356045424181698,
"grad_norm": 59.75,
"learning_rate": 1.7779061337987172e-06,
"loss": 1.6453,
"step": 3400
},
{
"epoch": 1.1523046092184368,
"grad_norm": 23.875,
"learning_rate": 1.6305501643708403e-06,
"loss": 1.6213,
"step": 3450
},
{
"epoch": 1.169004676018704,
"grad_norm": 25.375,
"learning_rate": 1.4879905828671037e-06,
"loss": 1.6047,
"step": 3500
},
{
"epoch": 1.1857047428189713,
"grad_norm": 20.5,
"learning_rate": 1.3505159672708488e-06,
"loss": 1.6216,
"step": 3550
},
{
"epoch": 1.2024048096192386,
"grad_norm": 52.75,
"learning_rate": 1.218404602260741e-06,
"loss": 1.6315,
"step": 3600
},
{
"epoch": 1.2191048764195056,
"grad_norm": 143.0,
"learning_rate": 1.091923915889592e-06,
"loss": 1.6075,
"step": 3650
},
{
"epoch": 1.2358049432197729,
"grad_norm": 49.75,
"learning_rate": 9.713299382398368e-07,
"loss": 1.6066,
"step": 3700
},
{
"epoch": 1.25250501002004,
"grad_norm": 79.5,
"learning_rate": 8.568667831515051e-07,
"loss": 1.6075,
"step": 3750
},
{
"epoch": 1.2692050768203074,
"grad_norm": 35.25,
"learning_rate": 7.487661540717676e-07,
"loss": 1.5862,
"step": 3800
},
{
"epoch": 1.2859051436205746,
"grad_norm": 35.0,
"learning_rate": 6.472468750264064e-07,
"loss": 1.6117,
"step": 3850
},
{
"epoch": 1.3026052104208417,
"grad_norm": 28.875,
"learning_rate": 5.525144476625759e-07,
"loss": 1.6138,
"step": 3900
},
{
"epoch": 1.319305277221109,
"grad_norm": 27.0,
"learning_rate": 4.6476063525958544e-07,
"loss": 1.5814,
"step": 3950
},
{
"epoch": 1.3360053440213762,
"grad_norm": 87.0,
"learning_rate": 3.8416307454971443e-07,
"loss": 1.5842,
"step": 4000
},
{
"epoch": 1.3527054108216432,
"grad_norm": 30.75,
"learning_rate": 3.108849161348859e-07,
"loss": 1.5809,
"step": 4050
},
{
"epoch": 1.3694054776219104,
"grad_norm": 37.25,
"learning_rate": 2.45074494227048e-07,
"loss": 1.5923,
"step": 4100
},
{
"epoch": 1.3861055444221777,
"grad_norm": 55.75,
"learning_rate": 1.8686502638083312e-07,
"loss": 1.5884,
"step": 4150
},
{
"epoch": 1.402805611222445,
"grad_norm": 30.125,
"learning_rate": 1.3637434382627855e-07,
"loss": 1.615,
"step": 4200
},
{
"epoch": 1.4195056780227122,
"grad_norm": 87.0,
"learning_rate": 9.370465294751984e-08,
"loss": 1.5556,
"step": 4250
},
{
"epoch": 1.4362057448229792,
"grad_norm": 62.75,
"learning_rate": 5.894232839025548e-08,
"loss": 1.5773,
"step": 4300
},
{
"epoch": 1.4529058116232465,
"grad_norm": 21.875,
"learning_rate": 3.215773821681766e-08,
"loss": 1.6198,
"step": 4350
},
{
"epoch": 1.4696058784235138,
"grad_norm": 50.5,
"learning_rate": 1.3405101462750313e-08,
"loss": 1.5676,
"step": 4400
},
{
"epoch": 1.4863059452237808,
"grad_norm": 49.75,
"learning_rate": 2.722378383265944e-09,
"loss": 1.5386,
"step": 4450
},
{
"epoch": 1.5,
"eval_loss": 1.5239022970199585,
"eval_runtime": 174.6449,
"eval_samples_per_second": 9.23,
"eval_steps_per_second": 1.157,
"step": 4491
}
],
"logging_steps": 50,
"max_steps": 4491,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.1692852540499558e+18,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}