{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 5.0,
"eval_steps": 500,
"global_step": 5000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.05,
"grad_norm": 4.71875,
"learning_rate": 0.00019800000000000002,
"loss": 1.1991,
"step": 50
},
{
"epoch": 0.1,
"grad_norm": 7.03125,
"learning_rate": 0.000196,
"loss": 0.5348,
"step": 100
},
{
"epoch": 0.15,
"grad_norm": 13.625,
"learning_rate": 0.000194,
"loss": 0.7719,
"step": 150
},
{
"epoch": 0.2,
"grad_norm": 1.7578125,
"learning_rate": 0.000192,
"loss": 0.5357,
"step": 200
},
{
"epoch": 0.25,
"grad_norm": 0.52734375,
"learning_rate": 0.00019,
"loss": 0.5368,
"step": 250
},
{
"epoch": 0.3,
"grad_norm": 1.34375,
"learning_rate": 0.000188,
"loss": 0.447,
"step": 300
},
{
"epoch": 0.35,
"grad_norm": 0.9140625,
"learning_rate": 0.00018600000000000002,
"loss": 0.4369,
"step": 350
},
{
"epoch": 0.4,
"grad_norm": 1.2421875,
"learning_rate": 0.00018400000000000003,
"loss": 0.5182,
"step": 400
},
{
"epoch": 0.45,
"grad_norm": 1.90625,
"learning_rate": 0.000182,
"loss": 0.4342,
"step": 450
},
{
"epoch": 0.5,
"grad_norm": 3.265625,
"learning_rate": 0.00018,
"loss": 0.4876,
"step": 500
},
{
"epoch": 0.55,
"grad_norm": 2.1875,
"learning_rate": 0.00017800000000000002,
"loss": 0.6829,
"step": 550
},
{
"epoch": 0.6,
"grad_norm": 29.125,
"learning_rate": 0.00017600000000000002,
"loss": 0.5333,
"step": 600
},
{
"epoch": 0.65,
"grad_norm": 4.34375,
"learning_rate": 0.000174,
"loss": 0.544,
"step": 650
},
{
"epoch": 0.7,
"grad_norm": 1.4453125,
"learning_rate": 0.000172,
"loss": 0.4168,
"step": 700
},
{
"epoch": 0.75,
"grad_norm": 4.1875,
"learning_rate": 0.00017,
"loss": 0.503,
"step": 750
},
{
"epoch": 0.8,
"grad_norm": 5.6875,
"learning_rate": 0.000168,
"loss": 0.619,
"step": 800
},
{
"epoch": 0.85,
"grad_norm": 1.1171875,
"learning_rate": 0.000166,
"loss": 0.4079,
"step": 850
},
{
"epoch": 0.9,
"grad_norm": 2.46875,
"learning_rate": 0.000164,
"loss": 0.3853,
"step": 900
},
{
"epoch": 0.95,
"grad_norm": 1.0078125,
"learning_rate": 0.000162,
"loss": 0.4464,
"step": 950
},
{
"epoch": 1.0,
"grad_norm": 0.64453125,
"learning_rate": 0.00016,
"loss": 0.3638,
"step": 1000
},
{
"epoch": 1.05,
"grad_norm": 0.52734375,
"learning_rate": 0.00015800000000000002,
"loss": 0.3634,
"step": 1050
},
{
"epoch": 1.1,
"grad_norm": 0.6875,
"learning_rate": 0.00015600000000000002,
"loss": 0.3604,
"step": 1100
},
{
"epoch": 1.15,
"grad_norm": 0.337890625,
"learning_rate": 0.000154,
"loss": 0.3328,
"step": 1150
},
{
"epoch": 1.2,
"grad_norm": 1.078125,
"learning_rate": 0.000152,
"loss": 0.4488,
"step": 1200
},
{
"epoch": 1.25,
"grad_norm": 1.859375,
"learning_rate": 0.00015000000000000001,
"loss": 0.5788,
"step": 1250
},
{
"epoch": 1.3,
"grad_norm": 2.140625,
"learning_rate": 0.000148,
"loss": 0.3634,
"step": 1300
},
{
"epoch": 1.35,
"grad_norm": 17.5,
"learning_rate": 0.000146,
"loss": 0.4267,
"step": 1350
},
{
"epoch": 1.4,
"grad_norm": 1.859375,
"learning_rate": 0.000144,
"loss": 0.6031,
"step": 1400
},
{
"epoch": 1.45,
"grad_norm": 32.5,
"learning_rate": 0.000142,
"loss": 0.6059,
"step": 1450
},
{
"epoch": 1.5,
"grad_norm": 0.8125,
"learning_rate": 0.00014,
"loss": 0.655,
"step": 1500
},
{
"epoch": 1.55,
"grad_norm": 1.2109375,
"learning_rate": 0.000138,
"loss": 0.5061,
"step": 1550
},
{
"epoch": 1.6,
"grad_norm": 0.64453125,
"learning_rate": 0.00013600000000000003,
"loss": 0.4995,
"step": 1600
},
{
"epoch": 1.65,
"grad_norm": 0.8359375,
"learning_rate": 0.000134,
"loss": 0.5439,
"step": 1650
},
{
"epoch": 1.7,
"grad_norm": 1.21875,
"learning_rate": 0.000132,
"loss": 0.3714,
"step": 1700
},
{
"epoch": 1.75,
"grad_norm": 1.0703125,
"learning_rate": 0.00013000000000000002,
"loss": 0.4673,
"step": 1750
},
{
"epoch": 1.8,
"grad_norm": 2.421875,
"learning_rate": 0.00012800000000000002,
"loss": 0.3626,
"step": 1800
},
{
"epoch": 1.85,
"grad_norm": 0.70703125,
"learning_rate": 0.000126,
"loss": 0.6073,
"step": 1850
},
{
"epoch": 1.9,
"grad_norm": 1.421875,
"learning_rate": 0.000124,
"loss": 0.4671,
"step": 1900
},
{
"epoch": 1.95,
"grad_norm": 0.859375,
"learning_rate": 0.000122,
"loss": 0.3424,
"step": 1950
},
{
"epoch": 2.0,
"grad_norm": 0.7421875,
"learning_rate": 0.00012,
"loss": 0.3802,
"step": 2000
},
{
"epoch": 2.05,
"grad_norm": 0.4375,
"learning_rate": 0.000118,
"loss": 0.3936,
"step": 2050
},
{
"epoch": 2.1,
"grad_norm": 1.0546875,
"learning_rate": 0.000116,
"loss": 0.3808,
"step": 2100
},
{
"epoch": 2.15,
"grad_norm": 0.78125,
"learning_rate": 0.00011399999999999999,
"loss": 0.3487,
"step": 2150
},
{
"epoch": 2.2,
"grad_norm": 0.7265625,
"learning_rate": 0.00011200000000000001,
"loss": 0.3627,
"step": 2200
},
{
"epoch": 2.25,
"grad_norm": 0.67578125,
"learning_rate": 0.00011000000000000002,
"loss": 0.3638,
"step": 2250
},
{
"epoch": 2.3,
"grad_norm": 0.58203125,
"learning_rate": 0.00010800000000000001,
"loss": 0.3568,
"step": 2300
},
{
"epoch": 2.35,
"grad_norm": 0.71484375,
"learning_rate": 0.00010600000000000002,
"loss": 0.4174,
"step": 2350
},
{
"epoch": 2.4,
"grad_norm": 0.69140625,
"learning_rate": 0.00010400000000000001,
"loss": 0.3676,
"step": 2400
},
{
"epoch": 2.45,
"grad_norm": 4.625,
"learning_rate": 0.00010200000000000001,
"loss": 0.3714,
"step": 2450
},
{
"epoch": 2.5,
"grad_norm": 0.8046875,
"learning_rate": 0.0001,
"loss": 0.4879,
"step": 2500
},
{
"epoch": 2.55,
"grad_norm": 4.59375,
"learning_rate": 9.8e-05,
"loss": 0.5212,
"step": 2550
},
{
"epoch": 2.6,
"grad_norm": 5.03125,
"learning_rate": 9.6e-05,
"loss": 0.5051,
"step": 2600
},
{
"epoch": 2.65,
"grad_norm": 0.8203125,
"learning_rate": 9.4e-05,
"loss": 0.4212,
"step": 2650
},
{
"epoch": 2.7,
"grad_norm": 0.66796875,
"learning_rate": 9.200000000000001e-05,
"loss": 0.3505,
"step": 2700
},
{
"epoch": 2.75,
"grad_norm": 0.90234375,
"learning_rate": 9e-05,
"loss": 0.3521,
"step": 2750
},
{
"epoch": 2.8,
"grad_norm": 0.7265625,
"learning_rate": 8.800000000000001e-05,
"loss": 0.3495,
"step": 2800
},
{
"epoch": 2.85,
"grad_norm": 0.5,
"learning_rate": 8.6e-05,
"loss": 0.3034,
"step": 2850
},
{
"epoch": 2.9,
"grad_norm": 0.65625,
"learning_rate": 8.4e-05,
"loss": 0.636,
"step": 2900
},
{
"epoch": 2.95,
"grad_norm": 3.171875,
"learning_rate": 8.2e-05,
"loss": 0.5193,
"step": 2950
},
{
"epoch": 3.0,
"grad_norm": 2.65625,
"learning_rate": 8e-05,
"loss": 0.4152,
"step": 3000
},
{
"epoch": 3.05,
"grad_norm": 1.140625,
"learning_rate": 7.800000000000001e-05,
"loss": 0.4252,
"step": 3050
},
{
"epoch": 3.1,
"grad_norm": 0.78515625,
"learning_rate": 7.6e-05,
"loss": 0.3521,
"step": 3100
},
{
"epoch": 3.15,
"grad_norm": 0.76171875,
"learning_rate": 7.4e-05,
"loss": 0.3409,
"step": 3150
},
{
"epoch": 3.2,
"grad_norm": 0.7734375,
"learning_rate": 7.2e-05,
"loss": 0.3497,
"step": 3200
},
{
"epoch": 3.25,
"grad_norm": 0.53125,
"learning_rate": 7e-05,
"loss": 0.3548,
"step": 3250
},
{
"epoch": 3.3,
"grad_norm": 0.5390625,
"learning_rate": 6.800000000000001e-05,
"loss": 0.3416,
"step": 3300
},
{
"epoch": 3.35,
"grad_norm": 0.55859375,
"learning_rate": 6.6e-05,
"loss": 0.3448,
"step": 3350
},
{
"epoch": 3.4,
"grad_norm": 0.75,
"learning_rate": 6.400000000000001e-05,
"loss": 0.3453,
"step": 3400
},
{
"epoch": 3.45,
"grad_norm": 0.50390625,
"learning_rate": 6.2e-05,
"loss": 0.3461,
"step": 3450
},
{
"epoch": 3.5,
"grad_norm": 0.5859375,
"learning_rate": 6e-05,
"loss": 0.3597,
"step": 3500
},
{
"epoch": 3.55,
"grad_norm": 0.546875,
"learning_rate": 5.8e-05,
"loss": 0.3448,
"step": 3550
},
{
"epoch": 3.6,
"grad_norm": 0.58203125,
"learning_rate": 5.6000000000000006e-05,
"loss": 0.3486,
"step": 3600
},
{
"epoch": 3.65,
"grad_norm": 0.51171875,
"learning_rate": 5.4000000000000005e-05,
"loss": 0.3503,
"step": 3650
},
{
"epoch": 3.7,
"grad_norm": 0.53515625,
"learning_rate": 5.2000000000000004e-05,
"loss": 0.3457,
"step": 3700
},
{
"epoch": 3.75,
"grad_norm": 0.51171875,
"learning_rate": 5e-05,
"loss": 0.347,
"step": 3750
},
{
"epoch": 3.8,
"grad_norm": 0.57421875,
"learning_rate": 4.8e-05,
"loss": 0.3489,
"step": 3800
},
{
"epoch": 3.85,
"grad_norm": 0.53515625,
"learning_rate": 4.600000000000001e-05,
"loss": 0.3488,
"step": 3850
},
{
"epoch": 3.9,
"grad_norm": 0.55859375,
"learning_rate": 4.4000000000000006e-05,
"loss": 0.3504,
"step": 3900
},
{
"epoch": 3.95,
"grad_norm": 0.5078125,
"learning_rate": 4.2e-05,
"loss": 0.3465,
"step": 3950
},
{
"epoch": 4.0,
"grad_norm": 0.65625,
"learning_rate": 4e-05,
"loss": 0.3474,
"step": 4000
},
{
"epoch": 4.05,
"grad_norm": 0.53515625,
"learning_rate": 3.8e-05,
"loss": 0.3433,
"step": 4050
},
{
"epoch": 4.1,
"grad_norm": 0.5546875,
"learning_rate": 3.6e-05,
"loss": 0.3431,
"step": 4100
},
{
"epoch": 4.15,
"grad_norm": 0.73828125,
"learning_rate": 3.4000000000000007e-05,
"loss": 0.3436,
"step": 4150
},
{
"epoch": 4.2,
"grad_norm": 0.6015625,
"learning_rate": 3.2000000000000005e-05,
"loss": 0.3447,
"step": 4200
},
{
"epoch": 4.25,
"grad_norm": 0.53515625,
"learning_rate": 3e-05,
"loss": 0.3436,
"step": 4250
},
{
"epoch": 4.3,
"grad_norm": 0.6875,
"learning_rate": 2.8000000000000003e-05,
"loss": 0.3494,
"step": 4300
},
{
"epoch": 4.35,
"grad_norm": 0.55078125,
"learning_rate": 2.6000000000000002e-05,
"loss": 0.343,
"step": 4350
},
{
"epoch": 4.4,
"grad_norm": 0.6015625,
"learning_rate": 2.4e-05,
"loss": 0.3465,
"step": 4400
},
{
"epoch": 4.45,
"grad_norm": 0.59375,
"learning_rate": 2.2000000000000003e-05,
"loss": 0.3521,
"step": 4450
},
{
"epoch": 4.5,
"grad_norm": 0.59375,
"learning_rate": 2e-05,
"loss": 0.3431,
"step": 4500
},
{
"epoch": 4.55,
"grad_norm": 0.578125,
"learning_rate": 1.8e-05,
"loss": 0.3395,
"step": 4550
},
{
"epoch": 4.6,
"grad_norm": 0.62890625,
"learning_rate": 1.6000000000000003e-05,
"loss": 0.3409,
"step": 4600
},
{
"epoch": 4.65,
"grad_norm": 0.5703125,
"learning_rate": 1.4000000000000001e-05,
"loss": 0.3463,
"step": 4650
},
{
"epoch": 4.7,
"grad_norm": 0.6796875,
"learning_rate": 1.2e-05,
"loss": 0.3521,
"step": 4700
},
{
"epoch": 4.75,
"grad_norm": 0.546875,
"learning_rate": 1e-05,
"loss": 0.34,
"step": 4750
},
{
"epoch": 4.8,
"grad_norm": 0.54296875,
"learning_rate": 8.000000000000001e-06,
"loss": 0.3486,
"step": 4800
},
{
"epoch": 4.85,
"grad_norm": 0.578125,
"learning_rate": 6e-06,
"loss": 0.3537,
"step": 4850
},
{
"epoch": 4.9,
"grad_norm": 0.64453125,
"learning_rate": 4.000000000000001e-06,
"loss": 0.3435,
"step": 4900
},
{
"epoch": 4.95,
"grad_norm": 0.5703125,
"learning_rate": 2.0000000000000003e-06,
"loss": 0.3514,
"step": 4950
},
{
"epoch": 5.0,
"grad_norm": 0.578125,
"learning_rate": 0.0,
"loss": 0.3446,
"step": 5000
}
],
"logging_steps": 50,
"max_steps": 5000,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 1000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 2.14270352818176e+16,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}