{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 5.0,
"eval_steps": 500,
"global_step": 50,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.5333333333333333,
"grad_norm": 0.21942713856697083,
"learning_rate": 0.000199107748815478,
"loss": 0.7714,
"step": 5
},
{
"epoch": 1.0,
"grad_norm": 0.27160170674324036,
"learning_rate": 0.00018925188358598813,
"loss": 0.6282,
"step": 10
},
{
"epoch": 1.5333333333333332,
"grad_norm": 0.16776053607463837,
"learning_rate": 0.00016951924276746425,
"loss": 0.5601,
"step": 15
},
{
"epoch": 2.0,
"grad_norm": 0.2320907711982727,
"learning_rate": 0.0001420934762428335,
"loss": 0.5471,
"step": 20
},
{
"epoch": 2.533333333333333,
"grad_norm": 0.15466105937957764,
"learning_rate": 0.00011000956916240985,
"loss": 0.4923,
"step": 25
},
{
"epoch": 3.0,
"grad_norm": 0.31745201349258423,
"learning_rate": 7.681798497324716e-05,
"loss": 0.4651,
"step": 30
},
{
"epoch": 3.533333333333333,
"grad_norm": 0.19490917026996613,
"learning_rate": 4.6191764683662744e-05,
"loss": 0.4367,
"step": 35
},
{
"epoch": 4.0,
"grad_norm": 0.3345278799533844,
"learning_rate": 2.1520061472133902e-05,
"loss": 0.4268,
"step": 40
},
{
"epoch": 4.533333333333333,
"grad_norm": 0.156062051653862,
"learning_rate": 5.533090839208133e-06,
"loss": 0.402,
"step": 45
},
{
"epoch": 5.0,
"grad_norm": 0.24310481548309326,
"learning_rate": 0.0,
"loss": 0.4084,
"step": 50
}
],
"logging_steps": 5,
"max_steps": 50,
"num_input_tokens_seen": 0,
"num_train_epochs": 6,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 2.5642849233567744e+16,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}