File size: 1,090 Bytes
8aa674c |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 |
import os
import torch
def save_checkpoint(model, model_ema, optimizer, scheduler, epoch, work_dir=None):
    """Save a full training checkpoint to ``<work_dir>/checkpoint/epoch_<epoch>.pth``.

    Args:
        model: model whose ``state_dict()`` is saved under ``"state_dict"``.
        model_ema: optional EMA wrapper; if given, ``model_ema.module.state_dict()``
            is saved under ``"state_dict_ema"``. Pass ``None`` to skip.
        optimizer: optimizer whose state is saved under ``"optimizer"``.
        scheduler: LR scheduler whose state is saved under ``"scheduler"``.
        epoch: epoch index, stored under ``"epoch"`` and used in the filename.
        work_dir: root directory for checkpoints; ``None`` means the current
            working directory (the original raised on ``None``).
    """
    if work_dir is None:
        work_dir = "."
    save_dir = os.path.join(work_dir, "checkpoint")
    # makedirs with exist_ok avoids the check-then-create race and also
    # creates missing parent directories (os.mkdir would fail on both).
    os.makedirs(save_dir, exist_ok=True)
    save_states = {
        "epoch": epoch,
        "state_dict": model.state_dict(),
        "optimizer": optimizer.state_dict(),
        "scheduler": scheduler.state_dict(),
    }
    if model_ema is not None:
        save_states["state_dict_ema"] = model_ema.module.state_dict()
    checkpoint_path = os.path.join(save_dir, f"epoch_{epoch}.pth")
    torch.save(save_states, checkpoint_path)
def save_best_checkpoint(model, model_ema, epoch, work_dir=None):
    """Save the best-so-far model to ``<work_dir>/checkpoint/best.pth`` (overwritten each call).

    Args:
        model: model whose ``state_dict()`` is saved under ``"state_dict"``.
        model_ema: optional EMA wrapper; if given, ``model_ema.module.state_dict()``
            is saved under ``"state_dict_ema"``. Pass ``None`` to skip.
        epoch: epoch index at which this model was best, stored under ``"epoch"``.
        work_dir: root directory for checkpoints; ``None`` means the current
            working directory (the original raised on ``None``).
    """
    if work_dir is None:
        work_dir = "."
    save_dir = os.path.join(work_dir, "checkpoint")
    # makedirs with exist_ok avoids the check-then-create race and also
    # creates missing parent directories (os.mkdir would fail on both).
    os.makedirs(save_dir, exist_ok=True)
    save_states = {"epoch": epoch, "state_dict": model.state_dict()}
    if model_ema is not None:
        save_states["state_dict_ema"] = model_ema.module.state_dict()
    checkpoint_path = os.path.join(save_dir, "best.pth")
    torch.save(save_states, checkpoint_path)
|