Xsmos committed on
Commit
3a8ee62
·
verified ·
1 Parent(s): a1fae17
Files changed (2) hide show
  1. .diffusion.py.swp +0 -0
  2. diffusion.py +2 -2
.diffusion.py.swp CHANGED
Binary files a/.diffusion.py.swp and b/.diffusion.py.swp differ
 
diffusion.py CHANGED
@@ -601,8 +601,8 @@ class DDPM21CM:
601
  self.config.logger.add_scalar("learning_rate", logs["lr"], global_step = global_step)
602
  global_step += 1
603
 
604
- if (i+i) % self.config.gradient_accumulation_steps != 0:
605
- print(f"(i+1)%self.config.gradient_accumulation_steps = {(i+1)%self.config.gradient_accumulation_steps}, i = {i}, scg = {self.config.gradient_accumulation_steps}".center(240,'-'))
606
  torch.nn.utils.clip_grad_norm_(self.nn_model.parameters(), max_norm=1.0)
607
  self.optimizer.step()
608
  self.lr_scheduler.step()
 
601
  self.config.logger.add_scalar("learning_rate", logs["lr"], global_step = global_step)
602
  global_step += 1
603
 
604
+ if (i+1) % self.config.gradient_accumulation_steps != 0:
605
+ print(f"(i+1)%self.config.gradient_accumulation_steps = {(i+1)%self.config.gradient_accumulation_steps}, i = {i}, scg = {self.config.gradient_accumulation_steps}".center(120,'-'))
606
  torch.nn.utils.clip_grad_norm_(self.nn_model.parameters(), max_norm=1.0)
607
  self.optimizer.step()
608
  self.lr_scheduler.step()