# coding: utf-8
import os

import torch
from torch import nn
class Concat_embed(nn.Module):
    """Projects a text embedding and concatenates it with a 4x4 feature map
    along the channel dimension (used to condition the discriminator)."""

    def __init__(self, embed_dim, projected_embed_dim):
        super(Concat_embed, self).__init__()
        self.projection = nn.Sequential(
            nn.Linear(in_features=embed_dim, out_features=projected_embed_dim),
            nn.BatchNorm1d(num_features=projected_embed_dim),
            nn.LeakyReLU(negative_slope=0.2, inplace=True)
        )

    def forward(self, inp, embed):
        # (N, embed_dim) -> (N, projected_embed_dim)
        projected_embed = self.projection(embed)
        # Tile the projected embedding over the 4x4 spatial grid:
        # (N, P) -> (4, 4, N, P) -> (N, P, 4, 4)
        replicated_embed = projected_embed.repeat(4, 4, 1, 1).permute(2, 3, 0, 1)
        # Concatenate with the image features along the channel axis.
        hidden_concat = torch.cat([inp, replicated_embed], 1)
        return hidden_concat
class Utils(object):
    @staticmethod
    def smooth_label(tensor, offset):
        # One-sided label smoothing, e.g. real labels 1.0 with offset -0.1 -> 0.9.
        return tensor + offset

    @staticmethod
    def save_checkpoint(netD, netG, dir_path, subdir_path, epoch):
        path = os.path.join(dir_path, subdir_path)
        if not os.path.exists(path):
            os.makedirs(path)
        torch.save(netD.state_dict(), '{0}/disc_{1}.pth'.format(path, epoch))
        torch.save(netG.state_dict(), '{0}/gen_{1}.pth'.format(path, epoch))

    @staticmethod
    def weights_init(m):
        # DCGAN-style initialization: N(0, 0.02) for conv weights,
        # N(1, 0.02) for batch-norm scale, zeros for batch-norm shift.
        classname = m.__class__.__name__
        if classname.find('Conv') != -1:
            m.weight.data.normal_(0.0, 0.02)
        elif classname.find('BatchNorm') != -1:
            m.weight.data.normal_(1.0, 0.02)
            m.bias.data.fill_(0)
class Logger(object):
    def log_iteration_gan(self, epoch, iteration, d_loss, g_loss, real_score, fake_score):
        # D(X): mean discriminator score on real images; D(G(X)): on fakes.
        print("Epoch: %d, Iter: %d, d_loss= %f, g_loss= %f, D(X)= %f, D(G(X))= %f" % (
            epoch, iteration, d_loss.data.cpu().mean(), g_loss.data.cpu().mean(),
            real_score.data.cpu().mean(), fake_score.data.cpu().mean()))
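

# Minimal smoke test for Concat_embed and weights_init. This is an added
# sketch, not part of the original module: the batch size, channel count, and
# embedding sizes below are illustrative assumptions; only the 4x4 spatial
# size is fixed by Concat_embed.forward.
if __name__ == '__main__':
    batch, channels, embed_dim, projected_dim = 4, 512, 1024, 128
    concat = Concat_embed(embed_dim, projected_dim)
    concat.apply(Utils.weights_init)  # only the BatchNorm1d layer matches here
    inp = torch.randn(batch, channels, 4, 4)
    embed = torch.randn(batch, embed_dim)
    out = concat(inp, embed)
    print(out.shape)  # torch.Size([4, 640, 4, 4]): 640 = channels + projected_dim
    print(Utils.smooth_label(torch.ones(batch), -0.1))  # smoothed real labels: 0.9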