import torch
import torch.nn as nn
class Autoencoder(nn.Module):
    """MLP autoencoder whose encoder and decoder outputs are L2-normalized.

    The encoder maps an ``input_dim``-dimensional vector through a stack of
    linear layers (with BatchNorm + ReLU between them), then normalizes the
    latent to unit L2 norm.  The decoder mirrors this with Linear + ReLU
    (no BatchNorm — asymmetry kept intentionally) and normalizes its output
    to unit L2 norm as well.

    Args:
        encoder_hidden_dims: Output sizes of the encoder linear layers, in
            order; the last entry is the latent dimension.  Must be non-empty.
        decoder_hidden_dims: Output sizes of the decoder linear layers, in
            order; the last entry is the reconstruction dimension.
        input_dim: Dimensionality of the input vectors (default 512,
            matching the original hard-coded value).
    """

    def __init__(self, encoder_hidden_dims, decoder_hidden_dims, input_dim=512):
        super().__init__()
        encoder_layers = []
        for i, dim in enumerate(encoder_hidden_dims):
            if i == 0:
                encoder_layers.append(nn.Linear(input_dim, dim))
            else:
                # Normalize and activate the previous layer's output before
                # the next projection.
                encoder_layers.append(nn.BatchNorm1d(encoder_hidden_dims[i - 1]))
                encoder_layers.append(nn.ReLU())
                encoder_layers.append(nn.Linear(encoder_hidden_dims[i - 1], dim))
        # ModuleList (not Sequential) keeps state_dict keys identical to the
        # original implementation, so existing checkpoints still load.
        self.encoder = nn.ModuleList(encoder_layers)

        decoder_layers = []
        for i, dim in enumerate(decoder_hidden_dims):
            if i == 0:
                # Decoder input is the encoder's latent (last encoder width).
                decoder_layers.append(nn.Linear(encoder_hidden_dims[-1], dim))
            else:
                decoder_layers.append(nn.ReLU())
                decoder_layers.append(nn.Linear(decoder_hidden_dims[i - 1], dim))
        self.decoder = nn.ModuleList(decoder_layers)

    def forward(self, x):
        """Encode then decode ``x``; both stages end with L2 normalization."""
        for layer in self.encoder:
            x = layer(x)
        x = x / x.norm(dim=-1, keepdim=True)
        for layer in self.decoder:
            x = layer(x)
        x = x / x.norm(dim=-1, keepdim=True)
        return x

    def encode(self, x):
        """Return the unit-norm latent representation of ``x``."""
        for layer in self.encoder:
            x = layer(x)
        x = x / x.norm(dim=-1, keepdim=True)
        return x

    def decode(self, x):
        """Decode a latent ``x`` into a unit-norm reconstruction."""
        for layer in self.decoder:
            x = layer(x)
        x = x / x.norm(dim=-1, keepdim=True)
        return x
|