import torch
import torch.nn as nn

class mapping(nn.Module):
    def __init__(self, input_dim=1024, hidden_dim=512, out_dim=1024, layernum=4):
        '''MLP that maps an input embedding to an output embedding.

        Supports a 4-layer or a 2-layer variant, selected via `layernum`.
        '''
        super().__init__()
        self.layernum = layernum
        if layernum == 4:
            self.fc1 = nn.Linear(input_dim, hidden_dim)
            self.fc2 = nn.Linear(hidden_dim, hidden_dim)
            self.fc3 = nn.Linear(hidden_dim, hidden_dim)
            self.fc4 = nn.Linear(hidden_dim, out_dim)
        elif layernum == 2:
            self.fc1 = nn.Linear(input_dim, hidden_dim)
            self.fc2 = nn.Linear(hidden_dim, out_dim)
        else:
            # Guard against unsupported depths; the original silently built
            # a module with no layers and forward() returned x unchanged.
            raise ValueError(f'layernum must be 2 or 4, got {layernum}')
        self.relu = nn.ReLU(inplace=True)

    def forward(self, x):
        '''Apply the MLP to `x`, with ReLU on all but the final layer.'''
        if self.layernum == 4:
            x = self.relu(self.fc1(x))
            x = self.relu(self.fc2(x))
            x = self.relu(self.fc3(x))
            x = self.fc4(x)
        elif self.layernum == 2:
            x = self.relu(self.fc1(x))
            x = self.fc2(x)
        return x

class effect_to_weight(nn.Module):
    def __init__(self, input_dim=512, hidden_dim=256, out_dim=1, layernum=2, hidden_dim2=128):
        '''MLP that maps an effect embedding to a scalar weight (out_dim=1).

        Supports a 2-layer or a 3-layer variant, selected via `layernum`.
        '''
        super().__init__()
        self.layernum = layernum
        if layernum == 2:
            self.fc1 = nn.Linear(input_dim, hidden_dim)
            self.fc2 = nn.Linear(hidden_dim, out_dim)
        elif layernum == 3:
            self.fc1 = nn.Linear(input_dim, hidden_dim)
            self.fc2 = nn.Linear(hidden_dim, hidden_dim2)
            self.fc3 = nn.Linear(hidden_dim2, out_dim)
        else:
            raise ValueError(f'layernum must be 2 or 3, got {layernum}')
        self.relu = nn.ReLU(inplace=True)

    def forward(self, x):
        '''Apply the MLP to `x`, with ReLU on all but the final layer.'''
        if self.layernum == 2:
            x = self.relu(self.fc1(x))
            x = self.fc2(x)
        else:
            x = self.relu(self.fc1(x))
            x = self.relu(self.fc2(x))
            x = self.fc3(x)
        return x
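

if __name__ == '__main__':
    # Minimal smoke test, not part of the original module: the batch size
    # and tensor shapes below are illustrative assumptions that match the
    # constructors' default dimensions, not values from the source.
    mapper = mapping(input_dim=1024, hidden_dim=512, out_dim=1024, layernum=4)
    weighter = effect_to_weight(input_dim=512, hidden_dim=256, out_dim=1, layernum=2)

    feats = torch.randn(8, 1024)     # batch of 8 input embeddings
    mapped = mapper(feats)           # -> shape (8, 1024)

    effects = torch.randn(8, 512)    # batch of 8 effect embeddings
    weights = weighter(effects)      # -> shape (8, 1), one scalar per item

    print(mapped.shape, weights.shape)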