import torch
from torch import nn


class UpsamplingLayer(nn.Module):
    """A 3x3 convolution and activation followed by 2x bilinear upsampling."""

    def __init__(self, in_channels, out_channels, leaky=True):
        super(UpsamplingLayer, self).__init__()
        self.layer = nn.Sequential(
            nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1),
            nn.LeakyReLU() if leaky else nn.ReLU(),
            nn.UpsamplingBilinear2d(scale_factor=2),
        )

    def forward(self, x):
        return self.layer(x)
class DensityMapRegressor(nn.Module):
    """Upsamples backbone features back to input resolution and predicts a
    single-channel density map. `reduction` is the backbone's spatial
    downsampling factor (8 or 16), undone by three or four 2x upsampling
    stages respectively."""

    def __init__(self, in_channels, reduction):
        super(DensityMapRegressor, self).__init__()
        if reduction == 8:
            self.regressor = nn.Sequential(
                UpsamplingLayer(in_channels, 128),
                UpsamplingLayer(128, 64),
                UpsamplingLayer(64, 32),
                nn.Conv2d(32, 1, kernel_size=1),
                nn.LeakyReLU(),
            )
        elif reduction == 16:
            self.regressor = nn.Sequential(
                UpsamplingLayer(in_channels, 128),
                UpsamplingLayer(128, 64),
                UpsamplingLayer(64, 32),
                UpsamplingLayer(32, 16),
                nn.Conv2d(16, 1, kernel_size=1),
                nn.LeakyReLU(),
            )
        else:
            raise ValueError(f"Unsupported reduction factor: {reduction}")

        self.reset_parameters()

    def forward(self, x):
        return self.regressor(x)

    def reset_parameters(self):
        # Draw conv weights from N(0, 0.01^2) and zero the biases.
        for module in self.modules():
            if isinstance(module, nn.Conv2d):
                nn.init.normal_(module.weight, std=0.01)
                if module.bias is not None:
                    nn.init.constant_(module.bias, 0)
class DensityMapRegressor_(nn.Module):
    """Architecturally identical variant of DensityMapRegressor."""

    def __init__(self, in_channels, reduction):
        super(DensityMapRegressor_, self).__init__()
        if reduction == 8:
            self.regressor = nn.Sequential(
                UpsamplingLayer(in_channels, 128),
                UpsamplingLayer(128, 64),
                UpsamplingLayer(64, 32),
                nn.Conv2d(32, 1, kernel_size=1),
                nn.LeakyReLU(),
            )
        elif reduction == 16:
            self.regressor = nn.Sequential(
                UpsamplingLayer(in_channels, 128),
                UpsamplingLayer(128, 64),
                UpsamplingLayer(64, 32),
                UpsamplingLayer(32, 16),
                nn.Conv2d(16, 1, kernel_size=1),
                nn.LeakyReLU(),
            )
        else:
            raise ValueError(f"Unsupported reduction factor: {reduction}")

        self.reset_parameters()

    def forward(self, x):
        return self.regressor(x)

    def reset_parameters(self):
        # Draw conv weights from N(0, 0.01^2) and zero the biases.
        for module in self.modules():
            if isinstance(module, nn.Conv2d):
                nn.init.normal_(module.weight, std=0.01)
                if module.bias is not None:
                    nn.init.constant_(module.bias, 0)
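

# Minimal usage sketch (illustrative, not part of the original file): the
# input channel count (256) and the 32x32 spatial size are assumptions chosen
# only to exercise the reduction-8 head, which upsamples 2x three times.
if __name__ == "__main__":
    head = DensityMapRegressor(in_channels=256, reduction=8)
    feats = torch.randn(1, 256, 32, 32)  # dummy features at 1/8 input resolution
    density = head(feats)
    print(density.shape)  # torch.Size([1, 1, 256, 256])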