import torch
from torch import nn
class Conv2dBlock(nn.Module):
    """Conv2d followed by optional BatchNorm2d and ReLU."""

    def __init__(self, in_c, out_c, ker_size=(3, 3), stride=1, padding=1, batch_norm=True, relu=True):
        super(Conv2dBlock, self).__init__()
        self.conv2d = nn.Conv2d(in_c, out_c, ker_size, stride, padding)
        if batch_norm:
            self.batch_norm = nn.BatchNorm2d(out_c, eps=1e-5, momentum=0.05, affine=True)
        if relu:
            self.relu = nn.ReLU(inplace=True)

    def forward(self, x):
        # Apply the registered submodules in the order they were defined above.
        for module in self.children():
            x = module(x)
        return x
class DownSampleBlock(nn.Module):
    """2x2 max pooling followed by optional dropout."""

    def __init__(self, ker_size=(2, 2), stride=2, dilation=(1, 1), ceil_mode=False, p=0.0):
        super(DownSampleBlock, self).__init__()
        self.maxpooling = nn.MaxPool2d(kernel_size=ker_size, stride=stride, dilation=dilation, ceil_mode=ceil_mode)
        if p:
            self.dropout = nn.Dropout(p)

    def forward(self, x):
        for module in self.children():
            x = module(x)
        return x
class UpSampleBlock(nn.Module):
    """Bilinear upsampling followed by optional dropout."""

    def __init__(self, scale_factor=(2, 2), mode="bilinear", p=0.0):
        super(UpSampleBlock, self).__init__()
        self.upsample = nn.Upsample(scale_factor=scale_factor, mode=mode)
        if p:
            self.dropout = nn.Dropout(p)

    def forward(self, x):
        for module in self.children():
            x = module(x)
        return x