import os
import zipfile

import requests
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.model_zoo as modelzoo
from tqdm import tqdm

__all__ = [
    "ResNet",
    "resnet18",
    "resnet34",
    "resnet50",
]

# Module-level flag so the pretrained weights archive is downloaded at most
# once per process.
weights_downloaded = False


def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1):
    """3x3 convolution with padding."""
    return nn.Conv2d(
        in_planes,
        out_planes,
        kernel_size=3,
        stride=stride,
        padding=dilation,
        groups=groups,
        bias=False,
        dilation=dilation,
    )


def conv1x1(in_planes, out_planes, stride=1):
    """1x1 convolution."""
    return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)
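# Illustrative sketch (not part of the module's API): with padding=dilation,
# conv3x3 preserves spatial size whenever stride=1, which is what keeps the
# residual additions below shape-compatible, e.g.
#
#     x = torch.randn(1, 64, 32, 32)
#     conv3x3(64, 128)(x).shape             # torch.Size([1, 128, 32, 32])
#     conv3x3(64, 64, stride=2)(x).shape    # torch.Size([1, 64, 16, 16])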
|
|
class BasicBlock(nn.Module):
    expansion = 1

    def __init__(
        self,
        inplanes,
        planes,
        stride=1,
        downsample=None,
        groups=1,
        base_width=64,
        dilation=1,
        norm_layer=None,
    ):
        super(BasicBlock, self).__init__()
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        if groups != 1 or base_width != 64:
            raise ValueError("BasicBlock only supports groups=1 and base_width=64")
        if dilation > 1:
            raise NotImplementedError("Dilation > 1 not supported in BasicBlock")

        # Both self.conv1 and self.downsample downsample the input when stride != 1.
        self.conv1 = conv3x3(inplanes, planes, stride)
        self.bn1 = norm_layer(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(planes, planes)
        self.bn2 = norm_layer(planes)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        identity = x

        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)

        out = self.conv2(out)
        out = self.bn2(out)

        if self.downsample is not None:
            identity = self.downsample(x)

        out += identity
        out = self.relu(out)

        return out
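# Illustrative sketch: when a stage changes resolution or width, the caller
# must supply a projection shortcut, mirroring what ResNet._make_layer builds:
#
#     down = nn.Sequential(conv1x1(64, 128, stride=2), nn.BatchNorm2d(128))
#     block = BasicBlock(64, 128, stride=2, downsample=down)
#     block(torch.randn(1, 64, 32, 32)).shape   # torch.Size([1, 128, 16, 16])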
|
|
class Bottleneck(nn.Module):
    expansion = 4

    def __init__(
        self,
        inplanes,
        planes,
        stride=1,
        downsample=None,
        groups=1,
        base_width=64,
        dilation=1,
        norm_layer=None,
    ):
        super(Bottleneck, self).__init__()
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        width = int(planes * (base_width / 64.0)) * groups

        self.conv1 = conv1x1(inplanes, width)
        self.bn1 = norm_layer(width)
        self.conv2 = conv3x3(width, width, stride, groups, dilation)
        self.bn2 = norm_layer(width)
        self.conv3 = conv1x1(width, planes * self.expansion)
        self.bn3 = norm_layer(planes * self.expansion)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        identity = x

        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)

        out = self.conv2(out)
        out = self.bn2(out)
        out = self.relu(out)

        out = self.conv3(out)
        out = self.bn3(out)

        if self.downsample is not None:
            identity = self.downsample(x)

        out += identity
        out = self.relu(out)

        return out
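# Worked example of the width formula above: with the defaults
# (groups=1, base_width=64), width == planes, giving the classic
# 1x1 reduce -> 3x3 -> 1x1 expand (x4) bottleneck. A ResNeXt-50 32x4d style
# configuration (groups=32, base_width=4) gives
# width = int(planes * 4 / 64) * 32 = 2 * planes for the grouped 3x3 conv.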
|
|
class ResNet(nn.Module):
    def __init__(
        self,
        block,
        layers,
        num_classes=10,
        zero_init_residual=False,
        groups=1,
        width_per_group=64,
        replace_stride_with_dilation=None,
        norm_layer=None,
    ):
        super(ResNet, self).__init__()
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        self._norm_layer = norm_layer

        self.inplanes = 64
        self.dilation = 1
        if replace_stride_with_dilation is None:
            # Each element indicates whether to replace the 2x2 stride with a
            # dilated convolution in the corresponding stage.
            replace_stride_with_dilation = [False, False, False]
        if len(replace_stride_with_dilation) != 3:
            raise ValueError(
                "replace_stride_with_dilation should be None "
                "or a 3-element tuple, got {}".format(replace_stride_with_dilation)
            )
        self.groups = groups
        self.base_width = width_per_group

        # CIFAR-10 stem: 3x3 stride-1 conv instead of the ImageNet 7x7 stride-2 conv.
        self.conv1 = nn.Conv2d(3, self.inplanes, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn1 = norm_layer(self.inplanes)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = self._make_layer(block, 64, layers[0])
        self.layer2 = self._make_layer(
            block, 128, layers[1], stride=2, dilate=replace_stride_with_dilation[0]
        )
        self.layer3 = self._make_layer(
            block, 256, layers[2], stride=2, dilate=replace_stride_with_dilation[1]
        )
        self.layer4 = self._make_layer(
            block, 512, layers[3], stride=2, dilate=replace_stride_with_dilation[2]
        )
        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.fc = nn.Linear(512 * block.expansion, num_classes)

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode="fan_out", nonlinearity="relu")
            elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)

        # Zero-initialize the last BN in each residual branch, so that each
        # block starts out behaving like an identity mapping.
        if zero_init_residual:
            for m in self.modules():
                if isinstance(m, Bottleneck):
                    nn.init.constant_(m.bn3.weight, 0)
                elif isinstance(m, BasicBlock):
                    nn.init.constant_(m.bn2.weight, 0)

    def _make_layer(self, block, planes, blocks, stride=1, dilate=False):
        norm_layer = self._norm_layer
        downsample = None
        previous_dilation = self.dilation
        if dilate:
            self.dilation *= stride
            stride = 1
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                conv1x1(self.inplanes, planes * block.expansion, stride),
                norm_layer(planes * block.expansion),
            )

        layers = []
        layers.append(
            block(
                self.inplanes,
                planes,
                stride,
                downsample,
                self.groups,
                self.base_width,
                previous_dilation,
                norm_layer,
            )
        )
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            layers.append(
                block(
                    self.inplanes,
                    planes,
                    groups=self.groups,
                    base_width=self.base_width,
                    dilation=self.dilation,
                    norm_layer=norm_layer,
                )
            )

        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)

        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)

        x = self.avgpool(x)
        x = x.reshape(x.size(0), -1)
        x = self.fc(x)

        return x
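# Minimal smoke-test sketch for CIFAR-sized inputs (assumes no pretrained
# weights are needed):
#
#     net = ResNet(BasicBlock, [2, 2, 2, 2])    # ResNet-18 topology
#     net(torch.randn(2, 3, 32, 32)).shape      # torch.Size([2, 10])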
|
|
def _resnet(arch, block, layers, pretrained, progress, device, **kwargs):
    global weights_downloaded
    model = ResNet(block, layers, **kwargs)
    if pretrained:
        if not weights_downloaded:
            download_weights()
            weights_downloaded = True

        # The state dicts are expected two directories above this file, under
        # cifar10_models/state_dicts/<arch>.pt. Note that `progress` is
        # currently unused; the download always shows a tqdm bar.
        script_dir = os.path.dirname(__file__)
        state_dict_path = os.path.join(script_dir, "../../cifar10_models/state_dicts", arch + ".pt")
        if os.path.isfile(state_dict_path):
            state_dict = torch.load(state_dict_path, map_location=device)
            model.load_state_dict(state_dict)
        else:
            raise FileNotFoundError(f"No such file or directory: '{state_dict_path}'")
    return model
|
|
def resnet18(pretrained=False, progress=True, device="cpu", **kwargs):
    """Constructs a ResNet-18 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on CIFAR-10
        progress (bool): If True, displays a progress bar of the download to stderr
    """
    return _resnet("resnet18", BasicBlock, [2, 2, 2, 2], pretrained, progress, device, **kwargs)


def resnet34(pretrained=False, progress=True, device="cpu", **kwargs):
    """Constructs a ResNet-34 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on CIFAR-10
        progress (bool): If True, displays a progress bar of the download to stderr
    """
    return _resnet("resnet34", BasicBlock, [3, 4, 6, 3], pretrained, progress, device, **kwargs)


def resnet50(pretrained=False, progress=True, device="cpu", **kwargs):
    """Constructs a ResNet-50 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on CIFAR-10
        progress (bool): If True, displays a progress bar of the download to stderr
    """
    return _resnet("resnet50", Bottleneck, [3, 4, 6, 3], pretrained, progress, device, **kwargs)
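# Typical usage (sketch; `batch` is a hypothetical (N, 3, 32, 32) tensor of
# CIFAR-10 images, and pretrained=True assumes the state-dict archive is
# already in place):
#
#     model = resnet18(pretrained=True, device="cpu")
#     model.eval()
#     with torch.no_grad():
#         probs = model(batch).softmax(dim=1)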
|
|
def download_weights():
    # Skip the download if the extraction directory already has content.
    directory_to_extract_to = os.path.join(os.getcwd(), "cifar10_models")
    if os.path.isdir(directory_to_extract_to) and len(os.listdir(directory_to_extract_to)) > 0:
        print("Weights already downloaded. Skipping download.")
        return

    url = "https://rutgers.box.com/shared/static/gkw08ecs797j2et1ksmbg1w5t3idf5r5.zip"

    # Stream the archive to disk in 1 MiB chunks with a byte-level progress bar.
    r = requests.get(url, stream=True)
    r.raise_for_status()

    total_size = int(r.headers.get("content-length", 0))
    block_size = 2**20  # 1 MiB
    t = tqdm(total=total_size, unit="B", unit_scale=True)

    with open("state_dicts.zip", "wb") as f:
        for data in r.iter_content(block_size):
            t.update(len(data))
            f.write(data)
    t.close()

    if total_size != 0 and t.n != total_size:
        raise Exception("Downloaded size does not match the expected content length")

    print("Download successful. Unzipping file...")
    path_to_zip_file = os.path.join(os.getcwd(), "state_dicts.zip")

    with zipfile.ZipFile(path_to_zip_file, "r") as zip_ref:
        zip_ref.extractall(directory_to_extract_to)
    print("Unzip file successful!")
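# Example flow (sketch): fetch the archive once, then build pretrained models.
# Note the path assumption: _resnet() looks for the extracted state dicts under
# "../../cifar10_models/state_dicts" relative to this file, while
# download_weights() extracts into the current working directory, so the
# package layout and working directory must line up.
#
#     download_weights()
#     model = resnet18(pretrained=True)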
|
|
class ResBlock2D(nn.Module):
    def __init__(self, n_c, kernel=3, dilation=1, p_drop=0.15):
        super(ResBlock2D, self).__init__()
        padding = self._get_same_padding(kernel, dilation)

        layer_s = list()
        layer_s.append(nn.Conv2d(n_c, n_c, kernel, padding=padding, dilation=dilation, bias=False))
        layer_s.append(nn.InstanceNorm2d(n_c, affine=True, eps=1e-6))
        layer_s.append(nn.ELU(inplace=True))
        # Dropout between the two convolutions.
        layer_s.append(nn.Dropout(p_drop))
        layer_s.append(nn.Conv2d(n_c, n_c, kernel, dilation=dilation, padding=padding, bias=False))
        layer_s.append(nn.InstanceNorm2d(n_c, affine=True, eps=1e-6))
        self.layer = nn.Sequential(*layer_s)
        self.final_activation = nn.ELU(inplace=True)

    def _get_same_padding(self, kernel, dilation):
        # Padding that keeps the spatial size unchanged for a dilated convolution.
        return (kernel + (kernel - 1) * (dilation - 1) - 1) // 2

    def forward(self, x):
        out = self.layer(x)
        return self.final_activation(x + out)
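# Worked example of _get_same_padding: for kernel=3, dilation=2 the effective
# kernel size is 3 + (3 - 1) * (2 - 1) = 5, so padding = (5 - 1) // 2 = 2 and
# the spatial size is preserved:
#
#     block = ResBlock2D(n_c=32, kernel=3, dilation=2)
#     block(torch.randn(1, 32, 64, 64)).shape   # torch.Size([1, 32, 64, 64])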
|
|
def create_layer_basic(in_chan, out_chan, bnum, stride=1):
    # The first block may change resolution/width, so give it a projection
    # shortcut; without it the residual addition in BasicBlock.forward would
    # fail whenever stride != 1 or in_chan != out_chan.
    downsample = None
    if stride != 1 or in_chan != out_chan * BasicBlock.expansion:
        downsample = nn.Sequential(
            conv1x1(in_chan, out_chan * BasicBlock.expansion, stride),
            nn.BatchNorm2d(out_chan * BasicBlock.expansion),
        )
    layers = [BasicBlock(in_chan, out_chan, stride=stride, downsample=downsample)]
    for _ in range(bnum - 1):
        layers.append(BasicBlock(out_chan, out_chan, stride=1))
    return nn.Sequential(*layers)
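# Sketch: the first block in a stage absorbs the stride/channel change, the
# remaining blocks keep shape:
#
#     stage = create_layer_basic(64, 128, bnum=2, stride=2)
#     stage(torch.randn(1, 64, 56, 56)).shape   # torch.Size([1, 128, 28, 28])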
|
|
resnet18_url = 'https://download.pytorch.org/models/resnet18-5c106cde.pth'


class ResNet18(nn.Module):
    """ResNet-18 backbone that returns the stride-8/16/32 feature maps,
    as used by BiSeNet-style segmentation heads."""

    def __init__(self):
        super(ResNet18, self).__init__()
        self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,
                               bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = create_layer_basic(64, 64, bnum=2, stride=1)
        self.layer2 = create_layer_basic(64, 128, bnum=2, stride=2)
        self.layer3 = create_layer_basic(128, 256, bnum=2, stride=2)
        self.layer4 = create_layer_basic(256, 512, bnum=2, stride=2)
        self.init_weight()

    def forward(self, x):
        x = self.conv1(x)
        x = F.relu(self.bn1(x))
        x = self.maxpool(x)

        x = self.layer1(x)
        feat8 = self.layer2(x)        # 1/8 of the input resolution
        feat16 = self.layer3(feat8)   # 1/16
        feat32 = self.layer4(feat16)  # 1/32
        return feat8, feat16, feat32

    def init_weight(self):
        # Initialize from the torchvision ImageNet checkpoint, skipping the
        # classifier weights ('fc'), which this backbone does not have.
        state_dict = modelzoo.load_url(resnet18_url)

        self_state_dict = self.state_dict()
        for k, v in state_dict.items():
            if 'fc' in k:
                continue
            self_state_dict.update({k: v})
        self.load_state_dict(self_state_dict)

    def get_params(self):
        # Split parameters into those that should receive weight decay
        # (conv/linear weights) and those that should not (biases, BN params).
        wd_params, nowd_params = [], []
        for name, module in self.named_modules():
            if isinstance(module, (nn.Linear, nn.Conv2d)):
                wd_params.append(module.weight)
                if module.bias is not None:
                    nowd_params.append(module.bias)
            elif isinstance(module, nn.BatchNorm2d):
                nowd_params += list(module.parameters())
        return wd_params, nowd_params
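

if __name__ == "__main__":
    # Smoke-test sketch: exercises the BiSeNet-style backbone and shows the
    # usual way get_params() feeds optimizer parameter groups (weight decay on
    # conv/linear weights only). Constructing ResNet18 downloads the
    # torchvision checkpoint on first run.
    net = ResNet18()
    feat8, feat16, feat32 = net(torch.randn(1, 3, 224, 224))
    print(feat8.shape, feat16.shape, feat32.shape)  # strides 8, 16, 32

    wd_params, nowd_params = net.get_params()
    optimizer = torch.optim.SGD(
        [
            {"params": wd_params, "weight_decay": 5e-4},
            {"params": nowd_params, "weight_decay": 0.0},
        ],
        lr=0.05,
        momentum=0.9,
    )
    print(sum(p.numel() for p in wd_params), "parameters receive weight decay")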