import torch
import torch.nn as nn
import torch.nn.functional as F
class ResidualBlock(nn.Module):
    """Two 3x3 convs with batch norm and 2D dropout, plus an additive skip connection."""

    def __init__(self, in_channels, out_channels, dropout_rate=0.2):
        super().__init__()
        self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1)
        self.bn1 = nn.BatchNorm2d(out_channels)
        self.dropout1 = nn.Dropout2d(p=dropout_rate)
        self.conv2 = nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1)
        self.bn2 = nn.BatchNorm2d(out_channels)
        self.dropout2 = nn.Dropout2d(p=dropout_rate)
        # Identity skip when channel counts match; otherwise project with a 1x1 conv.
        self.skip_connection = nn.Identity()
        if in_channels != out_channels:
            self.skip_connection = nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=1, padding=0)

    def forward(self, x):
        residual = self.skip_connection(x)
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.dropout1(out)
        out = self.bn2(self.conv2(out))
        out = self.dropout2(out)
        out += residual
        return F.relu(out)
class MyModel(nn.Module):
    """ResNet-style classifier: a 7x7 stem, three stages of residual blocks,
    global average pooling, and a single linear classification head."""

    def __init__(self, num_classes=100, dropout_rate=0.2):
        super().__init__()
        self.dropout_rate = dropout_rate
        # Stem: 7x7 conv downsamples by 2, then 3x3 max pool downsamples by 2 again.
        self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3)
        self.bn1 = nn.BatchNorm2d(64)
        self.pool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.dropout1 = nn.Dropout2d(p=self.dropout_rate)
        # Three stages of four residual blocks each, widening 64 -> 128 -> 256 -> 512.
        self.block1 = self._resnet_layers(64, 128, num_blocks=4, dropout_rate=self.dropout_rate)
        self.block2 = self._resnet_layers(128, 256, num_blocks=4, dropout_rate=self.dropout_rate)
        self.block3 = self._resnet_layers(256, 512, num_blocks=4, dropout_rate=self.dropout_rate)
        self.global_avg_pool = nn.AdaptiveAvgPool2d(1)
        self.dropout2 = nn.Dropout(p=self.dropout_rate)
        # Single linear head mapping the 512-dim pooled features to class logits.
        self.fc = nn.Linear(512, num_classes)
        self.features = nn.Sequential(
            self.conv1,
            self.bn1,
            nn.ReLU(),
            self.pool1,
            self.dropout1,
            self.block1,
            self.block2,
            self.block3,
            self.global_avg_pool,
            self.dropout2,
        )
    @staticmethod
    def _resnet_layers(in_channels, out_channels, num_blocks, dropout_rate=0.2):
        # The first block changes the channel count; the remaining num_blocks - 1
        # keep it fixed, so the stage holds exactly num_blocks residual blocks.
        return nn.Sequential(
            ResidualBlock(in_channels, out_channels, dropout_rate=dropout_rate),
            *[ResidualBlock(out_channels, out_channels, dropout_rate=dropout_rate)
              for _ in range(num_blocks - 1)]
        )
    def forward(self, x):
        x = self.features(x)     # (N, 512, 1, 1) after global average pooling
        x = torch.flatten(x, 1)  # (N, 512)
        x = self.fc(x)           # (N, num_classes) logits
        return x
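

if __name__ == "__main__":
    # Minimal smoke test (illustrative only): the 224x224 input resolution is an
    # assumption, not a requirement of the model -- AdaptiveAvgPool2d makes the
    # network agnostic to spatial size as long as the stem's downsampling succeeds.
    model = MyModel(num_classes=100, dropout_rate=0.2)
    model.eval()  # disable dropout and use batch-norm running statistics
    with torch.no_grad():
        dummy = torch.randn(2, 3, 224, 224)  # batch of 2 RGB images
        logits = model(dummy)
    print(logits.shape)  # expected: torch.Size([2, 100])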