# NOTE(review): removed web-scrape artifacts (viewer chrome, "Runtime error"
# banners, file-size line, commit hash, and the line-number gutter) that were
# not part of the original Python source and would break the interpreter.
import torch
import torch.nn as nn
import torch.nn.functional as F
class ResidualBlock(nn.Module):
    """Residual branch: two 3x3 conv -> batch-norm -> ReLU stages.

    Channel count and spatial resolution are preserved (stride 1, padding 1).
    Note that the skip connection itself is NOT applied here — the caller is
    expected to add this module's output to its input (as ``Net`` does).
    """

    def __init__(self, in_channels, out_channels):
        super(ResidualBlock, self).__init__()
        # Submodules are registered in the same order as the data flow so a
        # seeded initialization stays reproducible.
        self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()
        self.conv2 = nn.Conv2d(out_channels, out_channels, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(out_channels)

    def forward(self, x):
        """Return relu(bn2(conv2(relu(bn1(conv1(x)))))) at x's resolution."""
        stage_one = self.relu(self.bn1(self.conv1(x)))
        return self.relu(self.bn2(self.conv2(stage_one)))
dropout_value = 0.01
class Net(nn.Module):
    """ResNet-9-style CNN producing 10 class logits from 3-channel images.

    Architecture (prep -> layer1+residual -> layer2 -> layer3+residual ->
    4x4 max pool -> linear head). Each pooled layer halves the spatial size,
    so a 32x32 input (e.g. CIFAR-10) reaches 4x4 before the final pool
    reduces it to 1x1 over 512 channels.
    """

    def __init__(self):
        super(Net, self).__init__()
        # Prep layer: 3 -> 64 channels, spatial size unchanged.
        self.convblock01 = nn.Sequential(
            nn.Conv2d(in_channels=3, out_channels=64, kernel_size=(3, 3), padding=1, bias=False),
            nn.ReLU(),
            nn.BatchNorm2d(64),
            nn.Dropout(dropout_value),
        )
        # Layer 1: 64 -> 128 channels, spatial size halved; paired with a
        # residual branch added in forward().
        self.convblock11 = nn.Sequential(
            nn.Conv2d(in_channels=64, out_channels=128, kernel_size=(3, 3), padding=1, bias=False),
            nn.MaxPool2d((2, 2)),
            nn.BatchNorm2d(128),
            nn.ReLU(),
            nn.Dropout(dropout_value),
        )
        self.residual11 = ResidualBlock(in_channels=128, out_channels=128)
        # Layer 2: 128 -> 256 channels, spatial size halved (no residual).
        self.convblock21 = nn.Sequential(
            nn.Conv2d(in_channels=128, out_channels=256, kernel_size=(3, 3), padding=1, bias=False),
            nn.MaxPool2d((2, 2)),
            nn.BatchNorm2d(256),
            nn.ReLU(),
            nn.Dropout(dropout_value),
        )
        # Layer 3: 256 -> 512 channels, spatial size halved; paired with a
        # second residual branch.
        self.convblock31 = nn.Sequential(
            nn.Conv2d(in_channels=256, out_channels=512, kernel_size=(3, 3), padding=1, bias=False),
            nn.MaxPool2d((2, 2)),
            nn.BatchNorm2d(512),
            nn.ReLU(),
            nn.Dropout(dropout_value),
        )
        self.residual31 = ResidualBlock(in_channels=512, out_channels=512)
        # Final 4x4 pool collapses the remaining spatial map (4x4 for a
        # 32x32 input) down to 1x1.
        self.pool = nn.MaxPool2d((4, 4))
        # Fully connected classification head: 512 features -> 10 logits.
        self.fc = nn.Linear(512, 10)

    def forward(self, x):
        """Compute class logits.

        Args:
            x: input batch of shape (N, 3, 32, 32) — presumably CIFAR-sized;
               the 512-feature head requires the pooled map to be 1x1.

        Returns:
            Logits tensor of shape (N, 10).
        """
        out = self.convblock01(x)
        out = self.convblock11(out)
        out = out + self.residual11(out)  # first skip connection
        out = self.convblock21(out)
        out = self.convblock31(out)
        out = out + self.residual31(out)  # second skip connection
        out = self.pool(out)
        # Flatten per sample. Keeping the batch dimension explicit via
        # size(0) (instead of view(-1, 512)) raises a clear error rather than
        # silently reshaping across samples if the map is not exactly 1x1.
        out = out.view(out.size(0), -1)
        return self.fc(out)  # (removed stray '|' scrape artifact after return)