class ConvBlock(nn.Module):
    """Double 3x3 convolution unit: (Conv -> BatchNorm -> ReLU) twice,
    followed by spatial dropout.

    padding=1 on both convolutions keeps the spatial resolution unchanged,
    so the output is (N, out_ch, H, W) for an (N, in_ch, H, W) input.

    Args:
        in_ch: number of input channels.
        out_ch: number of output channels (both convolutions emit this many).
        dropout: probability for the trailing ``nn.Dropout2d`` (drops whole
            channels; active only in training mode).
    """

    def __init__(self, in_ch, out_ch, dropout=0.1):
        super().__init__()
        # Assemble the pipeline as a list, then splat into nn.Sequential.
        # The attribute is still named ``conv`` and the layer order is
        # unchanged, so state-dict keys match the previous layout.
        layers = [
            nn.Conv2d(in_ch, out_ch, 3, padding=1),
            nn.BatchNorm2d(out_ch),
            nn.ReLU(inplace=True),
            nn.Conv2d(out_ch, out_ch, 3, padding=1),
            nn.BatchNorm2d(out_ch),
            nn.ReLU(inplace=True),
            nn.Dropout2d(dropout),
        ]
        self.conv = nn.Sequential(*layers)

    def forward(self, x):
        """Apply the conv stack; returns a tensor with out_ch channels."""
        out = self.conv(x)
        return out
class ImprovedUNet(nn.Module):
    """Four-level U-Net: encoder with 2x2 max-pooling, a bottleneck, and a
    decoder that upsamples with transposed convolutions and concatenates the
    matching encoder feature map (skip connection) before each decode block.

    Generalized from the original fixed sizes: channel counts are now
    parameters with defaults that reproduce the original architecture
    exactly (1 input channel, 3 output channels, 64 base features).

    NOTE(review): input H and W must be divisible by 16 — there are four 2x2
    pools, and the ConvTranspose2d upsampling must restore each encoder
    resolution exactly for ``torch.cat`` to succeed.

    Args:
        in_channels: channels of the input image (default 1, as before).
        out_channels: channels of the output map (default 3, as before).
        base: feature width of the first encoder stage; each deeper stage
            doubles it (default 64).
    """

    def __init__(self, in_channels=1, out_channels=3, base=64):
        super().__init__()
        # Per-stage widths: base, 2x, 4x, 8x, and 16x at the bottleneck.
        c1, c2, c3, c4, cb = base, base * 2, base * 4, base * 8, base * 16
        # Encoder: dropout grows with depth (0.1 -> 0.2), heaviest (0.3)
        # at the bottleneck.
        self.enc1 = ConvBlock(in_channels, c1, dropout=0.1)
        self.enc2 = ConvBlock(c1, c2, dropout=0.1)
        self.enc3 = ConvBlock(c2, c3, dropout=0.2)
        self.enc4 = ConvBlock(c3, c4, dropout=0.2)
        self.pool = nn.MaxPool2d(2)
        self.bottleneck = ConvBlock(c4, cb, dropout=0.3)
        # Decoder: each stage halves the channels via ConvTranspose2d, then
        # the ConvBlock consumes (upsampled + skip) = doubled channels.
        self.up4 = nn.ConvTranspose2d(cb, c4, 2, stride=2)
        self.dec4 = ConvBlock(c4 * 2, c4, dropout=0.2)
        self.up3 = nn.ConvTranspose2d(c4, c3, 2, stride=2)
        self.dec3 = ConvBlock(c3 * 2, c3, dropout=0.2)
        self.up2 = nn.ConvTranspose2d(c3, c2, 2, stride=2)
        self.dec2 = ConvBlock(c2 * 2, c2, dropout=0.1)
        self.up1 = nn.ConvTranspose2d(c2, c1, 2, stride=2)
        self.dec1 = ConvBlock(c1 * 2, c1, dropout=0.1)
        # 1x1 projection to the output channel count, squashed to [-1, 1].
        self.out_conv = nn.Conv2d(c1, out_channels, 1)
        self.out_act = nn.Tanh()

    def forward(self, x):
        """Run the full encode/decode pass.

        Args:
            x: tensor of shape (N, in_channels, H, W) with H, W divisible
                by 16.

        Returns:
            Tensor of shape (N, out_channels, H, W) with values in [-1, 1]
            (Tanh output).
        """
        # Encoder path; keep each stage's output for the skip connections.
        e1 = self.enc1(x)
        e2 = self.enc2(self.pool(e1))
        e3 = self.enc3(self.pool(e2))
        e4 = self.enc4(self.pool(e3))
        b = self.bottleneck(self.pool(e4))
        # Decoder path: upsample, concatenate skip along channels, decode.
        d4 = self.up4(b)
        d4 = torch.cat([d4, e4], dim=1)
        d4 = self.dec4(d4)
        d3 = self.up3(d4)
        d3 = torch.cat([d3, e3], dim=1)
        d3 = self.dec3(d3)
        d2 = self.up2(d3)
        d2 = torch.cat([d2, e2], dim=1)
        d2 = self.dec2(d2)
        d1 = self.up1(d2)
        d1 = torch.cat([d1, e1], dim=1)
        d1 = self.dec1(d1)
        out = self.out_conv(d1)
        return self.out_act(out)