import copy
import math

import torch.nn as nn
import torch.nn.functional as F

from ..builder import MODELS


@MODELS.register_module()
class ConvModule(nn.Module):
    """Conv1d block that applies convolution, then optional normalization and activation."""

    def __init__(
        self,
        in_channels,
        out_channels,
        kernel_size=1,
        stride=1,
        padding=0,
        conv_cfg=None,
        norm_cfg=None,
        act_cfg=None,
    ):
        super().__init__()

        assert norm_cfg is None or isinstance(norm_cfg, dict)
        self.with_norm = norm_cfg is not None

        conv_cfg_base = dict(
            in_channels=in_channels,
            out_channels=out_channels,
            kernel_size=kernel_size,
            stride=stride,
            padding=padding,
        )

        # The conv bias is redundant when a normalization layer follows it.
        if self.with_norm:
            conv_cfg_base["bias"] = False

        assert conv_cfg is None or isinstance(conv_cfg, dict)
        if conv_cfg is not None:
            conv_cfg_base.update(conv_cfg)

        self.conv = nn.Conv1d(**conv_cfg_base)

        if self.with_norm:
            norm_cfg = copy.copy(norm_cfg)
            norm_type = norm_cfg.pop("type")
            self.norm_type = norm_type

            if norm_type == "BN":
                self.norm = nn.BatchNorm1d(num_features=out_channels, **norm_cfg)
            elif norm_type == "GN":
                # norm_cfg is expected to supply num_groups here.
                self.norm = nn.GroupNorm(num_channels=out_channels, **norm_cfg)
            elif norm_type == "LN":
                self.norm = nn.LayerNorm(out_channels, **norm_cfg)
            else:
                raise ValueError(f"Unsupported norm type: {norm_type}")

        assert act_cfg is None or isinstance(act_cfg, dict)
        self.with_act = act_cfg is not None

        if self.with_act:
            act_cfg = copy.copy(act_cfg)
            act_type = act_cfg.pop("type")

            if act_type == "relu":
                self.act = nn.ReLU(inplace=True, **act_cfg)
            else:
                # Resolve the activation class by name from torch.nn (e.g. "GELU")
                # instead of eval-ing an arbitrary config string.
                self.act = getattr(nn, act_type)(**act_cfg)

        self.apply(self._init_weights)

    def _init_weights(self, module):
        if isinstance(module, nn.Conv1d):
            # PyTorch's default Conv1d weight initialization.
            nn.init.kaiming_uniform_(module.weight, a=math.sqrt(5))
            if module.bias is not None:
                nn.init.constant_(module.bias, 0.0)
        elif isinstance(module, (nn.BatchNorm1d, nn.GroupNorm, nn.LayerNorm)):
            nn.init.constant_(module.weight, 1.0)
            nn.init.constant_(module.bias, 0.0)

    def forward(self, x, mask=None):
        x = self.conv(x)

        if mask is not None:
            # Resize the mask when a strided convolution changed the temporal length.
            if mask.shape[-1] != x.shape[-1]:
                mask = (
                    F.interpolate(mask.unsqueeze(1).to(x.dtype), size=x.size(-1), mode="nearest")
                    .squeeze(1)
                    .to(mask.dtype)
                )
            x = x * mask.unsqueeze(1).float().detach()

        if self.with_norm:
            if self.norm_type == "LN":
                # LayerNorm normalizes the last dimension, so move channels there and back.
                x = self.norm(x.permute(0, 2, 1)).permute(0, 2, 1)
            else:
                x = self.norm(x)

        if self.with_act:
            x = self.act(x)

        # Re-apply the mask so padded positions stay zero after norm/activation.
        if mask is not None:
            x = x * mask.unsqueeze(1).float().detach()
            return x, mask
        return x
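

# Minimal usage sketch, not part of the module itself: shapes and config values
# below are illustrative assumptions. Because of the relative import at the top,
# run it as a module (e.g. `python -m <your_package>.<this_module>`), where the
# package path is hypothetical.
if __name__ == "__main__":
    import torch

    block = ConvModule(
        in_channels=64,
        out_channels=128,
        kernel_size=3,
        padding=1,
        norm_cfg=dict(type="LN"),
        act_cfg=dict(type="relu"),
    )
    feats = torch.randn(2, 64, 100)  # (batch, channels, time)
    mask = torch.ones(2, 100, dtype=torch.bool)  # valid-position mask
    out, out_mask = block(feats, mask)  # mask is returned alongside the features
    print(out.shape)  # torch.Size([2, 128, 100])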