import torch
from torch import nn

# Mamba and MambaConfig are expected from a mamba.py-style implementation
# (e.g. https://github.com/alxndrTL/mamba.py) available on the import path.
from mamba import Mamba, MambaConfig
class FeedForward(nn.Module):
    """Position-wise feed-forward block: Linear -> GELU -> Linear with dropout."""

    def __init__(self, dim, hidden_dim, dropout):
        super().__init__()
        self.net = nn.Sequential(
            nn.Linear(dim, hidden_dim),
            nn.GELU(),
            nn.Dropout(dropout),
            nn.Linear(hidden_dim, dim),
            nn.Dropout(dropout),
        )

    def forward(self, x):
        return self.net(x)
class MCGatingUnit(nn.Module):
    """Multiplicative gating between two parallel single-layer Mamba branches."""

    def __init__(self, d_model, d_ffn, dropout):
        # d_ffn and dropout are accepted for interface symmetry with the
        # other blocks but are not used by the Mamba branches themselves.
        super().__init__()
        self.config = MambaConfig(d_model=d_model, n_layers=1)
        self.COB_1 = Mamba(self.config)
        self.COB_2 = Mamba(self.config)

    def forward(self, x):
        # Feed the same input through both branches, then gate element-wise.
        u = self.COB_1(x)
        v = self.COB_2(x)
        return u * v
class MCDPMAMBABlock(nn.Module):
    """Pre-norm residual block: gating sub-layer followed by a feed-forward sub-layer."""

    def __init__(self, d_model, d_ffn, dropout):
        super().__init__()
        # Note: the same LayerNorm instance is shared by both sub-layers.
        self.norm = nn.LayerNorm(d_model)
        self.mcgu = MCGatingUnit(d_model, d_ffn, dropout)
        self.ffn = FeedForward(d_model, d_ffn, dropout)

    def forward(self, x):
        # Gating sub-layer with residual connection.
        residual = x
        x = self.norm(x)
        x = self.mcgu(x)
        x = x + residual
        # Feed-forward sub-layer with residual connection.
        residual = x
        x = self.norm(x)
        x = self.ffn(x)
        return x + residual
class MCDPMAMBA(nn.Module):
    """Stack of num_layers MCDPMAMBABlock layers applied sequentially."""

    def __init__(self, d_model, d_ffn, num_layers, dropout):
        super().__init__()
        self.model = nn.Sequential(
            *[MCDPMAMBABlock(d_model, d_ffn, dropout) for _ in range(num_layers)]
        )

    def forward(self, x):
        return self.model(x)
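
# Minimal usage sketch (assumptions: the mamba.py dependency above is
# installed, inputs are shaped (batch, seq_len, d_model), and the
# hyperparameter values here are illustrative, not from the source).
if __name__ == "__main__":
    model = MCDPMAMBA(d_model=64, d_ffn=128, num_layers=2, dropout=0.1)
    x = torch.randn(4, 100, 64)  # (batch, sequence length, feature dim)
    out = model(x)
    print(out.shape)  # the stack preserves shape: torch.Size([4, 100, 64])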