import torch
import torch.nn as nn
class ResidualBlock(nn.Module):
    """Pre-norm residual MLP block: x + MLP(LayerNorm(x)).

    The inner MLP is Linear -> GELU -> Linear, all at width ``dim``,
    so input and output shapes are identical.
    """

    def __init__(self, dim):
        super().__init__()
        # Normalize before the MLP (pre-norm residual style).
        self.ln = nn.LayerNorm(dim)
        layers = [
            nn.Linear(dim, dim),
            nn.GELU(),
            nn.Linear(dim, dim),
        ]
        self.fc = nn.Sequential(*layers)

    def forward(self, x):
        """Return ``x`` plus the MLP applied to the normalized input."""
        branch = self.fc(self.ln(x))
        return branch + x
class SADIM_V2_Expert(nn.Module):
    """Variational autoencoder with a wide (h=3584) MLP encoder/decoder.

    ``forward`` returns ``(reconstruction, mu, logvar)``. The latent is
    sampled via the reparameterization trick; ``logvar`` is clamped to
    [-10, 10] for numerical stability before taking ``exp``.
    NOTE(review): sampling happens in eval mode too — callers appear to
    rely on the stochastic output, so this is preserved.
    """

    def __init__(self, input_dim=12, latent_dim=64):
        super().__init__()
        width = 3584
        # Encoder: two norm+GELU linear stages, then a residual block.
        self.encoder = nn.Sequential(
            nn.Linear(input_dim, width),
            nn.LayerNorm(width),
            nn.GELU(),
            nn.Linear(width, width),
            nn.LayerNorm(width),
            nn.GELU(),
            ResidualBlock(width),
        )
        # Separate heads for the Gaussian posterior parameters.
        self.fc_mu = nn.Linear(width, latent_dim)
        self.fc_logvar = nn.Linear(width, latent_dim)
        # Decoder mirrors the encoder and projects back to input_dim.
        self.decoder = nn.Sequential(
            nn.Linear(latent_dim, width),
            nn.GELU(),
            nn.Linear(width, width),
            nn.LayerNorm(width),
            nn.GELU(),
            ResidualBlock(width),
            nn.Linear(width, input_dim),
        )

    def forward(self, x):
        """Encode ``x``, sample a latent, decode.

        Returns:
            tuple: (reconstruction, mu, logvar) — logvar already clamped.
        """
        feats = self.encoder(x)
        mu = self.fc_mu(feats)
        # Clamp keeps exp(0.5 * logvar) in a safe numeric range.
        logvar = self.fc_logvar(feats).clamp(-10, 10)
        eps = torch.randn_like(mu)
        z = torch.exp(0.5 * logvar) * eps + mu
        return self.decoder(z), mu, logvar
# --- Preprocessing guide ---
# For correct results, normalize the inputs with these divisors:
# ra, l: /360.0 | dec, b: /90.0 | d_pc: /10000.0
# pmra, pmdec: /500.0 | x, y, z: /15000.0