# NOTE(review): removed Hugging Face Spaces page residue ("Spaces: Sleeping")
# captured during extraction — it is not part of the module and is not valid Python.
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn.utils import weight_norm
import math
class DataEmbedding_inverted(nn.Module):
    """Inverted embedding: map each variate's full time series to a d_model vector.

    The input is transposed so that the linear layer acts along the time axis,
    producing one embedding per variate (iTransformer-style) rather than one per
    time step.

    Args:
        c_in: length of the time axis consumed by the linear map (the last
            dimension after the permute in ``forward``).
        d_model: size of each variate's embedding vector.
        dropout: dropout probability applied to the embeddings.
    """

    def __init__(self, c_in, d_model, dropout=0.1):
        super(DataEmbedding_inverted, self).__init__()
        # One shared linear map over the time axis, applied to every variate row.
        self.value_embedding = nn.Linear(c_in, d_model)
        self.dropout = nn.Dropout(p=dropout)

    def forward(self, x, x_mark):
        """Embed ``x`` ([Batch, Time, Variate]) to [Batch, Variate, d_model].

        If ``x_mark`` is given, each of its channels (permuted the same way) is
        appended as an extra "variate" row before embedding, so the output has
        [Batch, Variate + MarkChannels, d_model].
        """
        # [B, T, V] -> [B, V, T]: each row now holds one variate's whole series.
        series_rows = x.permute(0, 2, 1)
        if x_mark is None:
            embedded = self.value_embedding(series_rows)
        else:
            # Stack time-feature channels below the variate rows along dim 1.
            mark_rows = x_mark.permute(0, 2, 1)
            embedded = self.value_embedding(torch.cat([series_rows, mark_rows], 1))
        # [B, V(+marks), d_model]
        return self.dropout(embedded)