| """ Embeddings module """ | |
| import math | |
| import warnings | |
| import torch | |
| import torch.nn as nn | |
class PositionalEncoding(nn.Module):
    """Sinusoidal positional encoding for non-recurrent neural networks.

    Implementation based on "Attention Is All You Need"
    :cite:`DBLP:journals/corr/VaswaniSPUJGKP17`

    Args:
        dropout (float): dropout parameter
        dim (int): embedding size
        max_len (int): maximum number of positions to precompute
    """

    def __init__(self, dropout, dim, max_len=5000):
        if dim % 2 != 0:
            raise ValueError("Cannot use sin/cos positional encoding with "
                             "odd dim (got dim={:d})".format(dim))
        pe = torch.zeros(max_len, dim)
        position = torch.arange(0, max_len).unsqueeze(1)
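        # Inverse frequencies 1/10000^(2i/dim) for each even channel i,
        # computed in log space for numerical stability.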
        div_term = torch.exp((torch.arange(0, dim, 2, dtype=torch.float) *
                              -(math.log(10000.0) / dim)))
        pe[:, 0::2] = torch.sin(position.float() * div_term)
        pe[:, 1::2] = torch.cos(position.float() * div_term)
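        # Insert a singleton batch dimension, (max_len, 1, dim), so the
        # table broadcasts over the batch in forward().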
        pe = pe.unsqueeze(1)
        super(PositionalEncoding, self).__init__()
        self.register_buffer('pe', pe)
        self.dropout = nn.Dropout(p=dropout)
        self.dim = dim

    def forward(self, emb, step=None):
        """Embed inputs.

        Args:
            emb (FloatTensor): Sequence of word vectors
                ``(seq_len, batch_size, self.dim)``
            step (int or NoneType): If stepwise (``seq_len = 1``), use
                the encoding for this position.

        Returns:
            FloatTensor: Scaled embeddings with positional encodings added,
            ``(seq_len, batch_size, self.dim)``.
        """
        emb = emb * math.sqrt(self.dim)
        if step is None:
            emb = emb + self.pe[:emb.size(0)]
        else:
            emb = emb + self.pe[step]
        emb = self.dropout(emb)
        return emb
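

# A minimal usage sketch (not part of the original module): it only
# illustrates the tensor shapes PositionalEncoding expects, using the
# class exactly as defined above.
if __name__ == "__main__":
    pos_enc = PositionalEncoding(dropout=0.1, dim=512)

    # Full-sequence mode: one encoding per position, (seq_len, batch, dim).
    emb = torch.zeros(10, 2, 512)
    out = pos_enc(emb)
    assert out.shape == (10, 2, 512)

    # Stepwise decoding mode: seq_len = 1, encoded as position 3.
    step_emb = torch.zeros(1, 2, 512)
    out_step = pos_enc(step_emb, step=3)
    assert out_step.shape == (1, 2, 512)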