""" Attention and normalization modules """
from onmt.modules.util_class import Elementwise
from onmt.modules.gate import context_gate_factory, ContextGate
from onmt.modules.global_attention import GlobalAttention
from onmt.modules.conv_multi_step_attention import ConvMultiStepAttention
from onmt.modules.copy_generator import CopyGenerator, CopyGeneratorLoss
from onmt.modules.multi_headed_attn import MultiHeadedAttention
from onmt.modules.embeddings import Embeddings, PositionalEncoding
from onmt.modules.weight_norm import WeightNormConv2d
from onmt.modules.average_attn import AverageAttention
from onmt.modules.alibi_position_bias import AlibiPositionalBias
from onmt.modules.rmsnorm import RMSNorm
# Public API of this subpackage: exactly the names imported above.
# NOTE(review): "CopyGeneratorLMLossCompute" was previously listed here but is
# never imported in this module, which made ``from onmt.modules import *``
# raise AttributeError on the missing name — removed to keep __all__ in sync
# with the actual bindings.
__all__ = [
    "Elementwise",
    "context_gate_factory",
    "ContextGate",
    "GlobalAttention",
    "ConvMultiStepAttention",
    "CopyGenerator",
    "CopyGeneratorLoss",
    "MultiHeadedAttention",
    "Embeddings",
    "PositionalEncoding",
    "AlibiPositionalBias",
    "WeightNormConv2d",
    "AverageAttention",
    "RMSNorm",
]