"""Model architecture module — LLaMA-style Decoder-Only Transformer."""
from .norm import RMSNorm
from .rope import RotaryPositionalEmbedding
from .attention import GroupedQueryAttention
from .feedforward import SwiGLUFeedForward
from .transformer_block import TransformerBlock
from .llm_model import LLMModel
from .utils import count_parameters_detailed, estimate_memory_gb
# Public API of the model package, grouped by role.
__all__ = [
    # Core building blocks
    "RMSNorm",
    "RotaryPositionalEmbedding",
    "GroupedQueryAttention",
    "SwiGLUFeedForward",
    # Composed model
    "TransformerBlock",
    "LLMModel",
    # Introspection helpers
    "count_parameters_detailed",
    "estimate_memory_gb",
]