import torch
import torch.nn.functional as F


def linear_gelu(X: torch.Tensor, W: torch.Tensor, B: torch.Tensor) -> torch.Tensor:
    """
    Baseline implementation of a linear layer with GELU activation in PyTorch.

    Args:
        X: Input tensor of shape (M, K) - input features (float16)
        W: Weight tensor of shape (K, N) - weight matrix (float16)
        B: Bias tensor of shape (N,) - bias vector (float32)

    Returns:
        Output tensor of shape (M, N) - output with GELU activation (float16)
    """
    # The float16 matmul result is promoted to float32 when the float32 bias
    # is added, so GELU runs at higher precision before casting back to float16.
    return F.gelu((X @ W) + B).to(torch.float16)
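

# Minimal usage sketch (the shapes and names below are illustrative, not part
# of the original file). float16 matmul is most reliably supported on GPU, so
# this prefers CUDA when available.
if __name__ == "__main__":
    device = "cuda" if torch.cuda.is_available() else "cpu"
    M, K, N = 4, 8, 16
    X = torch.randn(M, K, dtype=torch.float16, device=device)
    W = torch.randn(K, N, dtype=torch.float16, device=device)
    B = torch.randn(N, dtype=torch.float32, device=device)
    out = linear_gelu(X, W, B)
    print(out.shape, out.dtype)  # expected: torch.Size([4, 16]) torch.float16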