| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|
| import torch |
| import torch.nn as nn |
| import torch.nn.functional as F |
| import math |
|
|
def gelu_forward_0(x):
    """Exact GELU activation: x * Phi(x), with Phi the standard normal CDF.

    The CDF is evaluated via the Gauss error function:
    Phi(x) = 0.5 * (1 + erf(x / sqrt(2))).
    """
    cdf = 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
    return x * cdf
|
|
def gelu_forward_1(x):
    """Tanh approximation of GELU (the GPT/BERT variant).

    0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3)))
    """
    k = math.sqrt(2.0 / math.pi)
    inner = k * (x + 0.044715 * torch.pow(x, 3.0))
    return 0.5 * x * (1.0 + torch.tanh(inner))
|
|
class Model(nn.Module):
    """Applies GELU to four independent inputs.

    The first two go through the built-in F.gelu; the last two go through
    the hand-written erf-based and tanh-approximation implementations so
    all variants are exercised by the export test.
    """

    def __init__(self):
        super().__init__()

    def forward(self, x, y, z, w):
        out0 = F.gelu(x)
        out1 = F.gelu(y)
        out2 = gelu_forward_0(z)
        out3 = gelu_forward_1(w)
        return out0, out1, out2, out3
|
|
def test():
    """Round-trip the model through pnnx and compare outputs.

    Traces the model to TorchScript, converts it with the pnnx tool,
    re-runs inference through the generated Python module, and returns
    True when every output pair matches within tolerance.
    """
    net = Model()
    net.eval()

    torch.manual_seed(0)
    x = torch.rand(1, 16)
    y = torch.rand(12, 2, 16)
    z = torch.rand(1, 3, 12, 16)
    w = torch.rand(1, 5, 7, 9, 11)

    a = net(x, y, z, w)

    # export the traced TorchScript model
    mod = torch.jit.trace(net, (x, y, z, w))
    mod.save("test_F_gelu.pt")

    # convert torchscript to pnnx (generates test_F_gelu_pnnx.py)
    import os
    os.system("../src/pnnx test_F_gelu.pt inputshape=[1,16],[12,2,16],[1,3,12,16],[1,5,7,9,11]")

    # run inference through the pnnx-generated module
    import test_F_gelu_pnnx
    b = test_F_gelu_pnnx.test_inference()

    # every output pair must agree within rtol=atol=1e-4
    return all(torch.allclose(a0, b0, 1e-4, 1e-4) for a0, b0 in zip(a, b))
|
|
if __name__ == "__main__":
    # process exit code: 0 when outputs match, 1 on mismatch
    exit(0 if test() else 1)
|
|