"""

Script based on:

Wang, Xueliang, Honge Ren, and Achuan Wang.

 "Smish: A Novel Activation Function for Deep Learning Methods.

 " Electronics 11.4 (2022): 540.

smish(x) = x * tanh(softplus(x)) = x * tanh(ln(1 + sigmoid(x)))

"""

# import pytorch
import torch
import torch.nn.functional as F
from torch import nn

# import activation functions
import TEED.utils.AF.Fsmish as Func


class Smish(nn.Module):
    """Applies the Smish activation function element-wise:

    smish(x) = x * tanh(ln(1 + sigmoid(x)))

    Note: this is *not* ``x * tanh(softplus(x))`` (that is Mish);
    Smish uses sigmoid inside the logarithm.

    Shape:

        - Input: (N, *) where * means any number of additional
          dimensions

        - Output: (N, *), same shape as the input

    Examples:

        >>> m = Smish()

        >>> input = torch.randn(2)

        >>> output = m(input)

    Reference: Wang, Ren, and Wang, "Smish: A Novel Activation Function
    for Deep Learning Methods", Electronics 11.4 (2022): 540.

    """

    def __init__(self):
        """Init method.

        Smish has no learnable parameters, so only the base
        nn.Module initializer is invoked.
        """
        super().__init__()

    def forward(self, input: torch.Tensor) -> torch.Tensor:
        """Forward pass of the function.

        Delegates the element-wise computation to the functional
        implementation in ``TEED.utils.AF.Fsmish``.
        """
        return Func.smish(input)