class SiLU(nn.Module):
    """Implements the SiLU activation from `"Gaussian Error Linear Units (GELUs)"
    <https://arxiv.org/pdf/1606.08415.pdf>`_ (also known as Swish).

    This activation is computed as follows:

    .. math::
        f(x) = x \\cdot \\sigma(x)
    """

    def forward(self, x):
        return _SiLU.apply(x)
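# A minimal sketch of the SiLU formula above, f(x) = x * sigmoid(x), written with
# plain PyTorch ops for illustration; the module itself routes through a custom
# autograd Function (_SiLU) rather than this eager expression.
import torch

x = torch.randn(4)
out = x * torch.sigmoid(x)  # numerically matches torch.nn.functional.silu(x)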
class Mish(nn.Module):
    """Implements the Mish activation module from `"Mish: A Self Regularized
    Non-Monotonic Neural Activation Function" <https://arxiv.org/pdf/1908.08681.pdf>`_

    This activation is computed as follows:

    .. math::
        f(x) = x \\cdot \\tanh(\\ln(1 + e^x))
    """

    def forward(self, input):
        return F.mish(input)
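# Hedged sketch of the Mish formula above, f(x) = x * tanh(softplus(x)), using
# plain PyTorch ops; `F` in the module is assumed to be the library's own
# functional namespace that provides mish().
import torch

x = torch.randn(4)
out = x * torch.tanh(torch.nn.functional.softplus(x))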
class NLReLU(_Activation):
    """Implements the Natural-Logarithm ReLU activation module from
    `"Natural-Logarithm-Rectified Activation Function in Convolutional Neural Networks"
    <https://arxiv.org/pdf/1908.03682.pdf>`_

    This activation is computed as follows:

    .. math::
        f(x) = \\ln(1 + \\beta \\cdot \\max(0, x))

    Args:
        inplace (bool): should the operation be performed inplace
    """

    def forward(self, input):
        return F.nl_relu(input, inplace=self.inplace)
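# Hedged sketch of the NL-ReLU formula above, f(x) = ln(1 + beta * max(0, x)),
# with beta=1.0 as an assumed default; the actual module delegates to F.nl_relu
# and takes `inplace` from its _Activation base class.
import torch

def nl_relu_sketch(x: torch.Tensor, beta: float = 1.0) -> torch.Tensor:
    # log1p keeps the computation stable for small pre-activations
    return torch.log1p(beta * torch.relu(x))

out = nl_relu_sketch(torch.randn(4))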