"...resnet50_tensorflow.git" did not exist on "d3d2ad3d71eb091c7dfab746c700b5af1ebb58b5"
Unverified commit 49912168, authored by Chi and committed by GitHub

Removed the redundant SiLUActivation class. (#27136)

* Removed the redundant SiLUActivation class; nn.functional.silu is now used directly.

* I apologize for adding torch.functional.silu. I have replaced it with nn.SiLU.
parent 00d8502b
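For context, a minimal equivalence sketch (illustration only, not part of the commit) of why the wrapper class was redundant: PyTorch's built-in nn.SiLU module computes exactly what SiLUActivation.forward delegated to, silu(x) = x * sigmoid(x).

    # Equivalence sketch: the removed SiLUActivation wrapper simply forwarded
    # to nn.functional.silu, which is the same function the nn.SiLU module
    # applies: silu(x) = x * sigmoid(x).
    import torch
    from torch import nn

    x = torch.randn(4)
    module_out = nn.SiLU()(x)               # module form now registered in ACT2CLS
    functional_out = nn.functional.silu(x)  # functional form the old wrapper called
    manual_out = x * torch.sigmoid(x)       # SiLU by its definition
    assert torch.allclose(module_out, functional_out)
    assert torch.allclose(module_out, manual_out)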
@@ -137,19 +137,6 @@ class AccurateGELUActivation(nn.Module):
         return 0.5 * input * (1 + torch.tanh(self.precomputed_constant * (input + 0.044715 * torch.pow(input, 3))))
 
 
-class SiLUActivation(nn.Module):
-    """
-    See Gaussian Error Linear Units (Hendrycks et al., https://arxiv.org/abs/1606.08415) where the SiLU (Sigmoid Linear
-    Unit) was originally introduced and coined, and see Sigmoid-Weighted Linear Units for Neural Network Function
-    Approximation in Reinforcement Learning (Elfwing et al., https://arxiv.org/abs/1702.03118) and Swish: a Self-Gated
-    Activation Function (Ramachandran et al., https://arxiv.org/abs/1710.05941v1) where the SiLU was experimented with
-    later.
-    """
-
-    def forward(self, input: Tensor) -> Tensor:
-        return nn.functional.silu(input)
-
-
 class MishActivation(nn.Module):
     """
     See Mish: A Self-Regularized Non-Monotonic Activation Function (Misra., https://arxiv.org/abs/1908.08681). Also
@@ -226,8 +213,8 @@ ACT2CLS = {
     "relu2": ReLUSquaredActivation,
     "relu6": nn.ReLU6,
     "sigmoid": nn.Sigmoid,
-    "silu": SiLUActivation,
-    "swish": SiLUActivation,
+    "silu": nn.SiLU,
+    "swish": nn.SiLU,
     "tanh": nn.Tanh,
 }
 ACT2FN = ClassInstantier(ACT2CLS)
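A hypothetical usage sketch after this change (assumes a transformers install with this commit applied; ClassInstantier instantiates the mapped class on lookup): resolving "silu" or "swish" through ACT2FN now yields an nn.SiLU instance instead of the removed wrapper, with identical outputs.

    # Usage sketch: ACT2FN["silu"] returns an instantiated nn.SiLU module.
    import torch
    from transformers.activations import ACT2FN

    act = ACT2FN["silu"]   # an nn.SiLU instance after this change
    x = torch.randn(2, 3)
    assert torch.allclose(act(x), torch.nn.functional.silu(x))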