"...git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "1ad1c4a864a385f6cebd5a450c95e50934a2c25c"
Unverified commit edb17023, authored by amyeroberts and committed by GitHub

SiLU activation wrapper for safe importing (#28509)

Add back the `SiLUActivation` wrapper so existing imports of the class keep working: it now subclasses `nn.SiLU` and emits a deprecation warning on construction.
parent ff86bc36
```diff
@@ -13,6 +13,7 @@
 # limitations under the License.
 import math
+import warnings
 from collections import OrderedDict
 
 import torch
```
```diff
@@ -137,6 +138,14 @@ class AccurateGELUActivation(nn.Module):
         return 0.5 * input * (1 + torch.tanh(self.precomputed_constant * (input + 0.044715 * torch.pow(input, 3))))
 
 
+class SiLUActivation(nn.SiLU):
+    def __init__(self, *args, **kwargs):
+        warnings.warn(
+            "The SiLUActivation class has been deprecated and will be removed in v4.39. Please use nn.SiLU instead.",
+        )
+        super().__init__(*args, **kwargs)
+
+
 class MishActivation(nn.Module):
     """
     See Mish: A Self-Regularized Non-Monotonic Activation Function (Misra., https://arxiv.org/abs/1908.08681). Also
```
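For readers skimming the diff: the wrapper exists purely so that legacy `from transformers.activations import SiLUActivation` imports fail softly instead of raising an `ImportError`. Below is a minimal, self-contained sketch; the class body is copied from the diff above, while the warning-capture harness around it is illustrative and not part of the commit. It shows that constructing the old class warns but computes exactly what `nn.SiLU` computes.

```python
import warnings

import torch
from torch import nn


# Copied from the diff above: a thin subclass kept only for backward compatibility.
class SiLUActivation(nn.SiLU):
    def __init__(self, *args, **kwargs):
        warnings.warn(
            "The SiLUActivation class has been deprecated and will be removed in v4.39. Please use nn.SiLU instead.",
        )
        super().__init__(*args, **kwargs)


# Illustrative check (not part of the commit): the deprecated name still
# constructs, emits a UserWarning, and its output matches nn.SiLU exactly.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    act = SiLUActivation()
assert any("deprecated" in str(w.message) for w in caught)

x = torch.randn(4)
assert torch.equal(act(x), nn.SiLU()(x))
```

Subclassing the replacement, rather than simply aliasing `SiLUActivation = nn.SiLU`, is what makes it possible to attach the warning at construction time while keeping behavior identical.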