Unverified Commit 1dea291a authored by Santiago Castro, committed by GitHub

Remove unnecessary use of FusedLayerNorm in XLNet

parent f3e0218f
```diff
@@ -188,11 +188,8 @@ def swish(x):

 ACT2FN = {"gelu": gelu, "relu": torch.nn.functional.relu, "swish": swish}

-try:
-    from apex.normalization.fused_layer_norm import FusedLayerNorm as XLNetLayerNorm
-except (ImportError, AttributeError) as e:
-    logger.info("Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex .")
-    from torch.nn import LayerNorm as XLNetLayerNorm
+XLNetLayerNorm = nn.LayerNorm

 class XLNetRelativeAttention(nn.Module):
     def __init__(self, config):
```