ModelZoo / ResNet50_tensorflow · Commit f8c2a917

Authored Jul 27, 2020 by Hongkun Yu; committed by A. Unique TensorFlower, Jul 27, 2020

Migrate to tf gelu. Will remove activations/gelu.py after dependencies are cleaned up.

PiperOrigin-RevId: 323499265
Parent: b8014d55
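An aside for context (not part of the commit): `tf.keras.activations.gelu` defaults to `approximate=False`, the exact erf-based GELU, so both call sites below pass `approximate=True` to preserve the tanh approximation that the removed helpers implemented. A minimal sketch of the two modes, assuming a TensorFlow build where the op is available (TF 2.4+ or the tf-nightly this commit targeted):

```python
import tensorflow as tf

x = tf.constant([-1.0, 0.0, 2.0])

# Exact GELU: x * Phi(x), where Phi is the standard normal CDF (uses erf).
exact = tf.keras.activations.gelu(x, approximate=False)

# Tanh approximation, matching the implementation this commit removes.
approx = tf.keras.activations.gelu(x, approximate=True)

print(exact.numpy(), approx.numpy())  # close, but not bit-identical
```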
Showing 2 changed files with 3 additions and 29 deletions:

  official/modeling/activations/gelu.py   (+1, -9)
  official/nlp/xlnet/xlnet_modeling.py    (+2, -20)
official/modeling/activations/gelu.py

@@ -14,12 +14,6 @@
 # ==============================================================================
 """Gaussian error linear unit."""
 
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import math
-
 import tensorflow as tf
@@ -35,6 +29,4 @@ def gelu(x):
   Returns:
     `x` with the GELU activation applied.
   """
-  cdf = 0.5 * (1.0 + tf.tanh(
-      (math.sqrt(2 / math.pi) * (x + 0.044715 * tf.pow(x, 3)))))
-  return x * cdf
+  return tf.keras.activations.gelu(x, approximate=True)
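A quick sanity check (a sketch, not part of the commit) that the replacement matches the deleted code: the removed `cdf` expression is exactly the tanh-approximate GELU from https://arxiv.org/abs/1606.08415, so `approximate=True` reproduces it to within float32 tolerance.

```python
import math

import numpy as np
import tensorflow as tf

x = tf.constant(np.linspace(-4.0, 4.0, 17), dtype=tf.float32)

# The implementation deleted from gelu.py.
cdf = 0.5 * (1.0 + tf.tanh(
    (math.sqrt(2 / math.pi) * (x + 0.044715 * tf.pow(x, 3)))))
manual = x * cdf

# The replacement introduced by this commit.
migrated = tf.keras.activations.gelu(x, approximate=True)

print(np.allclose(manual.numpy(), migrated.numpy(), atol=1e-5))  # True
```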
official/nlp/xlnet/xlnet_modeling.py

@@ -14,32 +14,14 @@
 # ==============================================================================
 """Keras layers of XLNet model in TF 2.0."""
 
-from __future__ import absolute_import
-from __future__ import division
-# from __future__ import google_type_annotations
-from __future__ import print_function
-
 import copy
-import numpy as np
+import functools
 
 import tensorflow as tf
 
 from official.nlp.xlnet import data_utils
 
-
-def gelu(x):
-  """Gaussian Error Linear Unit.
-
-  This is a smoother version of the RELU.
-  Original paper: https://arxiv.org/abs/1606.08415
-  Args:
-    x: float Tensor to perform activation.
-
-  Returns:
-    `x` with the GELU activation applied.
-  """
-  cdf = 0.5 * (1.0 + tf.tanh(
-      (np.sqrt(2 / np.pi) * (x + 0.044715 * tf.pow(x, 3)))))
-  return x * cdf
+gelu = functools.partial(tf.keras.activations.gelu, approximate=True)
 
 
 def rel_shift(x, klen=-1):
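In xlnet_modeling.py the function is replaced with `functools.partial` rather than a plain alias because `approximate=True` must be pre-bound: downstream code calls `gelu(x)` with a single argument. A small usage sketch under that assumption (the `Dense` layer here is illustrative, not from the commit):

```python
import functools

import tensorflow as tf

# Module-level callable with approximate=True baked in, as in the commit.
gelu = functools.partial(tf.keras.activations.gelu, approximate=True)

# Existing one-argument call sites keep working, e.g. as a layer activation.
layer = tf.keras.layers.Dense(8, activation=gelu)
print(layer(tf.ones([1, 4])).shape)  # (1, 8)
```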