chenpangpang / transformers · Commits · ae333d04

Unverified commit ae333d04, authored Dec 30, 2020 by Stas Bekman, committed via GitHub on Dec 30, 2020:

    torch.cuda.is_available() is redundant as apex handles that internally (#9350)
Parent: 8217d4e3
Showing 3 changed files with 15 additions and 19 deletions (+15, -19):

    src/transformers/models/bart/modeling_bart.py               +5  -6
    src/transformers/models/fsmt/modeling_fsmt.py               +5  -7
    src/transformers/models/prophetnet/modeling_prophetnet.py   +5  -6
src/transformers/models/bart/modeling_bart.py
@@ -110,13 +110,12 @@ def _expand_mask(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int]
 def BartLayerNorm(normalized_shape: torch.Size, eps: float = 1e-5, elementwise_affine: bool = True):
-    if torch.cuda.is_available():
-        try:
-            from apex.normalization import FusedLayerNorm
-            return FusedLayerNorm(normalized_shape, eps, elementwise_affine)
-        except ImportError:
-            pass
+    try:
+        from apex.normalization import FusedLayerNorm
+        return FusedLayerNorm(normalized_shape, eps, elementwise_affine)
+    except ImportError:
+        pass
     return torch.nn.LayerNorm(normalized_shape, eps, elementwise_affine)
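The change is the same in spirit in all three files: the torch.cuda.is_available() guard around the apex import is removed, on the premise stated in the commit title that apex's FusedLayerNorm handles the no-GPU case internally (routing non-CUDA inputs to the regular PyTorch kernel). A minimal, self-contained sketch of the resulting pattern follows; the helper name fused_or_plain_layer_norm is illustrative and not part of the commit.

import torch


def fused_or_plain_layer_norm(normalized_shape, eps=1e-5, elementwise_affine=True):
    # Sketch of the post-commit pattern (bart and prophetnet each define their own
    # variant of this helper: BartLayerNorm / ProphetNetLayerNorm).
    # Try apex first; if apex is not installed, fall back to torch.nn.LayerNorm.
    # There is no torch.cuda.is_available() check: per the commit title, apex's
    # FusedLayerNorm is assumed to handle CPU tensors itself.
    try:
        from apex.normalization import FusedLayerNorm

        return FusedLayerNorm(normalized_shape, eps, elementwise_affine)
    except ImportError:
        return torch.nn.LayerNorm(normalized_shape, eps, elementwise_affine)

The practical effect is that a machine with apex installed but no usable GPU now also constructs FusedLayerNorm, relying on apex's own fallback rather than on a guard in transformers.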
src/transformers/models/fsmt/modeling_fsmt.py
@@ -265,14 +265,12 @@ FSMT_INPUTS_DOCSTRING = r"""
 have_fused_layer_norm = False
-if torch.cuda.is_available():
-    try:
-        from apex.normalization import FusedLayerNorm
-        have_fused_layer_norm = True
-    except ImportError:
-        pass
+try:
+    from apex.normalization import FusedLayerNorm
+    have_fused_layer_norm = True
+except ImportError:
+    pass
 LayerNorm = FusedLayerNorm if have_fused_layer_norm else torch.nn.LayerNorm
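fsmt differs slightly from bart and prophetnet: instead of deciding per call, it sets a module-level flag at import time and binds LayerNorm once to whichever class is available. A quick, hypothetical way to see which class was bound on a given machine (not part of the commit; it assumes a transformers install at this revision, where modeling_fsmt still exposes a module-level LayerNorm):

# Inspect which layer-norm class fsmt bound at import time.
from transformers.models.fsmt import modeling_fsmt

# Prints apex.normalization.FusedLayerNorm when apex is installed,
# torch.nn.LayerNorm otherwise.
print(modeling_fsmt.LayerNorm)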
src/transformers/models/prophetnet/modeling_prophetnet.py
@@ -511,13 +511,12 @@ class ProphetNetDecoderLMOutput(ModelOutput):
 def ProphetNetLayerNorm(normalized_shape, eps=1e-5, elementwise_affine=True):
-    if torch.cuda.is_available():
-        try:
-            from apex.normalization import FusedLayerNorm
-            return FusedLayerNorm(normalized_shape, eps, elementwise_affine)
-        except ImportError:
-            pass
+    try:
+        from apex.normalization import FusedLayerNorm
+        return FusedLayerNorm(normalized_shape, eps, elementwise_affine)
+    except ImportError:
+        pass
     return torch.nn.LayerNorm(normalized_shape, eps, elementwise_affine)
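A quick CPU-only smoke test of the premise that dropping the guard is behavior-preserving (not part of the commit; it assumes a transformers checkout at this revision, where BartLayerNorm is still defined in modeling_bart, and it assumes apex's FusedLayerNorm really does accept CPU tensors when apex happens to be installed):

import torch

from transformers.models.bart.modeling_bart import BartLayerNorm

# FusedLayerNorm if apex is present, torch.nn.LayerNorm otherwise.
ln = BartLayerNorm(torch.Size([16]))

# A forward pass on CPU should work either way.
out = ln(torch.randn(2, 4, 16))
print(type(ln).__name__, tuple(out.shape))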