chenpangpang / transformers
Commit 58b59a0c authored Aug 31, 2019 by LysandreJik

Random seed is accessible anywhere within the common tests

Parent: d7a4c325
Showing 1 changed file with 5 additions and 3 deletions (+5, -3).
pytorch_transformers/tests/modeling_common_test.py

```diff
@@ -163,8 +163,9 @@ class CommonTestCases:
         if not self.test_head_masking:
             return
 
-        torch.manual_seed(42)
+        global_rng.seed(42)
         config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
+        global_rng.seed()
 
         config.output_attentions = True
         config.output_hidden_states = True
@@ -174,7 +175,7 @@ class CommonTestCases:
             model.eval()
 
             # Prepare head_mask
             # Set require_grad after having prepared the tensor to avoid error (leaf variable has been moved into the graph interior)
             head_mask = torch.ones(self.model_tester.num_hidden_layers, self.model_tester.num_attention_heads)
             head_mask[0, 0] = 0
             head_mask[-1, :-1] = 0
@@ -551,12 +552,13 @@ class ConfigTester(object):
         self.create_and_test_config_to_json_file()
 
 
+global_rng = random.Random()
+
 def ids_tensor(shape, vocab_size, rng=None, name=None):
     """Creates a random int32 tensor of the shape within the vocab size."""
     if rng is None:
-        rng = random.Random()
+        rng = global_rng
 
     total_dims = 1
     for dim in shape:
```
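The pattern this commit introduces is a single module-level `random.Random()` instance that the test helpers draw from. Note how the first hunk brackets exactly the `prepare_config_and_inputs_for_common()` call: `global_rng.seed(42)` before it makes the generated inputs reproducible, and `global_rng.seed()` after it re-seeds from system entropy so later tests are not pinned to 42. Below is a minimal runnable sketch of that pattern; the body of `ids_tensor` beyond the lines visible in the diff (the `randint` sampling and the `view`/`contiguous` reshaping) is an assumption for illustration, not a quote of the file.

```python
import random

import torch

# One module-level RNG shared by every helper, so the seed is
# "accessible anywhere": seeding it here affects all callers.
global_rng = random.Random()


def ids_tensor(shape, vocab_size, rng=None):
    """Creates a random int32 tensor of the shape within the vocab size."""
    if rng is None:
        rng = global_rng  # fall back to the shared RNG, not a fresh instance

    total_dims = 1
    for dim in shape:
        total_dims *= dim

    # Assumed body: sample ids uniformly in [0, vocab_size) and reshape.
    values = [rng.randint(0, vocab_size - 1) for _ in range(total_dims)]
    return torch.tensor(values, dtype=torch.long).view(shape).contiguous()


# Deterministic section, mirroring test_headmasking after this commit:
global_rng.seed(42)
a = ids_tensor((2, 3), vocab_size=100)
global_rng.seed(42)
b = ids_tensor((2, 3), vocab_size=100)
assert torch.equal(a, b)  # same seed on the shared RNG -> same tensor

# Re-seed from entropy so later draws are no longer deterministic.
global_rng.seed()
```

This also suggests why the earlier `torch.manual_seed(42)` call was replaced: the test inputs come from Python's `random` module via `ids_tensor`, not from PyTorch's global RNG, so seeding the shared `random.Random` instance is what actually makes those inputs reproducible.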