chenpangpang / transformers / Commits

Commit 20c10258 (unverified)
Authored Mar 09, 2021 by Suraj Patil, committed by GitHub on Mar 09, 2021

layerdrop 0 (#10604)

Parent: 95ab0677
Showing 1 changed file with 6 additions and 0 deletions:
tests/test_modeling_m2m_100.py (+6, -0)
@@ -71,6 +71,8 @@ class M2M100ModelTester:
         hidden_act="relu",
         hidden_dropout_prob=0.1,
         attention_probs_dropout_prob=0.1,
+        encoder_layerdrop=0.0,
+        decoder_layerdrop=0.0,
         max_position_embeddings=20,
         eos_token_id=2,
         pad_token_id=1,
@@ -89,6 +91,8 @@ class M2M100ModelTester:
         self.hidden_act = hidden_act
         self.hidden_dropout_prob = hidden_dropout_prob
         self.attention_probs_dropout_prob = attention_probs_dropout_prob
+        self.encoder_layerdrop = encoder_layerdrop
+        self.decoder_layerdrop = decoder_layerdrop
         self.max_position_embeddings = max_position_embeddings
         self.eos_token_id = eos_token_id
         self.pad_token_id = pad_token_id
@@ -120,6 +124,8 @@ class M2M100ModelTester:
             decoder_ffn_dim=self.intermediate_size,
             dropout=self.hidden_dropout_prob,
             attention_dropout=self.attention_probs_dropout_prob,
             encoder_layerdrop=self.encoder_layerdrop,
             decoder_layerdrop=self.decoder_layerdrop,
             max_position_embeddings=self.max_position_embeddings,
             eos_token_id=self.eos_token_id,
             bos_token_id=self.bos_token_id,
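For context, encoder_layerdrop and decoder_layerdrop control LayerDrop (Fan et al., 2019), which randomly skips entire transformer layers during training; pinning both to 0.0 in the model tester removes that source of randomness so test forward passes stay deterministic. Below is a minimal sketch of the mechanism under that reading; it is a simplified illustration, not the actual transformers implementation, and forward_through_layers is a hypothetical helper.

import random

def forward_through_layers(hidden_states, layers, layerdrop, training=True):
    # Sketch of LayerDrop: during training, skip a whole layer with
    # probability `layerdrop`. With layerdrop=0.0 (as set in the test above)
    # every layer always runs, so the output is deterministic.
    for layer in layers:
        if training and random.random() < layerdrop:
            continue
        hidden_states = layer(hidden_states)
    return hidden_states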