Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
ModelZoo
ResNet50_tensorflow
Commits
befa593d
"git@developer.sourcefind.cn:modelzoo/resnet50_tensorflow.git" did not exist on "188536e7edd01ecb13e7cd6c118eeae6b1c38ae7"
Commit
befa593d
authored
Aug 09, 2020
by
xinliupitt
Browse files
indent
parent
7e4837c2
Changes
1
Hide whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
18 additions
and
18 deletions
+18
-18
official/nlp/modeling/models/seq2seq_transformer.py
official/nlp/modeling/models/seq2seq_transformer.py
+18
-18
No files found.
official/nlp/modeling/models/seq2seq_transformer.py
View file @
befa593d
...
@@ -433,23 +433,23 @@ class TransformerEncoder(tf.keras.layers.Layer):
[Side-by-side diff; whitespace changes are hidden, so the "before" and "after"
token streams are identical — this hunk is an indentation-only change. The
scraper scattered each code line into one token per line and duplicated it for
the two diff columns; reconstructed single rendering of the hunk:]

    "num_layers": self._num_layers,
    "num_attention_heads": self._num_attention_heads,
    "intermediate_size": self._intermediate_size,
    "activation": self._activation,
    "dropout_rate": self._dropout_rate,
    "attention_dropout_rate": self._attention_dropout_rate,
    "use_bias": self._use_bias,
    "norm_first": self._norm_first,
    "norm_epsilon": self._norm_epsilon,
    "intermediate_dropout": self._intermediate_dropout
}

def call(self,
[signature truncated in the diff view]
...
@@ -548,23 +548,23 @@ class TransformerDecoder(tf.keras.layers.Layer):
[Side-by-side diff; whitespace changes are hidden, so the "before" and "after"
token streams are identical — this hunk is an indentation-only change. The
scraper scattered each code line into one token per line and duplicated it for
the two diff columns; reconstructed single rendering of the hunk:]

    "num_layers": self._num_layers,
    "num_attention_heads": self._num_attention_heads,
    "intermediate_size": self._intermediate_size,
    "activation": self._activation,
    "dropout_rate": self._dropout_rate,
    "attention_dropout_rate": self._attention_dropout_rate,
    "use_bias": self._use_bias,
    "norm_first": self._norm_first,
    "norm_epsilon": self._norm_epsilon,
    "intermediate_dropout": self._intermediate_dropout
}

def call(self,
[signature truncated in the diff view]
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment