ModelZoo / ResNet50_tensorflow · Commits

Commit 8028eee4, authored Aug 13, 2020 by xinliupitt
pylint
parent f0e2f833
Showing 2 changed files with 17 additions and 21 deletions

official/nlp/modeling/models/seq2seq_transformer.py        +16  -16
official/nlp/modeling/models/seq2seq_transformer_test.py   +1   -5
official/nlp/modeling/models/seq2seq_transformer.py
@@ -53,22 +53,22 @@ def create_model(params, is_train):

After the change, the block reads:

  decoder_layer = TransformerDecoder(**encdec_kwargs)

  model_kwargs = dict(
      vocab_size=params["vocab_size"],
      hidden_size=params["hidden_size"],
      dropout_rate=params["layer_postprocess_dropout"],
      padded_decode=params["padded_decode"],
      num_replicas=params["num_replicas"],
      decode_batch_size=params["decode_batch_size"],
      decode_max_length=params["decode_max_length"],
      dtype=params["dtype"],
      extra_decode_length=params["extra_decode_length"],
      num_heads=params["num_heads"],
      num_layers=params["num_hidden_layers"],
      beam_size=params["beam_size"],
      alpha=params["alpha"],
      encoder_layer=encoder_layer,
      decoder_layer=decoder_layer,
      name="transformer_v2")

  with tf.name_scope("model"):
    if is_train:

The deleted side of the hunk lists the same sixteen keyword arguments, which this commit gathers into the model_kwargs dict.
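The constructor that consumes model_kwargs sits outside the visible hunk, so the following is only a minimal sketch of the pattern the commit adopts (collect keyword arguments once, unpack them at the call site). The build_transformer helper and the parameter values are hypothetical stand-ins, not code from this repository.

# Minimal sketch of the kwargs-dict pattern used above. build_transformer is
# a hypothetical stand-in for the real model constructor, and the values in
# params are illustrative only.
def build_transformer(vocab_size, hidden_size, dropout_rate, name):
  """Pretend model constructor; just echoes its configuration."""
  return {"vocab_size": vocab_size, "hidden_size": hidden_size,
          "dropout_rate": dropout_rate, "name": name}

params = {
    "vocab_size": 33708,
    "hidden_size": 512,
    "layer_postprocess_dropout": 0.1,
}

# Collect the keyword arguments once...
model_kwargs = dict(
    vocab_size=params["vocab_size"],
    hidden_size=params["hidden_size"],
    dropout_rate=params["layer_postprocess_dropout"],
    name="transformer_v2")

# ...then unpack them wherever the model is instantiated.
model = build_transformer(**model_kwargs)
print(model)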
official/nlp/modeling/models/seq2seq_transformer_test.py
-# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,10 +14,6 @@
 # ==============================================================================
 """Test Transformer model."""
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
 import tensorflow as tf
 from official.nlp.modeling.models import seq2seq_transformer
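As a side note, not part of the commit: on Python 3 the behaviors requested by these __future__ imports (true division, the print function, absolute imports) are already the default, so dropping the lines does not change what the test does. A minimal illustration:

# Not from the commit: on Python 3 the __future__ behaviors are the default,
# so removing the imports is behavior-preserving.
import sys

assert sys.version_info.major >= 3
print(3 / 2)   # 1.5 -> "true division" without importing division
print(3 // 2)  # 1   -> floor division is still spelled //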