Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
ModelZoo
ResNet50_tensorflow
Commits
9ac54b65
Commit
9ac54b65
authored
Aug 27, 2020
by
Hongkun Yu
Committed by
A. Unique TensorFlower
Aug 27, 2020
Browse files
Define the package pattern for keras-nlp
PiperOrigin-RevId: 328872353
parent
e3aa2762
Changes
5
Show whitespace changes
Inline
Side-by-side
Showing
5 changed files
with
37 additions
and
15 deletions
+37
-15
official/nlp/keras_nlp/__init__.py
official/nlp/keras_nlp/__init__.py
+17
-0
official/nlp/keras_nlp/layers/__init__.py
official/nlp/keras_nlp/layers/__init__.py
+16
-0
official/nlp/keras_nlp/layers/transformer_encoder_block_test.py
...al/nlp/keras_nlp/layers/transformer_encoder_block_test.py
+0
-11
official/nlp/modeling/layers/transformer.py
official/nlp/modeling/layers/transformer.py
+2
-2
official/nlp/modeling/models/seq2seq_transformer.py
official/nlp/modeling/models/seq2seq_transformer.py
+2
-2
No files found.
official/nlp/keras_nlp/__init__.py
View file @
9ac54b65
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras-NLP package definition."""
# pylint: disable=wildcard-import
from
official.nlp.keras_nlp.layers
import
*
official/nlp/keras_nlp/layers/__init__.py
View file @
9ac54b65
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras-NLP layers package definition."""
from
official.nlp.keras_nlp.layers.transformer_encoder_block
import
TransformerEncoderBlock
official/nlp/keras_nlp/layers/transformer_encoder_block_test.py
View file @
9ac54b65
...
...
@@ -246,16 +246,5 @@ class TransformerArgumentTest(keras_parameterized.TestCase):
self
.
assertEqual
(
encoder_block_config
,
new_encoder_block
.
get_config
())
def _create_cache(batch_size, init_decode_length, num_heads, head_size):
  """Builds a zero-initialized attention cache for decode tests.

  Args:
    batch_size: Number of sequences decoded in parallel.
    init_decode_length: Initial decode length the cache is sized for.
    num_heads: Number of attention heads.
    head_size: Per-head projection dimension.

  Returns:
    A dict with 'key' and 'value' entries, each a float32 tf.Tensor of
    zeros with shape [batch_size, init_decode_length, num_heads, head_size].
  """
  # Both cache entries share the same shape, so build it once.
  cache_shape = [batch_size, init_decode_length, num_heads, head_size]
  return {
      'key': tf.zeros(cache_shape, dtype=tf.float32),
      'value': tf.zeros(cache_shape, dtype=tf.float32),
  }
# Standard TensorFlow test entry point: discovers and runs the test cases
# in this module when it is executed directly as a script.
if __name__ == '__main__':
  tf.test.main()
official/nlp/modeling/layers/transformer.py
View file @
9ac54b65
...
...
@@ -18,14 +18,14 @@
import
gin
import
tensorflow
as
tf
from
official.nlp
.keras_nlp.layers
import
transformer_encoder_block
from
official.nlp
import
keras_nlp
from
official.nlp.modeling.layers
import
attention
from
official.nlp.modeling.layers
import
multi_channel_attention
from
official.nlp.modeling.layers.util
import
tf_function_if_eager
@
tf
.
keras
.
utils
.
register_keras_serializable
(
package
=
"Text"
)
class
Transformer
(
transformer_encoder_block
.
TransformerEncoderBlock
):
class
Transformer
(
keras_nlp
.
TransformerEncoderBlock
):
"""Transformer layer.
This layer implements the Transformer from "Attention Is All You Need".
...
...
official/nlp/modeling/models/seq2seq_transformer.py
View file @
9ac54b65
...
...
@@ -20,7 +20,7 @@ import math
import
tensorflow
as
tf
from
official.modeling
import
tf_utils
from
official.nlp
.keras_nlp.layers
import
transformer_encoder_block
from
official.nlp
import
keras_nlp
from
official.nlp.modeling
import
layers
from
official.nlp.modeling.ops
import
beam_search
from
official.nlp.transformer
import
metrics
...
...
@@ -472,7 +472,7 @@ class TransformerEncoder(tf.keras.layers.Layer):
self
.
encoder_layers
=
[]
for
i
in
range
(
self
.
num_layers
):
self
.
encoder_layers
.
append
(
transformer_encoder_block
.
TransformerEncoderBlock
(
keras_nlp
.
TransformerEncoderBlock
(
num_attention_heads
=
self
.
num_attention_heads
,
inner_dim
=
self
.
_intermediate_size
,
inner_activation
=
self
.
_activation
,
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment