ModelZoo / ResNet50_tensorflow · Commit abd510a6

Authored Jan 16, 2020 by Hongkun Yu; committed by A. Unique TensorFlower on Jan 16, 2020.

Adds deprecation warnings for old BERT code and the deprecated pack_inputs/unpack_inputs pattern.

PiperOrigin-RevId: 290111892
Parent: f5e6e291

Showing 2 changed files, with 29 additions and 7 deletions:

- official/modeling/tf_utils.py (+13, -0)
- official/nlp/bert_modeling.py (+16, -7)
official/modeling/tf_utils.py (view file @ abd510a6)
```diff
@@ -21,9 +21,16 @@ from __future__ import print_function
 import six
 import tensorflow as tf
 
+from tensorflow.python.util import deprecation
 from official.modeling import activations
 
 
+@deprecation.deprecated(
+    None,
+    "tf.keras.layers.Layer supports multiple positional args and kwargs as "
+    "input tensors. pack/unpack inputs to override __call__ is no longer "
+    "needed.")
 def pack_inputs(inputs):
   """Pack a list of `inputs` tensors to a tuple.
```
```diff
@@ -44,6 +51,12 @@ def pack_inputs(inputs):
   return tuple(outputs)
 
 
+@deprecation.deprecated(
+    None,
+    "tf.keras.layers.Layer supports multiple positional args and kwargs as "
+    "input tensors. pack/unpack inputs to override __call__ is no longer "
+    "needed.")
 def unpack_inputs(inputs):
   """unpack a tuple of `inputs` tensors to a tuple.
```
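As the warning text says, `tf.keras.layers.Layer` now accepts multiple positional args and kwargs directly, so packing tensors into a tuple just to get them through `__call__` is no longer necessary. A minimal sketch of the replacement pattern, using a hypothetical `AddAndScale` layer that is not part of this diff:

```python
import tensorflow as tf

# Modern tf.keras.layers.Layer takes several input tensors as separate
# positional args and kwargs, so no pack_inputs/unpack_inputs helpers
# are needed to route them through __call__.
class AddAndScale(tf.keras.layers.Layer):  # hypothetical example layer
  def __init__(self, scale=1.0, **kwargs):
    super(AddAndScale, self).__init__(**kwargs)
    self.scale = scale

  def call(self, word_embeddings, position_embeddings, mask=None):
    # Two input tensors arrive as separate positional args; an optional
    # keyword tensor works as well -- no tuple packing required.
    out = (word_embeddings + position_embeddings) * self.scale
    if mask is not None:
      out *= tf.cast(mask, out.dtype)
    return out

layer = AddAndScale(scale=0.5)
a = tf.ones([2, 4, 8])
b = tf.ones([2, 4, 8])
print(layer(a, b).shape)  # (2, 4, 8)
```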
official/nlp/bert_modeling.py (view file @ abd510a6)
```diff
@@ -24,6 +24,7 @@ import math
 import six
 import tensorflow as tf
 
+from tensorflow.python.util import deprecation
 from official.modeling import tf_utils
```
```diff
@@ -145,6 +146,7 @@ class AlbertConfig(BertConfig):
     return config
 
 
+@deprecation.deprecated(None, "The function should not be used any more.")
 def get_bert_model(input_word_ids,
                    input_mask,
                    input_type_ids,
```
````diff
@@ -183,6 +185,8 @@ class BertModel(tf.keras.layers.Layer):
   ```
   """
 
+  @deprecation.deprecated(
+      None, "Please use `nlp.modeling.networks.TransformerEncoder` instead.")
   def __init__(self, config, float_type=tf.float32, **kwargs):
     super(BertModel, self).__init__(**kwargs)
     self.config = (
````
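Note that here the decorator sits on `__init__` rather than on a free function, so the warning fires whenever a `BertModel` instance is constructed. A small sketch of that mechanic, with a hypothetical `LegacyLayer` standing in for the real class:

```python
import tensorflow as tf
from tensorflow.python.util import deprecation

# Decorating __init__ (as the diff does for BertModel) makes every
# construction of the layer log the deprecation warning.
class LegacyLayer(tf.keras.layers.Layer):  # hypothetical stand-in
  @deprecation.deprecated(
      None, "Please use the replacement network instead.")
  def __init__(self, **kwargs):
    super(LegacyLayer, self).__init__(**kwargs)

LegacyLayer()  # triggers the warning at construction time
```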
```diff
@@ -240,6 +244,7 @@ class BertModel(tf.keras.layers.Layer):
     Args:
       inputs: packed input tensors.
       mode: string, `bert` or `encoder`.
+
     Returns:
       Output tensor of the last layer for BERT training (mode=`bert`) which
         is a float Tensor of shape [batch_size, seq_length, hidden_size] or
```
```diff
@@ -358,8 +363,8 @@ class EmbeddingPostprocessor(tf.keras.layers.Layer):
     self.output_layer_norm = tf.keras.layers.LayerNormalization(
         name="layer_norm", axis=-1, epsilon=1e-12, dtype=tf.float32)
-    self.output_dropout = tf.keras.layers.Dropout(rate=self.dropout_prob,
-                                                  dtype=tf.float32)
+    self.output_dropout = tf.keras.layers.Dropout(
+        rate=self.dropout_prob, dtype=tf.float32)
     super(EmbeddingPostprocessor, self).build(input_shapes)
 
   def __call__(self, word_embeddings, token_type_ids=None, **kwargs):
```
```diff
@@ -546,8 +551,8 @@ class Dense3D(tf.keras.layers.Layer):
     use_bias: A bool, whether the layer uses a bias.
     output_projection: A bool, whether the Dense3D layer is used for output
       linear projection.
-    backward_compatible: A bool, whether the variables shape are compatible with
-      checkpoints converted from TF 1.x.
+    backward_compatible: A bool, whether the variables shape are compatible
+      with checkpoints converted from TF 1.x.
   """
 
   def __init__(self,
```
```diff
@@ -647,7 +652,8 @@ class Dense3D(tf.keras.layers.Layer):
     """
     if self.backward_compatible:
       kernel = tf.keras.backend.reshape(self.kernel, self.kernel_shape)
-      bias = (tf.keras.backend.reshape(self.bias, self.bias_shape)
-              if self.use_bias else None)
+      bias = (
+          tf.keras.backend.reshape(self.bias, self.bias_shape)
+          if self.use_bias else None)
     else:
       kernel = self.kernel
```
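For context on the `backward_compatible` branch above: the layer keeps its variables in the flat shape that TF 1.x checkpoints expect and reshapes them on the fly at call time. A minimal sketch of that idea, with hypothetical sizes that are not taken from the diff:

```python
import tensorflow as tf

# Sketch: store a kernel in the flat TF 1.x checkpoint shape, reshape at use.
hidden_size, num_heads, size_per_head = 8, 2, 4

# Variable kept 2-D so old checkpoints with (hidden, hidden) shapes restore cleanly.
kernel = tf.Variable(tf.random.normal([hidden_size, num_heads * size_per_head]))

# At call time, view it in the 3-D shape the einsum-based layer expects.
kernel_3d = tf.keras.backend.reshape(
    kernel, [hidden_size, num_heads, size_per_head])
print(kernel_3d.shape)  # (8, 2, 4)
```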
```diff
@@ -784,7 +790,9 @@ class TransformerBlock(tf.keras.layers.Layer):
         rate=self.hidden_dropout_prob)
     self.attention_layer_norm = (
         tf.keras.layers.LayerNormalization(
-            name="self_attention_layer_norm", axis=-1, epsilon=1e-12))
+            name="self_attention_layer_norm",
+            axis=-1,
+            epsilon=1e-12,
+            # We do layer norm in float32 for numeric stability.
+            dtype=tf.float32))
     self.intermediate_dense = Dense2DProjection(
```
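The comment introduced in this hunk ("We do layer norm in float32 for numeric stability.") reflects a common mixed-precision practice: run LayerNormalization in float32 even when the surrounding activations are float16. A minimal sketch of the pattern, illustrative only and not taken from the diff:

```python
import tensorflow as tf

# With float16 activations, the mean/variance statistics inside
# LayerNormalization can lose precision; pinning the layer to float32
# keeps the normalization numerically stable.
layer_norm = tf.keras.layers.LayerNormalization(
    axis=-1, epsilon=1e-12, dtype=tf.float32)

x = tf.random.normal([2, 4, 8], dtype=tf.float16)
y = layer_norm(tf.cast(x, tf.float32))  # normalize in float32
y = tf.cast(y, tf.float16)              # cast back for the float16 graph
print(y.dtype)  # <dtype: 'float16'>
```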
```diff
@@ -909,6 +917,7 @@ class Transformer(tf.keras.layers.Layer):
       inputs: packed inputs.
       return_all_layers: bool, whether to return outputs of all layers inside
         encoders.
+
     Returns:
       Output tensor of the last layer or a list of output tensors.
     """
```