"vscode:/vscode.git/clone" did not exist on "9f0468addca7c7050f552c0d9848ecd5901e4fd7"
Commit 7e3aedfa authored by Hongkun Yu, committed by A. Unique TensorFlower

Mark the subclass Transformer as deprecated.

Don't export the CompiledTransformer layer which may be deleted soon.

PiperOrigin-RevId: 478918916
parent 3c4cf26a
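
For call sites migrating off the deprecated layer, a minimal sketch follows. It is not part of the commit; the hyperparameter values are placeholders, and the argument mapping (`intermediate_size`/`intermediate_activation` to `inner_dim`/`inner_activation`) follows the `TransformerEncoderBlock` constructor, consistent with the `inner_dropout=intermediate_dropout` forwarding visible in the diff below.

# Migration sketch (illustrative values, not from this commit).
from official.nlp.modeling import layers

# Before: the deprecated subclass, which now logs a warning when built.
old_block = layers.Transformer(
    num_attention_heads=8,
    intermediate_size=2048,
    intermediate_activation="relu")

# After: the recommended replacement with the equivalent arguments.
new_block = layers.TransformerEncoderBlock(
    num_attention_heads=8,
    inner_dim=2048,
    inner_activation="relu")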
@@ -60,7 +60,8 @@ from official.nlp.modeling.layers.text_layers import BertTokenizer
 from official.nlp.modeling.layers.text_layers import FastWordpieceBertTokenizer
 from official.nlp.modeling.layers.text_layers import SentencepieceTokenizer
 from official.nlp.modeling.layers.tn_transformer_expand_condense import TNTransformerExpandCondense
-from official.nlp.modeling.layers.transformer import *
+from official.nlp.modeling.layers.transformer import Transformer
+from official.nlp.modeling.layers.transformer import TransformerDecoderBlock
 from official.nlp.modeling.layers.transformer_encoder_block import TransformerEncoderBlock
 from official.nlp.modeling.layers.transformer_scaffold import TransformerScaffold
 from official.nlp.modeling.layers.transformer_xl import TransformerXL
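
With the wildcard import replaced by explicit imports, only the names listed above resolve through the package namespace; a short sketch under that assumption:

# Names re-exported explicitly above remain importable from the package.
from official.nlp.modeling import layers

assert hasattr(layers, "Transformer")
assert hasattr(layers, "TransformerDecoderBlock")
# Per the commit message, `CompiledTransformer` is no longer re-exported.
assert not hasattr(layers, "CompiledTransformer")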
@@ -15,6 +15,7 @@
 """Keras-based transformer block layer."""
 # pylint: disable=g-classes-have-attributes
+from absl import logging
 import gin
 import tensorflow as tf
@@ -32,6 +33,9 @@ class Transformer(transformer_encoder_block.TransformerEncoderBlock):
   This layer implements the Transformer from "Attention Is All You Need".
   (https://arxiv.org/abs/1706.03762).
 
+  **Warning: this layer is deprecated. Please don't use it. Use the
+  `TransformerEncoderBlock` layer instead.**
+
   Args:
     num_attention_heads: Number of attention heads.
     intermediate_size: Size of the intermediate layer.
@@ -98,6 +102,8 @@ class Transformer(transformer_encoder_block.TransformerEncoderBlock):
         inner_dropout=intermediate_dropout,
         attention_initializer=attention_initializer,
         **kwargs)
+    logging.warning("The `Transformer` layer is deprecated. Please directly "
+                    "use `TransformerEncoderBlock`.")
 
   def get_config(self):
     return {
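
The pattern applied in the last hunk, shown as a self-contained sketch (class names here are hypothetical stand-ins, not from the repository): the deprecated subclass keeps working but warns at construction time.

# Hypothetical stand-ins illustrating the constructor-warning pattern.
from absl import logging
import tensorflow as tf


class NewBlock(tf.keras.layers.Layer):
  """Stand-in for the replacement layer (e.g. TransformerEncoderBlock)."""


class OldBlock(NewBlock):
  """Stand-in for the deprecated subclass (e.g. Transformer)."""

  def __init__(self, **kwargs):
    super().__init__(**kwargs)
    # Warn every time the deprecated layer is constructed.
    logging.warning("`OldBlock` is deprecated. Please directly use "
                    "`NewBlock`.")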