Unverified Commit 95dea389 authored by Kirthi Shankar Sivamani, committed by GitHub

Add release to deprecation warnings (#447)



Change deprecation warnings
Signed-off-by: Kirthi Shankar Sivamani <ksivamani@nvidia.com>
parent 06eebf66
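
The diff below wraps each legacy `transformer_engine.jax` name with `deprecate_wrapper` from `..common.utils`. That helper's implementation is not part of this commit; as a rough sketch, a wrapper of this kind usually looks something like the following (names and behavior here are assumptions, not the library's actual code):

import functools
import warnings

def deprecate_wrapper(obj, msg):
    # Assumed shape of the helper: warn with the supplied message on every
    # call, then delegate to the wrapped callable or class unchanged.
    @functools.wraps(obj)
    def wrapped(*args, **kwargs):
        warnings.warn(msg, category=DeprecationWarning, stacklevel=2)
        return obj(*args, **kwargs)
    return wrapped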
@@ -10,29 +10,39 @@ from ..common.utils import deprecate_wrapper
 
 extend_logical_axis_rules = deprecate_wrapper(
     flax.extend_logical_axis_rules,
-    "extend_logical_axis_rules is moving to transformer_engine.jax.flax module")
+    "extend_logical_axis_rules is moving to transformer_engine.jax.flax module"
+    " and will be fully removed in the next release (v1.0.0).")
 DenseGeneral = deprecate_wrapper(flax.DenseGeneral,
-                                 "DenseGeneral is moving to transformer_engine.jax.flax module")
+                                 "DenseGeneral is moving to transformer_engine.jax.flax module"
+                                 " and will be fully removed in the next release (v1.0.0).")
 LayerNorm = deprecate_wrapper(flax.LayerNorm,
-                              "LayerNorm is moving to transformer_engine.jax.flax module")
+                              "LayerNorm is moving to transformer_engine.jax.flax module"
+                              " and will be fully removed in the next release (v1.0.0).")
 LayerNormDenseGeneral = deprecate_wrapper(
     flax.LayerNormDenseGeneral,
-    "LayerNormDenseGeneral is moving to transformer_engine.jax.flax module")
+    "LayerNormDenseGeneral is moving to transformer_engine.jax.flax module"
+    " and will be fully removed in the next release (v1.0.0).")
 LayerNormMLP = deprecate_wrapper(flax.LayerNormMLP,
-                                 "LayerNormMLP is moving to transformer_engine.jax.flax module")
+                                 "LayerNormMLP is moving to transformer_engine.jax.flax module"
+                                 " and will be fully removed in the next release (v1.0.0).")
 TransformerEngineBase = deprecate_wrapper(
     flax.TransformerEngineBase,
-    "TransformerEngineBase is moving to transformer_engine.jax.flax module")
+    "TransformerEngineBase is moving to transformer_engine.jax.flax module"
+    " and will be fully removed in the next release (v1.0.0).")
 MultiHeadAttention = deprecate_wrapper(
-    flax.MultiHeadAttention, "MultiHeadAttention is moving to transformer_engine.jax.flax module")
+    flax.MultiHeadAttention, "MultiHeadAttention is moving to transformer_engine.jax.flax module"
+    " and will be fully removed in the next release (v1.0.0).")
 RelativePositionBiases = deprecate_wrapper(
     flax.RelativePositionBiases,
-    "RelativePositionBiases is moving to transformer_engine.jax.flax module")
+    "RelativePositionBiases is moving to transformer_engine.jax.flax module"
+    " and will be fully removed in the next release (v1.0.0).")
 TransformerLayer = deprecate_wrapper(
-    flax.TransformerLayer, "TransformerLayer is moving to transformer_engine.jax.flax module")
+    flax.TransformerLayer, "TransformerLayer is moving to transformer_engine.jax.flax module"
+    " and will be fully removed in the next release (v1.0.0).")
 TransformerLayerType = deprecate_wrapper(
     flax.TransformerLayerType,
-    "TransformerLayerType is moving to transformer_engine.jax.flax module")
+    "TransformerLayerType is moving to transformer_engine.jax.flax module"
+    " and will be fully removed in the next release (v1.0.0).")
 
 __all__ = [
     'fp8_autocast', 'update_collections', 'update_fp8_metas', 'get_delayed_scaling',
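
CPython hides DeprecationWarning by default outside `__main__`, so users of these aliases may not see the new removal notice unless they opt in. A stdlib-only sketch for surfacing the warnings or, during a planned migration, muting just these ones (the regex mirrors the message strings above):

import warnings

# Surface every DeprecationWarning so the "(v1.0.0)" removal notice is visible.
warnings.simplefilter("always", DeprecationWarning)

# Or mute only the relocation warnings while imports are being migrated.
warnings.filterwarnings(
    "ignore",
    message=r".* is moving to transformer_engine\.jax\.flax module",
    category=DeprecationWarning,
)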
@@ -366,7 +366,7 @@ class TransformerEngineBaseModule(torch.nn.Module, ABC):
         if isinstance(state, list):
             warnings.warn(
                 "This checkpoint format is deprecated and will be"
-                "removed in a future release of Transformer Engine"
+                " removed in the next release (v1.0.0)."
             )
 
             # Retrieve checkpointed items.
@@ -412,7 +412,7 @@ class TransformerEngineBaseModule(torch.nn.Module, ABC):
         else:
             warnings.warn(
                 "This checkpoint format is deprecated and will be"
-                "removed in a future release of Transformer Engine"
+                " removed in the next release (v1.0.0)."
             )
             # Load extra items.
             self.fp8_meta.update(state["extra_fp8_variables"])
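
Adjacent Python string literals concatenate with no implicit separator, which is why the continuation line in each of these messages needs its own leading space. A one-line illustration:

# Implicit literal concatenation inserts nothing between the pieces;
# the leading space on the second literal supplies the word gap.
msg = ("This checkpoint format is deprecated and will be"
       " removed in the next release (v1.0.0).")
assert "will be removed" in msg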
@@ -516,7 +516,7 @@ class LayerNormLinear(TransformerEngineBaseModule):
     .. warning::
 
         Argument :attr:`skip_weight_param_allocation` is deprecated and will
-        be fully removed in future releases.
+        be fully removed in the next release (v1.0.0).
 
     Parameters
     ----------
@@ -624,7 +624,7 @@ class LayerNormLinear(TransformerEngineBaseModule):
         if skip_weight_param_allocation:
             warnings.warn(
                 "Argument `skip_weight_param_allocation` is deprecated and"
-                "will be fully removed in future releases. It is ignored"
+                " will be fully removed in the next release (v1.0.0). It is ignored "
                 "starting from v0.11.",
                 category=DeprecationWarning,
             )
@@ -831,7 +831,7 @@ class LayerNormLinear(TransformerEngineBaseModule):
     .. warning::
 
         Arguments :attr:`weight` and :attr:`bias` are deprecated and will
-        be fully removed in future releases.
+        be fully removed in the next release (v1.0.0).
 
     Parameters
     ----------
@@ -855,7 +855,7 @@ class LayerNormLinear(TransformerEngineBaseModule):
         if weight is not None or bias is not None:
             raise RuntimeError(
                 "Arguments `weight` and `bias` are deprecated and "
-                "will be fully removed in future releases."
+                "will be fully removed in the next release (v1.0.0)."
             )
 
         with self.prepare_forward(inp, is_first_microbatch) as inp:
@@ -451,7 +451,7 @@ class Linear(TransformerEngineBaseModule):
     .. warning::
 
         Argument :attr:`skip_weight_param_allocation` is deprecated and will
-        be fully removed in future releases.
+        be fully removed in the next release (v1.0.0).
 
     Parameters
     ----------
@@ -538,7 +538,7 @@ class Linear(TransformerEngineBaseModule):
         if skip_weight_param_allocation:
             warnings.warn(
                 "Argument `skip_weight_param_allocation` is deprecated and"
-                "will be fully removed in future releases. It has ignored"
+                " will be fully removed in the next release (v1.0.0). It is ignored "
                 "starting from v0.11.",
                 category=DeprecationWarning,
             )
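
A hedged test sketch for the constructor behavior above: passing `skip_weight_param_allocation` should emit the warning and otherwise be ignored. The feature sizes are made up, and pytest plus a CUDA-capable environment are assumed:

import pytest
import transformer_engine.pytorch as te

def test_skip_weight_param_allocation_warns():
    # The message now pins the removal to v1.0.0, so match on that.
    with pytest.warns(DeprecationWarning, match=r"next release \(v1\.0\.0\)"):
        te.Linear(128, 128, skip_weight_param_allocation=True)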
@@ -706,7 +706,7 @@ class Linear(TransformerEngineBaseModule):
     .. warning::
 
         Arguments :attr:`weight` and :attr:`bias` are deprecated and will
-        be fully removed in future releases.
+        be fully removed in the next release (v1.0.0).
 
     Parameters
     ----------
@@ -730,7 +730,7 @@ class Linear(TransformerEngineBaseModule):
         if weight is not None or bias is not None:
             raise RuntimeError(
                 "Arguments `weight` and `bias` are deprecated and "
-                "will be fully removed in future releases."
+                "will be fully removed in the next release (v1.0.0)."
             )
 
         with self.prepare_forward(inp, is_first_microbatch) as inp:
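
The caller-facing effect of the forward-pass change: supplying the deprecated `weight`/`bias` arguments raises immediately instead of warning. A sketch assuming a CUDA device and hypothetical shapes:

import torch
import transformer_engine.pytorch as te

layer = te.Linear(128, 128)
inp = torch.randn(8, 128, device="cuda")

try:
    # `weight` is one of the deprecated keywords checked in the hunk above.
    layer(inp, weight=torch.empty(128, 128, device="cuda"))
except RuntimeError as err:
    print(err)  # Arguments `weight` and `bias` are deprecated and will be fully removed in the next release (v1.0.0).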
@@ -71,7 +71,7 @@ class TransformerLayer(torch.nn.Module):
     .. warning::
 
         Arguments :attr:`attention_softmax_in_fp32` and :attr:`apply_query_key_layer_scaling`
-        are deprecated and will be fully removed in future releases.
+        are deprecated and will be fully removed in the next release (v1.0.0).
 
     .. note::
@@ -247,7 +247,7 @@ class TransformerLayer(torch.nn.Module):
             warnings.warn(
                 "Arguments `attention_softmax_in_fp32` and `apply_query_key_layer_scaling`"
-                "are deprecated and will be fully removed in future releases.",
+                " are deprecated and will be fully removed in the next release (v1.0.0).",
                 category=DeprecationWarning,
             )
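
Before upgrading to v1.0.0, remaining deprecated call sites can be flushed out by promoting these warnings to errors; a minimal stdlib sketch (equivalent to running with -W error::DeprecationWarning):

import warnings

# Turn every DeprecationWarning into an exception so deprecated usages
# fail fast in CI before the v1.0.0 removals land.
warnings.simplefilter("error", DeprecationWarning)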