Unverified commit 4d391484, authored by Stas Bekman, committed by GitHub

fix deprecation warnings (#7033)

* fix deprecation warnings

* remove tests/test_tokenization_common.py's test_padding_to_max_length

* revert test_padding_to_max_length
parent 576eec98
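
All three hunks below silence Python DeprecationWarnings. For context, a minimal sketch (not code from this commit) of how such warnings can be escalated to hard errors so that deprecated usage surfaces in a test run:

```python
# Hypothetical illustration, not from this commit: turn
# DeprecationWarnings into errors so deprecated usage fails loudly.
import warnings

warnings.simplefilter("error", DeprecationWarning)

# From here on, any deprecated call or attribute access that would
# normally just log a warning raises DeprecationWarning instead.
```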
@@ -127,7 +127,7 @@ def load_tf_weights_in_funnel(model, config, tf_checkpoint_path):
         skipped = False
         for m_name in name[1:]:
             if not isinstance(pointer, FunnelPositionwiseFFN) and re.fullmatch(r"layer_\d+", m_name):
-                layer_index = int(re.search("layer_(\d+)", m_name).groups()[0])
+                layer_index = int(re.search(r"layer_(\d+)", m_name).groups()[0])
                 if layer_index < config.num_hidden_layers:
                     block_idx = 0
                     while layer_index >= config.block_sizes[block_idx]:
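
The warning fixed here comes from `\d` appearing in a plain (non-raw) string literal, which Python 3.6+ flags as an invalid escape sequence when the module is compiled. A minimal sketch of the before/after, using a made-up input string:

```python
import re

# Before: "\d" in a plain string literal is an invalid escape sequence,
# so compiling this line emits a DeprecationWarning (a SyntaxWarning on
# newer interpreters). The regex still matches, which is why it is easy
# to miss.
layer_index = int(re.search("layer_(\d+)", "layer_7").groups()[0])

# After: a raw string passes the backslash through explicitly and
# compiles without any warning.
layer_index = int(re.search(r"layer_(\d+)", "layer_7").groups()[0])
assert layer_index == 7
```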
@@ -699,7 +699,7 @@ class TFConv1D(tf.keras.layers.Layer):
 
 
 class TFSharedEmbeddings(tf.keras.layers.Layer):
-    """
+    r"""
     Construct shared token embeddings.
 
     The weights of the embedding layer is usually shared with the weights of the linear decoder when doing
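
Docstrings are ordinary string literals, so backslash escapes inside them (common in Sphinx `:math:` markup) trigger the same invalid-escape warning; the `r` prefix fixes it. A minimal sketch with a hypothetical stand-in class:

```python
class SharedEmbeddingsSketch:  # hypothetical stand-in for TFSharedEmbeddings
    r"""
    Construct shared token embeddings.

    Without the ``r`` prefix, an escape such as :math:`\sqrt{d}` in this
    docstring would be an invalid sequence and emit a DeprecationWarning
    when the module is byte-compiled.
    """
```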
@@ -156,7 +156,7 @@ class TokenizerTesterMixin:
         tokenizers = self.get_tokenizers()
         for tokenizer in tokenizers:
             with self.subTest(f"{tokenizer.__class__.__name__}"):
-                self.assertNotEqual(tokenizer.max_len, 42)
+                self.assertNotEqual(tokenizer.model_max_length, 42)
 
         # Now let's start the test
         tokenizers = self.get_tokenizers()
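
`tokenizer.max_len` was deprecated in favor of `tokenizer.model_max_length` (accessing the old attribute emitted a warning before it was removed). A minimal sketch of the updated access, assuming a standard pretrained tokenizer is available:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")

# Deprecated spelling, warned and was later removed:
#     tokenizer.max_len
# Current spelling, as used in the updated test above:
print(tokenizer.model_max_length)  # 512 for bert-base-uncased
```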