Unverified Commit 599db139 authored by Anthony MOI

Code style update

parent 835b76a4
@@ -22,7 +22,7 @@ import unicodedata
 
 import tokenizers as tk
 
-from .tokenization_utils import PreTrainedTokenizerFast, PreTrainedTokenizer
+from .tokenization_utils import PreTrainedTokenizer, PreTrainedTokenizerFast
 
 logger = logging.getLogger(__name__)
@@ -583,9 +583,7 @@ class BertTokenizerFast(PreTrainedTokenizerFast):
             )
         )
         if max_length is not None:
-            self._tokenizer.with_truncation(max_length,
-                                            stride=stride,
-                                            strategy=truncation_strategy)
+            self._tokenizer.with_truncation(max_length, stride=stride, strategy=truncation_strategy)
         self._tokenizer.with_padding(
             max_length=max_length if pad_to_max_length else None,
             direction=self.padding_side,
...
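For context, the with_truncation / with_padding calls reformatted above configure truncation and padding on the underlying tokenizers tokenizer. A minimal standalone sketch of the same configuration, assuming the current tokenizers API, where these methods are named enable_truncation / enable_padding (the checkpoint name and lengths below are illustrative, not from this commit):

    from tokenizers import Tokenizer

    # Illustrative checkpoint; any pretrained fast tokenizer would do.
    tokenizer = Tokenizer.from_pretrained("bert-base-uncased")

    # Counterpart of with_truncation(max_length, stride=stride, strategy=truncation_strategy).
    tokenizer.enable_truncation(max_length=512, stride=0, strategy="longest_first")

    # Counterpart of with_padding(max_length=..., direction=self.padding_side, ...).
    tokenizer.enable_padding(direction="right", length=512)

    encoding = tokenizer.encode("Hello, world!")
    print(len(encoding.ids))  # 512: a short input is padded up to the configured length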
@@ -23,7 +23,7 @@ from functools import lru_cache
 
 import regex as re
 import tokenizers as tk
 
-from .tokenization_utils import PreTrainedTokenizerFast, PreTrainedTokenizer
+from .tokenization_utils import PreTrainedTokenizer, PreTrainedTokenizerFast
 
 logger = logging.getLogger(__name__)
@@ -277,9 +277,7 @@ class GPT2TokenizerFast(PreTrainedTokenizerFast):
         self._tokenizer.with_pre_tokenizer(tk.pre_tokenizers.ByteLevel.new(add_prefix_space=add_prefix_space))
         self._tokenizer.with_decoder(tk.decoders.ByteLevel.new())
         if max_length:
-            self._tokenizer.with_truncation(max_length,
-                                            stride=stride,
-                                            strategy=truncation_strategy)
+            self._tokenizer.with_truncation(max_length, stride=stride, strategy=truncation_strategy)
         self._tokenizer.with_padding(
             max_length=max_length if pad_to_max_length else None,
             direction=self.padding_side,
...
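The GPT-2 hunk also shows the byte-level pre-tokenizer and decoder wiring around the reformatted call. A sketch of that setup under the current tokenizers API, where the pre-tokenizer and decoder are assigned as properties rather than via with_* / .new() calls (the vocab/merges paths are hypothetical placeholders):

    from tokenizers import Tokenizer, decoders, pre_tokenizers
    from tokenizers.models import BPE

    # Hypothetical paths; any GPT-2-style BPE vocab/merges pair works.
    tokenizer = Tokenizer(BPE.from_file("vocab.json", "merges.txt"))

    # Counterparts of with_pre_tokenizer(tk.pre_tokenizers.ByteLevel.new(add_prefix_space=...))
    # and with_decoder(tk.decoders.ByteLevel.new()).
    tokenizer.pre_tokenizer = pre_tokenizers.ByteLevel(add_prefix_space=False)
    tokenizer.decoder = decoders.ByteLevel()

    ids = tokenizer.encode("Hello, world!").ids
    print(tokenizer.decode(ids))  # round-trips to "Hello, world!"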