Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
chenpangpang
transformers
Commits
6494910f
Unverified
Commit
6494910f
authored
Nov 19, 2020
by
Sylvain Gugger
Committed by
GitHub
Nov 19, 2020
Browse files
Add sentencepiece to the CI and fix tests (#8672)
* Fix the CI and tests * Fix quality * Remove that m from nowhere
parent
0ad45e10
Changes
6
Hide whitespace changes
Inline
Side-by-side
Showing
6 changed files
with
15 additions
and
22 deletions
+15
-22
.circleci/config.yml
.circleci/config.yml
+9
-9
src/transformers/models/mbart/tokenization_mbart.py
src/transformers/models/mbart/tokenization_mbart.py
+1
-1
src/transformers/models/mbart/tokenization_mbart_fast.py
src/transformers/models/mbart/tokenization_mbart_fast.py
+1
-1
src/transformers/models/t5/tokenization_t5.py
src/transformers/models/t5/tokenization_t5.py
+1
-1
src/transformers/models/t5/tokenization_t5_fast.py
src/transformers/models/t5/tokenization_t5_fast.py
+1
-1
tests/test_tokenization_mbart.py
tests/test_tokenization_mbart.py
+2
-9
No files found.
.circleci/config.yml
View file @
6494910f
...
...
@@ -77,7 +77,7 @@ jobs:
-
v0.4-torch_and_tf-{{ checksum "setup.py" }}
-
v0.4-{{ checksum "setup.py" }}
-
run
:
pip install --upgrade pip
-
run
:
pip install .[sklearn,tf-cpu,torch,testing]
-
run
:
pip install .[sklearn,tf-cpu,torch,testing
,sentencepiece
]
-
save_cache
:
key
:
v0.4-{{ checksum "setup.py" }}
paths
:
...
...
@@ -103,7 +103,7 @@ jobs:
-
v0.4-torch-{{ checksum "setup.py" }}
-
v0.4-{{ checksum "setup.py" }}
-
run
:
pip install --upgrade pip
-
run
:
pip install .[sklearn,torch,testing]
-
run
:
pip install .[sklearn,torch,testing
,sentencepiece
]
-
save_cache
:
key
:
v0.4-torch-{{ checksum "setup.py" }}
paths
:
...
...
@@ -129,7 +129,7 @@ jobs:
-
v0.4-tf-{{ checksum "setup.py" }}
-
v0.4-{{ checksum "setup.py" }}
-
run
:
pip install --upgrade pip
-
run
:
pip install .[sklearn,tf-cpu,testing]
-
run
:
pip install .[sklearn,tf-cpu,testing
,sentencepiece
]
-
save_cache
:
key
:
v0.4-tf-{{ checksum "setup.py" }}
paths
:
...
...
@@ -155,7 +155,7 @@ jobs:
-
v0.4-flax-{{ checksum "setup.py" }}
-
v0.4-{{ checksum "setup.py" }}
-
run
:
pip install --upgrade pip
-
run
:
sudo pip install .[flax,sklearn,torch,testing]
-
run
:
sudo pip install .[flax,sklearn,torch,testing
,sentencepiece
]
-
save_cache
:
key
:
v0.4-flax-{{ checksum "setup.py" }}
paths
:
...
...
@@ -181,7 +181,7 @@ jobs:
-
v0.4-torch-{{ checksum "setup.py" }}
-
v0.4-{{ checksum "setup.py" }}
-
run
:
pip install --upgrade pip
-
run
:
pip install .[sklearn,torch,testing]
-
run
:
pip install .[sklearn,torch,testing
,sentencepiece
]
-
save_cache
:
key
:
v0.4-torch-{{ checksum "setup.py" }}
paths
:
...
...
@@ -207,7 +207,7 @@ jobs:
-
v0.4-tf-{{ checksum "setup.py" }}
-
v0.4-{{ checksum "setup.py" }}
-
run
:
pip install --upgrade pip
-
run
:
pip install .[sklearn,tf-cpu,testing]
-
run
:
pip install .[sklearn,tf-cpu,testing
,sentencepiece
]
-
save_cache
:
key
:
v0.4-tf-{{ checksum "setup.py" }}
paths
:
...
...
@@ -231,7 +231,7 @@ jobs:
-
v0.4-custom_tokenizers-{{ checksum "setup.py" }}
-
v0.4-{{ checksum "setup.py" }}
-
run
:
pip install --upgrade pip
-
run
:
pip install .[ja,testing]
-
run
:
pip install .[ja,testing
,sentencepiece
]
-
run
:
python -m unidic download
-
save_cache
:
key
:
v0.4-custom_tokenizers-{{ checksum "setup.py" }}
...
...
@@ -258,7 +258,7 @@ jobs:
-
v0.4-torch_examples-{{ checksum "setup.py" }}
-
v0.4-{{ checksum "setup.py" }}
-
run
:
pip install --upgrade pip
-
run
:
pip install .[sklearn,torch,testing]
-
run
:
pip install .[sklearn,torch,
sentencepiece,
testing]
-
run
:
pip install -r examples/requirements.txt
-
save_cache
:
key
:
v0.4-torch_examples-{{ checksum "setup.py" }}
...
...
@@ -324,7 +324,7 @@ jobs:
-
v0.4-{{ checksum "setup.py" }}
-
run
:
pip install --upgrade pip
-
run
:
pip install isort
-
run
:
pip install .[
tf,torch,flax
,quality]
-
run
:
pip install .[
all
,quality]
-
save_cache
:
key
:
v0.4-code_quality-{{ checksum "setup.py" }}
paths
:
...
...
src/transformers/models/mbart/tokenization_mbart.py
View file @
6494910f
...
...
@@ -188,7 +188,7 @@ class MBartTokenizer(XLMRobertaTokenizer):
**
kwargs
,
)
->
BatchEncoding
:
if
max_length
is
None
:
max_length
=
self
.
max_len
max_length
=
self
.
model_
max_len
gth
self
.
set_src_lang_special_tokens
(
src_lang
)
model_inputs
:
BatchEncoding
=
self
(
src_texts
,
...
...
src/transformers/models/mbart/tokenization_mbart_fast.py
View file @
6494910f
...
...
@@ -185,7 +185,7 @@ class MBartTokenizerFast(XLMRobertaTokenizerFast):
**
kwargs
,
)
->
BatchEncoding
:
if
max_length
is
None
:
max_length
=
self
.
max_len
max_length
=
self
.
model_
max_len
gth
self
.
set_src_lang_special_tokens
(
src_lang
)
model_inputs
:
BatchEncoding
=
self
(
src_texts
,
...
...
src/transformers/models/t5/tokenization_t5.py
View file @
6494910f
...
...
@@ -309,7 +309,7 @@ class T5Tokenizer(PreTrainedTokenizer):
**
kwargs
,
)
->
BatchEncoding
:
if
max_length
is
None
:
max_length
=
self
.
max_len
max_length
=
self
.
model_
max_len
gth
model_inputs
=
self
(
src_texts
,
add_special_tokens
=
True
,
...
...
src/transformers/models/t5/tokenization_t5_fast.py
View file @
6494910f
...
...
@@ -226,7 +226,7 @@ class T5TokenizerFast(PreTrainedTokenizerFast):
**
kwargs
,
)
->
BatchEncoding
:
if
max_length
is
None
:
max_length
=
self
.
max_len
max_length
=
self
.
model_
max_len
gth
self
.
prefix_tokens
=
[]
model_inputs
=
self
(
src_texts
,
...
...
tests/test_tokenization_mbart.py
View file @
6494910f
import
tempfile
import
unittest
from
transformers
import
(
SPIECE_UNDERLINE
,
AutoTokenizer
,
BatchEncoding
,
MBartTokenizer
,
MBartTokenizerFast
,
is_torch_available
,
)
from
transformers
import
SPIECE_UNDERLINE
,
BatchEncoding
,
MBartTokenizer
,
MBartTokenizerFast
,
is_torch_available
from
transformers.testing_utils
import
(
_sentencepiece_available
,
require_sentencepiece
,
...
...
@@ -138,7 +131,7 @@ class MBartEnroIntegrationTest(unittest.TestCase):
@
classmethod
def
setUpClass
(
cls
):
cls
.
tokenizer
:
MBartTokenizer
=
Auto
Tokenizer
.
from_pretrained
(
cls
.
checkpoint_name
)
cls
.
tokenizer
:
MBartTokenizer
=
MBart
Tokenizer
.
from_pretrained
(
cls
.
checkpoint_name
)
cls
.
pad_token_id
=
1
return
cls
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment