Commit d2100428, authored Dec 10, 2019 by Julien Chaumond
Parent: 597ba7fe

    Update to new test infra and only run conditionally

Showing 3 changed files with 44 additions and 27 deletions:

    .circleci/config.yml                                      +10  -10
    transformers/tests/tokenization_bert_japanese_test.py      +4   -5
    transformers/tests/utils.py                               +30  -12
.circleci/config.yml

@@ -13,8 +13,6 @@ jobs:
             - run: sudo pip install --progress-bar off .
             - run: sudo pip install pytest codecov pytest-cov
             - run: sudo pip install tensorboardX scikit-learn
-            - run: sudo apt-get -y install libmecab-dev mecab mecab-ipadic-utf8 swig
-            - run: sudo pip install mecab-python3
             - run: python -m pytest -sv ./transformers/tests/ --cov
             - run: codecov
     build_py3_torch:
@@ -29,8 +27,6 @@ jobs:
             - run: sudo pip install --progress-bar off .
             - run: sudo pip install pytest codecov pytest-cov
             - run: sudo pip install tensorboardX scikit-learn
-            - run: sudo apt-get -y install libmecab-dev mecab mecab-ipadic-utf8 swig
-            - run: sudo pip install mecab-python3
             - run: python -m pytest -sv ./transformers/tests/ --cov
             - run: python -m pytest -sv ./examples/
             - run: codecov
@@ -46,8 +42,6 @@ jobs:
             - run: sudo pip install --progress-bar off .
             - run: sudo pip install pytest codecov pytest-cov
             - run: sudo pip install tensorboardX scikit-learn
-            - run: sudo apt-get -y install libmecab-dev mecab mecab-ipadic-utf8 swig
-            - run: sudo pip install mecab-python3
             - run: python -m pytest -sv ./transformers/tests/ --cov
             - run: codecov
     build_py2_torch:
@@ -61,8 +55,6 @@ jobs:
             - run: sudo pip install torch
             - run: sudo pip install --progress-bar off .
             - run: sudo pip install pytest codecov pytest-cov
-            - run: sudo apt-get -y install libmecab-dev mecab mecab-ipadic-utf8 swig
-            - run: sudo pip install mecab-python
             - run: python -m pytest -sv ./transformers/tests/ --cov
             - run: codecov
     build_py2_tf:
@@ -76,10 +68,18 @@ jobs:
             - run: sudo pip install tensorflow
             - run: sudo pip install --progress-bar off .
             - run: sudo pip install pytest codecov pytest-cov
-            - run: sudo apt-get -y install libmecab-dev mecab mecab-ipadic-utf8 swig
-            - run: sudo pip install mecab-python
             - run: python -m pytest -sv ./transformers/tests/ --cov
             - run: codecov
+    build_py3_custom_tokenizers:
+        working_directory: ~/transformers
+        docker:
+            - image: circleci/python:3.5
+        steps:
+            - checkout
+            - run: sudo pip install --progress-bar off .
+            - run: sudo pip install pytest
+            - run: sudo pip install mecab-python3
+            - run: python -m pytest -sv ./transformers/tests/tokenization_bert_japanese_test.py
     deploy_doc:
         working_directory: ~/transformers
         docker:
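A rough local equivalent of the new build_py3_custom_tokenizers job, as an illustrative Python sketch rather than anything from this commit: install the dependencies, then run only the Japanese tokenizer test file. Setting RUN_CUSTOM_TOKENIZERS here is an assumption about how the gate is meant to be flipped; the job definition above does not show it being exported.

# Illustrative stand-in for the build_py3_custom_tokenizers job (not part of this commit).
# RUN_CUSTOM_TOKENIZERS is the flag read by transformers/tests/utils.py; exporting a
# truthy value here is an assumption about how the gated tests are intended to be enabled.
import os
import subprocess

env = dict(os.environ, RUN_CUSTOM_TOKENIZERS="yes")
subprocess.check_call(
    ["python", "-m", "pytest", "-sv",
     "./transformers/tests/tokenization_bert_japanese_test.py"],
    env=env,
)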
transformers/tests/tokenization_bert_japanese_test.py

@@ -16,7 +16,6 @@ from __future__ import absolute_import, division, print_function, unicode_literals

 import os
 import unittest
-import pytest
 from io import open

 from transformers.tokenization_bert import WordpieceTokenizer
@@ -25,8 +24,10 @@ from transformers.tokenization_bert_japanese import (BertJapaneseTokenizer,
                                                      VOCAB_FILES_NAMES)

 from .tokenization_tests_commons import CommonTestCases
+from .utils import slow, custom_tokenizers


+@custom_tokenizers
 class BertJapaneseTokenizationTest(CommonTestCases.CommonTokenizerTester):

     tokenizer_class = BertJapaneseTokenizer
@@ -104,7 +105,7 @@ class BertJapaneseTokenizationTest(CommonTestCases.CommonTokenizerTester):
         self.assertListEqual(tokenizer.tokenize(u"こんばんは こんばんにちは こんにちは"),
                              [u"こん", u"##ばんは", u"[UNK]", u"こんにちは"])

-    @pytest.mark.slow
+    @slow
     def test_sequence_builders(self):
         tokenizer = self.tokenizer_class.from_pretrained("bert-base-japanese")

@@ -172,7 +173,7 @@ class BertJapaneseCharacterTokenizationTest(CommonTestCases.CommonTokenizerTester):
         self.assertListEqual(tokenizer.tokenize(u"こんにちほ"),
                              [u"こ", u"ん", u"に", u"ち", u"[UNK]"])

-    @pytest.mark.slow
+    @slow
     def test_sequence_builders(self):
         tokenizer = self.tokenizer_class.from_pretrained("bert-base-japanese-char")

@@ -188,5 +189,3 @@ class BertJapaneseCharacterTokenizationTest(CommonTestCases.CommonTokenizerTester):
-if __name__ == '__main__':
-    unittest.main()
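A minimal sketch of how the two decorators compose after this change; the class and test names below are hypothetical, and it assumes the tests package of this era is importable as transformers.tests. The whole class is skipped unless RUN_CUSTOM_TOKENIZERS is truthy, and the marked method is additionally skipped unless RUN_SLOW is truthy.

# Hypothetical example, not from this commit: gating a tokenizer test class with the
# new decorators. @custom_tokenizers skips the entire class unless RUN_CUSTOM_TOKENIZERS
# is set; @slow additionally skips the marked test unless RUN_SLOW is set. Both flags
# are read when transformers/tests/utils.py is imported.
import unittest

from transformers.tests.utils import custom_tokenizers, slow  # assumed import path


@custom_tokenizers
class ExampleCustomTokenizerTest(unittest.TestCase):

    def test_lightweight_check(self):
        # Runs whenever RUN_CUSTOM_TOKENIZERS is truthy.
        self.assertEqual("こん" + "にちは", "こんにちは")

    @slow
    def test_needs_pretrained_download(self):
        # Runs only when both RUN_CUSTOM_TOKENIZERS and RUN_SLOW are truthy.
        pass


if __name__ == "__main__":
    unittest.main()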
transformers/tests/utils.py

@@ -6,18 +6,23 @@ from distutils.util import strtobool
 from transformers.file_utils import _tf_available, _torch_available


-try:
-    run_slow = os.environ["RUN_SLOW"]
-except KeyError:
-    # RUN_SLOW isn't set, default to skipping slow tests.
-    _run_slow_tests = False
-else:
-    # RUN_SLOW is set, convert it to True or False.
+def parse_flag_from_env(key, default=False):
     try:
-        _run_slow_tests = strtobool(run_slow)
-    except ValueError:
-        # More values are supported, but let's keep the message simple.
-        raise ValueError("If set, RUN_SLOW must be yes or no.")
+        value = os.environ[key]
+    except KeyError:
+        # KEY isn't set, default to `default`.
+        _value = default
+    else:
+        # KEY is set, convert it to True or False.
+        try:
+            _value = strtobool(value)
+        except ValueError:
+            # More values are supported, but let's keep the message simple.
+            raise ValueError("If set, {} must be yes or no.".format(key))
+    return _value
+
+_run_slow_tests = parse_flag_from_env("RUN_SLOW", default=False)
+_run_custom_tokenizers = parse_flag_from_env("RUN_CUSTOM_TOKENIZERS", default=False)


 def slow(test_case):
@@ -33,6 +38,19 @@ def slow(test_case):
     return test_case


+def custom_tokenizers(test_case):
+    """
+    Decorator marking a test for a custom tokenizer.
+
+    Custom tokenizers require additional dependencies, and are skipped
+    by default. Set the RUN_CUSTOM_TOKENIZERS environment variable
+    to a truthy value to run them.
+    """
+    if not _run_custom_tokenizers:
+        test_case = unittest.skip("test of custom tokenizers")(test_case)
+    return test_case
+
+
 def require_torch(test_case):
     """
     Decorator marking a test that requires PyTorch.
@@ -59,6 +77,6 @@ def require_tf(test_case):

 if _torch_available:
     # Set the USE_CUDA environment variable to select a GPU.
-    torch_device = "cuda" if os.environ.get("USE_CUDA") else "cpu"
+    torch_device = "cuda" if parse_flag_from_env("USE_CUDA") else "cpu"
 else:
     torch_device = None
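A quick illustration, not part of the commit, of the three cases the new parse_flag_from_env helper handles. The condensed function below mirrors the helper added above instead of importing it, so the snippet runs standalone: an unset variable falls back to the default, yes/no-style strings go through distutils.util.strtobool (which returns 1 or 0), and anything else raises ValueError.

# Condensed stand-in for the parse_flag_from_env helper added above (illustrative only).
import os
from distutils.util import strtobool


def parse_flag_from_env(key, default=False):
    try:
        value = os.environ[key]
    except KeyError:
        # Variable isn't set: fall back to the default.
        return default
    try:
        # strtobool accepts yes/no, y/n, true/false, on/off, 1/0 and returns 1 or 0.
        return strtobool(value)
    except ValueError:
        raise ValueError("If set, {} must be yes or no.".format(key))


os.environ.pop("RUN_SLOW", None)
assert parse_flag_from_env("RUN_SLOW") is False   # unset -> default

os.environ["RUN_SLOW"] = "yes"
assert parse_flag_from_env("RUN_SLOW") == 1       # truthy string -> 1

os.environ["RUN_SLOW"] = "maybe"
try:
    parse_flag_from_env("RUN_SLOW")
except ValueError as exc:
    print(exc)                                    # If set, RUN_SLOW must be yes or no.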