chenpangpang / transformers · Commits

Commit 13deb95a (unverified)
Authored Jul 01, 2020 by Sam Shleifer; committed by GitHub on Jul 01, 2020
Move tests/utils.py -> transformers/testing_utils.py (#5350)
parent 9c219305

Changes: 66 (showing 6 changed files with 6 additions and 8 deletions, +6 / -8)
tests/test_tokenization_transfo_xl.py    +1 -1
tests/test_tokenization_utils.py         +1 -2
tests/test_tokenization_xlm.py           +1 -1
tests/test_tokenization_xlm_roberta.py   +1 -1
tests/test_tokenization_xlnet.py         +1 -1
tests/test_trainer.py                    +1 -2
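After this change, test helpers such as require_torch, require_tf, and slow are imported from the installable transformers.testing_utils module rather than the repo-local tests/utils.py. A minimal sketch of a test using the relocated helpers (the ExampleTest class and its test bodies are illustrative only, not part of this commit):

import unittest

from transformers import is_torch_available
from transformers.testing_utils import require_torch, slow


class ExampleTest(unittest.TestCase):
    @require_torch  # skipped when PyTorch is not installed
    def test_torch_dependent_behavior(self):
        # hypothetical test body; real tests would build models or tokenizers here
        self.assertTrue(is_torch_available())

    @slow  # only runs when slow tests are opted in (e.g. RUN_SLOW=1)
    def test_pretrained_download(self):
        self.assertTrue(True)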
tests/test_tokenization_transfo_xl.py

@@ -18,9 +18,9 @@ import os
 import unittest

 from transformers import is_torch_available
+from transformers.testing_utils import require_torch

 from .test_tokenization_common import TokenizerTesterMixin
-from .utils import require_torch


 if is_torch_available():
tests/test_tokenization_utils.py

@@ -17,10 +17,9 @@ import unittest
 from typing import Callable, Optional

 from transformers import BatchEncoding, BertTokenizer, BertTokenizerFast, PreTrainedTokenizer, TensorType
+from transformers.testing_utils import require_tf, require_torch, slow
 from transformers.tokenization_gpt2 import GPT2Tokenizer
-from .utils import require_tf, require_torch, slow


 class TokenizerUtilsTest(unittest.TestCase):
     def check_tokenizer_from_pretrained(self, tokenizer_class):
tests/test_tokenization_xlm.py

@@ -18,10 +18,10 @@ import json
 import os
 import unittest

+from transformers.testing_utils import slow
 from transformers.tokenization_xlm import VOCAB_FILES_NAMES, XLMTokenizer

 from .test_tokenization_common import TokenizerTesterMixin
-from .utils import slow


 class XLMTokenizationTest(TokenizerTesterMixin, unittest.TestCase):
tests/test_tokenization_xlm_roberta.py

@@ -18,10 +18,10 @@ import os
 import unittest

 from transformers.file_utils import cached_property
+from transformers.testing_utils import slow
 from transformers.tokenization_xlm_roberta import SPIECE_UNDERLINE, XLMRobertaTokenizer

 from .test_tokenization_common import TokenizerTesterMixin
-from .utils import slow


 SAMPLE_VOCAB = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures/test_sentencepiece.model")
tests/test_tokenization_xlnet.py

@@ -17,10 +17,10 @@
 import os
 import unittest

+from transformers.testing_utils import slow
 from transformers.tokenization_xlnet import SPIECE_UNDERLINE, XLNetTokenizer

 from .test_tokenization_common import TokenizerTesterMixin
-from .utils import slow


 SAMPLE_VOCAB = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures/test_sentencepiece.model")
tests/test_trainer.py

 import unittest

 from transformers import AutoTokenizer, TrainingArguments, is_torch_available
-from .utils import require_torch
+from transformers.testing_utils import require_torch


 if is_torch_available():
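The new transformers/testing_utils.py module itself is not among the files shown on this page. Conceptually, its decorators skip tests when an optional dependency or opt-in flag is missing; a rough, illustrative sketch of that idea (not the file's actual contents):

import unittest

from transformers import is_torch_available


def require_torch(test_case):
    """Skip the decorated test when PyTorch is unavailable (illustrative sketch only)."""
    if not is_torch_available():
        return unittest.skip("test requires PyTorch")(test_case)
    return test_case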