chenpangpang / transformers

Commit 75a23d24, authored Dec 22, 2019 by Aymeric Augustin

Remove import fallbacks.

parent 798b3b38
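The fallback being removed is the usual Python 2/3 compatibility idiom: try the Python 3 import and, on ImportError, fall back to a backport package or a hand-rolled shim. A minimal before/after sketch of the change, using the unittest.mock/mock pair from test_examples.py:

# Before: import fallback so the code also runs on Python 2.
try:
    from unittest.mock import patch  # built into Python 3.3+
except ImportError:
    from mock import patch  # `mock` backport package for Python 2

# After: the codebase is Python 3 only, so the import is unconditional.
from unittest.mock import patch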
Showing 4 changed files with 3 additions and 29 deletions (+3 -29)
examples/test_examples.py                  +1 -7
src/transformers/file_utils.py             +1 -4
src/transformers/tokenization_gpt2.py      +1 -9
src/transformers/tokenization_roberta.py   +0 -9
examples/test_examples.py

@@ -18,19 +18,13 @@ import argparse
 import logging
 import sys
 import unittest
+from unittest.mock import patch

 import run_generation
 import run_glue
 import run_squad

-try:
-    # python 3.4+ can use builtin unittest.mock instead of mock package
-    from unittest.mock import patch
-except ImportError:
-    from mock import patch

 logging.basicConfig(level=logging.DEBUG)

 logger = logging.getLogger()
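With the fallback gone, test_examples.py imports patch unconditionally from unittest.mock. For context, a self-contained sketch of how patch is typically used in such example tests to fake command-line arguments; the test class and arguments below are hypothetical, not the repository's actual tests:

# Hypothetical sketch: temporarily replace sys.argv while exercising a
# script-style entry point, the usual role of patch in these tests.
import sys
import unittest
from unittest.mock import patch


class ArgvPatchingExample(unittest.TestCase):
    def test_argv_is_patched(self):
        testargs = ["run_glue.py", "--model_name_or_path", "bert-base-uncased"]
        with patch.object(sys, "argv", testargs):
            # Inside the context manager, the fake argv is visible.
            self.assertEqual(sys.argv[0], "run_glue.py")
        # Outside it, the original argv is restored automatically.
        self.assertNotEqual(sys.argv[0], "run_glue.py")


if __name__ == "__main__":
    unittest.main()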
src/transformers/file_utils.py

@@ -15,6 +15,7 @@ from contextlib import contextmanager
 from functools import partial, wraps
 from hashlib import sha256
 from io import open
+from urllib.parse import urlparse

 import boto3
 import requests

@@ -65,10 +66,6 @@ except ImportError:
     )

 default_cache_path = os.path.join(torch_cache_home, "transformers")

-try:
-    from urllib.parse import urlparse
-except ImportError:
-    from urlparse import urlparse

 try:
     from pathlib import Path
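file_utils.py now gets urlparse directly from urllib.parse (the Python 2 urlparse module fallback is gone). A small illustration of what urlparse provides when telling remote URLs apart from local paths, the kind of check a cached-download helper performs; the helper name below is illustrative, not the module's actual API:

# Illustrative only: urllib.parse.urlparse extracts the URL scheme, e.g. to
# decide between an S3 download, an HTTP(S) download, or a local file.
from urllib.parse import urlparse


def scheme_of(url_or_filename: str) -> str:
    return urlparse(url_or_filename).scheme


assert scheme_of("s3://my-bucket/model.bin") == "s3"
assert scheme_of("https://example.com/model.bin") == "https"
assert scheme_of("/local/path/to/model.bin") == ""  # plain filesystem path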
src/transformers/tokenization_gpt2.py

@@ -18,6 +18,7 @@
 import json
 import logging
 import os
+from functools import lru_cache
 from io import open

 import regex as re

@@ -25,15 +26,6 @@ import regex as re
 from .tokenization_utils import PreTrainedTokenizer

-try:
-    from functools import lru_cache
-except ImportError:
-    # Just a dummy decorator to get the checks to run on python2
-    # because honestly I don't want to support a byte-level unicode BPE tokenizer on python 2 right now.
-    def lru_cache():
-        return lambda func: func

 logger = logging.getLogger(__name__)

 VOCAB_FILES_NAMES = {
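tokenization_gpt2.py now takes lru_cache straight from functools (available since Python 3.2) instead of defining the Python 2 dummy decorator. The tokenizer invokes it with no arguments, which is why the old shim had to return lambda func: func. A small sketch of that zero-argument decorator usage, with an illustrative cached helper standing in for the tokenizer's real mapping function:

# Illustrative sketch of the @lru_cache() usage pattern the shim emulated.
from functools import lru_cache


@lru_cache()  # zero-argument form: results are cached per distinct call
def byte_to_unicode_table():
    # Stand-in for the tokenizer's byte-to-unicode mapping, built only once.
    return {b: chr(b) for b in range(256)}


table = byte_to_unicode_table()
assert byte_to_unicode_table() is table  # the second call hits the cache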
src/transformers/tokenization_roberta.py

@@ -20,15 +20,6 @@ import logging
 from .tokenization_gpt2 import GPT2Tokenizer

-try:
-    from functools import lru_cache
-except ImportError:
-    # Just a dummy decorator to get the checks to run on python2
-    # because honestly I don't want to support a byte-level unicode BPE tokenizer on python 2 right now.
-    def lru_cache():
-        return lambda func: func

 logger = logging.getLogger(__name__)

 VOCAB_FILES_NAMES = {