Commit 1c62e87b authored by Aymeric Augustin

Use built-in open().

On Python 3, `open is io.open`.
parent d6eaf4e6
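
A minimal sketch of the equivalence the commit message relies on: on Python 3 the built-in `open()` is the same object as `io.open()`, so the explicit import removed below is redundant. The sample file path is illustrative, not taken from this commit.

```python
import io
import tempfile

# On Python 3 the built-in open() and io.open() are the same function,
# so `from io import open` is a no-op import and can be dropped.
assert open is io.open

# Illustrative usage: reading a small vocab-style file works identically
# with the built-in open().
with tempfile.NamedTemporaryFile("w", suffix=".txt", delete=False, encoding="utf-8") as f:
    f.write("[PAD]\n[UNK]\n")
    path = f.name

with open(path, encoding="utf-8") as reader:
    tokens = reader.read().splitlines()
print(tokens)  # ['[PAD]', '[UNK]']
```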
@@ -16,7 +16,6 @@
import os
import unittest
-from io import open
from transformers.tokenization_bert import VOCAB_FILES_NAMES, XxxTokenizer
...
@@ -18,7 +18,6 @@
import collections
import logging
import os
-from io import open
from .tokenization_utils import PreTrainedTokenizer
...
@@ -16,7 +16,6 @@
import os
import unittest
-from io import open
from transformers.tokenization_bert import (
    VOCAB_FILES_NAMES,
...
@@ -16,7 +16,6 @@
import os
import unittest
-from io import open
from transformers.tokenization_bert import WordpieceTokenizer
from transformers.tokenization_bert_japanese import (
...
@@ -18,7 +18,6 @@ import os
import pickle
import shutil
import tempfile
-from io import open
class TokenizerTesterMixin:
...
@@ -16,7 +16,6 @@
import json
import os
import unittest
-from io import open
from transformers.tokenization_ctrl import VOCAB_FILES_NAMES, CTRLTokenizer
...
@@ -17,7 +17,6 @@
import json
import os
import unittest
-from io import open
from transformers.tokenization_gpt2 import VOCAB_FILES_NAMES, GPT2Tokenizer
...
@@ -17,7 +17,6 @@
import json
import os
import unittest
-from io import open
from transformers.tokenization_roberta import VOCAB_FILES_NAMES, RobertaTokenizer
...
@@ -16,7 +16,6 @@
import os
import unittest
-from io import open
from transformers import is_torch_available
...