Unverified commit 4975002d authored by Sylvain Gugger, committed by GitHub

Reorganize file utils (#16264)

* Split file_utils into several submodules

* Fixes

* Add back more objects

* More fixes

* Who exactly decided to import that from there?

* Apply second suggestion from code review

* Revert wrong move

* Fix imports

* Adapt all imports

* Adapt all imports everywhere

* Revert this import, will fix in a separate commit
parent 71356034
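For downstream code, the change is a pure move of the import path: helpers that previously lived in transformers.file_utils are now exposed from transformers.utils, as every hunk below shows. A minimal compatibility sketch for scripts that must run on both sides of the split, using get_full_repo_name and is_offline_mode (two helpers that appear in the diffs); the try/except pattern itself is a suggestion, not part of this commit:

try:
    # New location introduced by this commit.
    from transformers.utils import get_full_repo_name, is_offline_mode
except ImportError:
    # Old location, for installed versions that predate this reorganization.
    from transformers.file_utils import get_full_repo_name, is_offline_mode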
@@ -71,7 +71,7 @@ You are not required to read the following guidelines before opening an issue. H
   File "/transformers/src/transformers/__init__.py", line 34, in <module>
     from . import dependency_versions_check
   File "/transformers/src/transformers/dependency_versions_check.py", line 34, in <module>
-    from .file_utils import is_tokenizers_available
+    from .utils import is_tokenizers_available
   File "/transformers/src/transformers/file_utils.py", line 40, in <module>
     from tqdm.auto import tqdm
 ModuleNotFoundError: No module named 'tqdm.auto'
@@ -124,7 +124,7 @@ You are not required to read the following guidelines before opening an issue. H
   File "/transformers/src/transformers/__init__.py", line 34, in <module>
     from . import dependency_versions_check
   File "/transformers/src/transformers/dependency_versions_check.py", line 34, in <module>
-    from .file_utils import is_tokenizers_available
+    from .utils import is_tokenizers_available
   File "/transformers/src/transformers/file_utils.py", line 40, in <module>
     from tqdm.auto import tqdm
 ModuleNotFoundError: No module named 'tqdm.auto'

@@ -52,7 +52,7 @@ from transformers import (
     HfArgumentParser,
     is_tensorboard_available,
 )
-from transformers.file_utils import get_full_repo_name, is_offline_mode
+from transformers.utils import get_full_repo_name, is_offline_mode

 logger = logging.getLogger(__name__)

@@ -57,8 +57,8 @@ from transformers import (
     is_tensorboard_available,
     set_seed,
 )
-from transformers.file_utils import get_full_repo_name
 from transformers.testing_utils import CaptureLogger
+from transformers.utils import get_full_repo_name

 logger = logging.getLogger(__name__)

@@ -58,7 +58,7 @@ from transformers import (
     is_tensorboard_available,
     set_seed,
 )
-from transformers.file_utils import get_full_repo_name
+from transformers.utils import get_full_repo_name

 MODEL_CONFIG_CLASSES = list(FLAX_MODEL_FOR_MASKED_LM_MAPPING.keys())

@@ -56,8 +56,8 @@ from transformers import (
     is_tensorboard_available,
     set_seed,
 )
-from transformers.file_utils import get_full_repo_name
 from transformers.models.t5.modeling_flax_t5 import shift_tokens_right
+from transformers.utils import get_full_repo_name

 MODEL_CONFIG_CLASSES = list(FLAX_MODEL_FOR_MASKED_LM_MAPPING.keys())

@@ -53,8 +53,7 @@ from transformers import (
     PreTrainedTokenizerFast,
     is_tensorboard_available,
 )
-from transformers.file_utils import get_full_repo_name
-from transformers.utils import check_min_version
+from transformers.utils import check_min_version, get_full_repo_name

 from utils_qa import postprocess_qa_predictions

@@ -54,7 +54,7 @@ from transformers import (
     HfArgumentParser,
     is_tensorboard_available,
 )
-from transformers.file_utils import get_full_repo_name, is_offline_mode
+from transformers.utils import get_full_repo_name, is_offline_mode

 logger = logging.getLogger(__name__)

@@ -48,8 +48,7 @@ from transformers import (
     TrainingArguments,
     is_tensorboard_available,
 )
-from transformers.file_utils import get_full_repo_name
-from transformers.utils import check_min_version
+from transformers.utils import check_min_version, get_full_repo_name

 logger = logging.getLogger(__name__)

@@ -47,8 +47,7 @@ from transformers import (
     HfArgumentParser,
     is_tensorboard_available,
 )
-from transformers.file_utils import get_full_repo_name
-from transformers.utils import check_min_version
+from transformers.utils import check_min_version, get_full_repo_name
 from transformers.utils.versions import require_version

@@ -53,7 +53,7 @@ from transformers import (
     is_tensorboard_available,
     set_seed,
 )
-from transformers.file_utils import get_full_repo_name
+from transformers.utils import get_full_repo_name

 logger = logging.getLogger(__name__)

@@ -16,9 +16,9 @@ import os
 import tempfile
 import unittest

-from transformers.file_utils import cached_property
 from transformers.models.marian.convert_marian_tatoeba_to_pytorch import DEFAULT_REPO, TatoebaConverter
 from transformers.testing_utils import slow
+from transformers.utils import cached_property

 @unittest.skipUnless(os.path.exists(DEFAULT_REPO), "Tatoeba directory does not exist.")

@@ -19,7 +19,6 @@ from torch import nn
 from torch.utils.data import DistributedSampler, RandomSampler

 from transformers import PreTrainedModel, Trainer, logging
-from transformers.file_utils import is_torch_tpu_available
 from transformers.integrations import is_fairscale_available
 from transformers.models.fsmt.configuration_fsmt import FSMTConfig
 from transformers.optimization import (
@@ -34,6 +33,7 @@ from transformers.optimization import (
 )
 from transformers.trainer_pt_utils import get_tpu_sampler
 from transformers.training_args import ParallelMode
+from transformers.utils import is_torch_tpu_available

 if is_fairscale_available():

@@ -34,8 +34,8 @@ from torch.utils.data import Dataset, Sampler
 from sentence_splitter import add_newline_to_end_of_each_sentence
 from transformers import BartTokenizer, EvalPrediction, PreTrainedTokenizer, T5Tokenizer
-from transformers.file_utils import cached_property
 from transformers.models.bart.modeling_bart import shift_tokens_right
+from transformers.utils import cached_property

 try:

@@ -51,7 +51,7 @@ from transformers import (
     get_scheduler,
     set_seed,
 )
-from transformers.file_utils import get_full_repo_name
+from transformers.utils import get_full_repo_name
 from transformers.utils.versions import require_version

@@ -51,7 +51,7 @@ from transformers import (
     get_scheduler,
     set_seed,
 )
-from transformers.file_utils import get_full_repo_name
+from transformers.utils import get_full_repo_name
 from transformers.utils.versions import require_version

@@ -41,10 +41,9 @@ from transformers import (
     default_data_collator,
     set_seed,
 )
-from transformers.file_utils import PaddingStrategy
 from transformers.tokenization_utils_base import PreTrainedTokenizerBase
 from transformers.trainer_utils import get_last_checkpoint
-from transformers.utils import check_min_version
+from transformers.utils import PaddingStrategy, check_min_version

 # Will error if the minimal version of Transformers is not installed. Remove at your own risks.

@@ -50,7 +50,7 @@ from transformers import (
     get_scheduler,
     set_seed,
 )
-from transformers.file_utils import PaddingStrategy, get_full_repo_name
+from transformers.utils import PaddingStrategy, get_full_repo_name

 logger = logging.getLogger(__name__)

@@ -47,8 +47,7 @@ from transformers import (
     get_scheduler,
     set_seed,
 )
-from transformers.file_utils import get_full_repo_name
-from transformers.utils import check_min_version
+from transformers.utils import check_min_version, get_full_repo_name
 from transformers.utils.versions import require_version
 from utils_qa import postprocess_qa_predictions_with_beam_search

@@ -49,8 +49,7 @@ from transformers import (
     get_scheduler,
     set_seed,
 )
-from transformers.file_utils import get_full_repo_name
-from transformers.utils import check_min_version
+from transformers.utils import check_min_version, get_full_repo_name
 from transformers.utils.versions import require_version
 from utils_qa import postprocess_qa_predictions

@@ -42,8 +42,8 @@ from transformers import (
     is_wandb_available,
     set_seed,
 )
-from transformers.file_utils import get_full_repo_name
 from transformers.models.wav2vec2.modeling_wav2vec2 import _compute_mask_indices, _sample_negative_indices
+from transformers.utils import get_full_repo_name

 logger = logging.getLogger(__name__)
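A path move like this is usually made non-breaking by leaving a thin re-export shim at the old location, so that third-party "from transformers.file_utils import ..." statements keep working, typically with a deprecation warning. The hunks above do not show the contents of file_utils.py itself, so the following is only an illustrative sketch of that pattern, not the commit's actual code:

# file_utils.py -- hypothetical backward-compatibility shim (illustration only).
import warnings

# Re-export everything from the new home so old import paths keep resolving.
from .utils import *  # noqa: F401,F403

warnings.warn(
    "transformers.file_utils is deprecated; import from transformers.utils instead.",
    FutureWarning,
)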