Unverified Commit 4975002d authored by Sylvain Gugger's avatar Sylvain Gugger Committed by GitHub
Browse files

Reorganize file utils (#16264)

* Split file_utils in several submodules

* Fixes

* Add back more objects

* More fixes

* Who exactly decided to import that from there?

* Second suggestion to code with code review

* Revert wrong move

* Fix imports

* Adapt all imports

* Adapt all imports everywhere

* Revert this import, will fix in a separate commit
parent 71356034
......@@ -22,9 +22,8 @@ from typing import Any, Dict, List, Optional, Tuple
import sentencepiece as spm
from ...file_utils import SPIECE_UNDERLINE
from ...tokenization_utils import AddedToken, PreTrainedTokenizer
from ...utils import logging
from ...utils import SPIECE_UNDERLINE, logging
logger = logging.get_logger(__name__)
......
......@@ -19,10 +19,9 @@ import os
from shutil import copyfile
from typing import List, Optional, Tuple
from ...file_utils import is_sentencepiece_available
from ...tokenization_utils import AddedToken
from ...tokenization_utils_fast import PreTrainedTokenizerFast
from ...utils import logging
from ...utils import is_sentencepiece_available, logging
if is_sentencepiece_available():
......
......@@ -18,7 +18,7 @@
from typing import TYPE_CHECKING
# rely on isort to merge the imports
from ...file_utils import _LazyModule, is_tokenizers_available, is_torch_available
from ...utils import _LazyModule, is_tokenizers_available, is_torch_available
_import_structure = {
......
......@@ -26,7 +26,6 @@ from torch import nn
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
from ...activations import ACT2FN
from ...file_utils import add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward
from ...modeling_outputs import (
BaseModelOutputWithCrossAttentions,
MaskedLMOutput,
......@@ -41,7 +40,7 @@ from ...modeling_utils import (
find_pruneable_heads_and_indices,
prune_linear_layer,
)
from ...utils import logging
from ...utils import add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging
from .configuration_yoso import YosoConfig
......
......@@ -15,7 +15,7 @@
from typing import TYPE_CHECKING
from ..file_utils import _LazyModule
from ..utils import _LazyModule
_import_structure = {
......
......@@ -21,8 +21,7 @@ from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Mapping,
import numpy as np
from packaging import version
from ..file_utils import TensorType, is_torch_available, is_vision_available
from ..utils import logging
from ..utils import TensorType, is_torch_available, is_vision_available, logging
from .utils import ParameterFormat, compute_effective_axis_dimension, compute_serialized_parameters_size
......@@ -204,7 +203,7 @@ class OnnxConfig(ABC):
`bool`: Whether the installed version of PyTorch is compatible with the model.
"""
if is_torch_available():
from transformers.file_utils import torch_version
from transformers.utils import torch_version
return torch_version >= self.torch_onnx_minimum_version
else:
......
......@@ -21,9 +21,14 @@ from typing import TYPE_CHECKING, Iterable, List, Tuple, Union
import numpy as np
from packaging.version import Version, parse
from ..file_utils import TensorType, is_tf_available, is_torch_available, is_torch_onnx_dict_inputs_support_available
from ..tokenization_utils_base import PreTrainedTokenizerBase
from ..utils import logging
from ..utils import (
TensorType,
is_tf_available,
is_torch_available,
is_torch_onnx_dict_inputs_support_available,
logging,
)
from .config import OnnxConfig
......@@ -294,7 +299,7 @@ def export(
preprocessor = tokenizer
if is_torch_available():
from ..file_utils import torch_version
from ..utils import torch_version
if not is_torch_onnx_dict_inputs_support_available():
raise AssertionError(f"Unsupported PyTorch version, minimum required is 1.8.0, got: {torch_version}")
......
......@@ -25,12 +25,11 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
from ..configuration_utils import PretrainedConfig
from ..feature_extraction_utils import PreTrainedFeatureExtractor
from ..file_utils import http_get, is_tf_available, is_torch_available
from ..models.auto.configuration_auto import AutoConfig
from ..models.auto.feature_extraction_auto import FEATURE_EXTRACTOR_MAPPING, AutoFeatureExtractor
from ..models.auto.tokenization_auto import TOKENIZER_MAPPING, AutoTokenizer
from ..tokenization_utils import PreTrainedTokenizer
from ..utils import logging
from ..utils import http_get, is_tf_available, is_torch_available, logging
from .audio_classification import AudioClassificationPipeline
from .automatic_speech_recognition import AutomaticSpeechRecognitionPipeline
from .base import (
......
......@@ -16,8 +16,7 @@ from typing import Union
import numpy as np
from ..file_utils import add_end_docstrings, is_torch_available
from ..utils import logging
from ..utils import add_end_docstrings, is_torch_available, logging
from .base import PIPELINE_INIT_ARGS, Pipeline
......
......@@ -16,8 +16,7 @@ from typing import TYPE_CHECKING, Dict, Optional, Union
import numpy as np
from ..file_utils import is_torch_available
from ..utils import logging
from ..utils import is_torch_available, logging
from .audio_utils import ffmpeg_read
from .base import ChunkPipeline
......
......@@ -30,11 +30,10 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
from packaging import version
from ..feature_extraction_utils import PreTrainedFeatureExtractor
from ..file_utils import ModelOutput, add_end_docstrings, is_tf_available, is_torch_available
from ..modelcard import ModelCard
from ..models.auto.configuration_auto import AutoConfig
from ..tokenization_utils import PreTrainedTokenizer
from ..utils import logging
from ..utils import ModelOutput, add_end_docstrings, is_tf_available, is_torch_available, logging
GenericTensor = Union[List["GenericTensor"], "torch.Tensor", "tf.Tensor"]
......
import uuid
from typing import Any, Dict, List, Optional, Union
from ..file_utils import add_end_docstrings, is_tf_available, is_torch_available
from ..utils import logging
from ..utils import add_end_docstrings, is_tf_available, is_torch_available, logging
from .base import PIPELINE_INIT_ARGS, Pipeline
......
......@@ -2,8 +2,7 @@ from typing import Dict
import numpy as np
from ..file_utils import add_end_docstrings, is_tf_available, is_torch_available
from ..utils import logging
from ..utils import add_end_docstrings, is_tf_available, is_torch_available, logging
from .base import PIPELINE_INIT_ARGS, GenericTensor, Pipeline, PipelineException
......
from typing import List, Union
from ..file_utils import (
from ..utils import (
add_end_docstrings,
is_tf_available,
is_torch_available,
is_vision_available,
logging,
requires_backends,
)
from ..utils import logging
from .base import PIPELINE_INIT_ARGS, Pipeline
......
......@@ -2,8 +2,7 @@ from typing import Any, Dict, List, Union
import numpy as np
from ..file_utils import add_end_docstrings, is_torch_available, is_vision_available, requires_backends
from ..utils import logging
from ..utils import add_end_docstrings, is_torch_available, is_vision_available, logging, requires_backends
from .base import PIPELINE_INIT_ARGS, Pipeline
......
from typing import Any, Dict, List, Union
from ..file_utils import add_end_docstrings, is_torch_available, is_vision_available, requires_backends
from ..utils import logging
from ..utils import add_end_docstrings, is_torch_available, is_vision_available, logging, requires_backends
from .base import PIPELINE_INIT_ARGS, Pipeline
......
......@@ -5,10 +5,9 @@ from typing import TYPE_CHECKING, Dict, List, Optional, Tuple, Union
import numpy as np
from ..data import SquadExample, SquadFeatures, squad_convert_examples_to_features
from ..file_utils import PaddingStrategy, add_end_docstrings, is_tf_available, is_torch_available
from ..modelcard import ModelCard
from ..tokenization_utils import PreTrainedTokenizer
from ..utils import logging
from ..utils import PaddingStrategy, add_end_docstrings, is_tf_available, is_torch_available, logging
from .base import PIPELINE_INIT_ARGS, ArgumentHandler, ChunkPipeline
......
......@@ -3,7 +3,7 @@ import types
import numpy as np
from ..file_utils import (
from ..utils import (
add_end_docstrings,
is_tensorflow_probability_available,
is_tf_available,
......
import enum
from ..file_utils import add_end_docstrings, is_tf_available, is_torch_available
from ..tokenization_utils import TruncationStrategy
from ..utils import logging
from ..utils import add_end_docstrings, is_tf_available, is_torch_available, logging
from .base import PIPELINE_INIT_ARGS, Pipeline
......
......@@ -2,7 +2,7 @@ from typing import Dict
import numpy as np
from ..file_utils import ExplicitEnum, add_end_docstrings, is_tf_available, is_torch_available
from ..utils import ExplicitEnum, add_end_docstrings, is_tf_available, is_torch_available
from .base import PIPELINE_INIT_ARGS, GenericTensor, Pipeline
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment