Unverified commit e84bf1f7 authored by Yih-Dar, committed by GitHub
Browse files

Time to say goodbye to py37 (#24091)



* fix

---------
Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
parent 12240925
......@@ -16,7 +16,6 @@ requirements:
- pip
- numpy >=1.17
- dataclasses
- importlib_metadata
- huggingface_hub
- packaging
- filelock
......@@ -31,7 +30,6 @@ requirements:
- python
- numpy >=1.17
- dataclasses
- importlib_metadata
- huggingface_hub
- packaging
- filelock
......
......@@ -17,7 +17,7 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v4
with:
python-version: 3.7
python-version: 3.8
- name: Install requirements
run: |
......
......@@ -130,7 +130,7 @@ You will need basic `git` proficiency to contribute to
manual. Type `git --help` in a shell and enjoy! If you prefer books, [Pro
Git](https://git-scm.com/book/en/v2) is a very good reference.
You'll need **[Python 3.7]((https://github.com/huggingface/transformers/blob/main/setup.py#L426))** or above to contribute to 🤗 Transformers. Follow the steps below to start contributing:
You'll need **[Python 3.8](https://github.com/huggingface/transformers/blob/main/setup.py#L426)** or above to contribute to 🤗 Transformers. Follow the steps below to start contributing:
1. Fork the [repository](https://github.com/huggingface/transformers) by
clicking on the **[Fork](https://github.com/huggingface/transformers/fork)** button on the repository's page. This creates a copy of the code
......
......@@ -149,7 +149,7 @@ _deps = [
"pytest>=7.2.0",
"pytest-timeout",
"pytest-xdist",
"python>=3.7.0",
"python>=3.8.0",
"ray[tune]",
"regex!=2019.12.17",
"requests",
......@@ -413,7 +413,6 @@ extras["agents"] = deps_list(
# when modifying the following list, make sure to update src/transformers/dependency_versions_check.py
install_requires = [
deps["importlib_metadata"] + ";python_version<'3.8'", # importlib_metadata for Python versions that don't have it
deps["filelock"], # filesystem locks, e.g., to prevent parallel downloads
deps["huggingface-hub"],
deps["numpy"],
......@@ -444,7 +443,7 @@ setup(
zip_safe=False,
extras_require=extras,
entry_points={"console_scripts": ["transformers-cli=transformers.commands.transformers_cli:main"]},
python_requires=">=3.7.0",
python_requires=">=3.8.0",
install_requires=install_requires,
classifiers=[
"Development Status :: 5 - Production/Stable",
......@@ -454,7 +453,6 @@ setup(
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
......
......@@ -11,7 +11,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from .dependency_versions_table import deps
from .utils.versions import require_version, require_version_core
......@@ -38,9 +37,6 @@ pkgs_to_check_at_runtime = [
"pyyaml",
]
if sys.version_info < (3, 8):
pkgs_to_check_at_runtime.append("importlib_metadata")
for pkg in pkgs_to_check_at_runtime:
if pkg in deps:
if pkg == "tokenizers":
......
......@@ -54,7 +54,7 @@ deps = {
"pytest": "pytest>=7.2.0",
"pytest-timeout": "pytest-timeout",
"pytest-xdist": "pytest-xdist",
"python": "python>=3.7.0",
"python": "python>=3.8.0",
"ray[tune]": "ray[tune]",
"regex": "regex!=2019.12.17",
"requests": "requests",
......
......@@ -21,19 +21,11 @@ from copy import copy
from enum import Enum
from inspect import isclass
from pathlib import Path
from typing import Any, Callable, Dict, Iterable, List, NewType, Optional, Tuple, Union, get_type_hints
from typing import Any, Callable, Dict, Iterable, List, Literal, NewType, Optional, Tuple, Union, get_type_hints
import yaml
try:
# For Python versions <3.8, Literal is not in typing: https://peps.python.org/pep-0586/
from typing import Literal
except ImportError:
# For Python 3.7
from typing_extensions import Literal
DataClass = NewType("DataClass", Any)
DataClassType = NewType("DataClassType", Any)
......
......@@ -15,6 +15,7 @@
Integrations with other Python libraries.
"""
import functools
import importlib.metadata
import importlib.util
import json
import numbers
......@@ -31,7 +32,6 @@ import numpy as np
from . import __version__ as version
from .utils import flatten_dict, is_datasets_available, is_pandas_available, is_torch_available, logging
from .utils.versions import importlib_metadata
logger = logging.get_logger(__name__)
......@@ -59,13 +59,13 @@ _has_neptune = (
)
if TYPE_CHECKING and _has_neptune:
try:
_neptune_version = importlib_metadata.version("neptune")
_neptune_version = importlib.metadata.version("neptune")
logger.info(f"Neptune version {_neptune_version} available.")
except importlib_metadata.PackageNotFoundError:
except importlib.metadata.PackageNotFoundError:
try:
_neptune_version = importlib_metadata.version("neptune-client")
_neptune_version = importlib.metadata.version("neptune-client")
logger.info(f"Neptune-client version {_neptune_version} available.")
except importlib_metadata.PackageNotFoundError:
except importlib.metadata.PackageNotFoundError:
_has_neptune = False
from .trainer_callback import ProgressCallback, TrainerCallback # noqa: E402
......@@ -367,10 +367,8 @@ def run_hp_search_ray(trainer, n_trials: int, direction: str, **kwargs) -> BestR
def run_hp_search_sigopt(trainer, n_trials: int, direction: str, **kwargs) -> BestRun:
import sigopt
from transformers.utils.versions import importlib_metadata
if trainer.args.process_index == 0:
if importlib_metadata.version("sigopt") >= "8.0.0":
if importlib.metadata.version("sigopt") >= "8.0.0":
sigopt.set_project("huggingface")
experiment = sigopt.create_experiment(
......
......@@ -15,6 +15,7 @@
# limitations under the License.
import collections
import gc
import importlib.metadata
import inspect
import json
import os
......@@ -73,7 +74,7 @@ from .utils import (
replace_return_docstrings,
)
from .utils.hub import convert_file_size_to_int, get_checkpoint_shard_files
from .utils.import_utils import ENV_VARS_TRUE_VALUES, importlib_metadata, is_sagemaker_mp_enabled
from .utils.import_utils import ENV_VARS_TRUE_VALUES, is_sagemaker_mp_enabled
from .utils.quantization_config import BitsAndBytesConfig
from .utils.versions import require_version_core
......@@ -2203,7 +2204,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix
use_safetensors = False
if is_bitsandbytes_available():
is_8bit_serializable = version.parse(importlib_metadata.version("bitsandbytes")) > version.parse("0.37.2")
is_8bit_serializable = version.parse(importlib.metadata.version("bitsandbytes")) > version.parse("0.37.2")
else:
is_8bit_serializable = False
......@@ -2738,7 +2739,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix
modules_to_not_convert.extend(keys_on_cpu)
supports_4bit = version.parse(importlib_metadata.version("bitsandbytes")) >= version.parse("0.39.0")
supports_4bit = version.parse(importlib.metadata.version("bitsandbytes")) >= version.parse("0.39.0")
if load_in_4bit and not supports_4bit:
raise ValueError(
......@@ -2751,7 +2752,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix
)
# training in 8-bit is only available in 0.37.0+
model._is_quantized_training_enabled = version.parse(
importlib_metadata.version("bitsandbytes")
importlib.metadata.version("bitsandbytes")
) >= version.parse("0.37.0")
model.config.quantization_config = quantization_config
......@@ -2786,7 +2787,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix
target_dtype = torch_dtype
if load_in_4bit:
if version.parse(importlib_metadata.version("accelerate")) > version.parse("0.19.0"):
if version.parse(importlib.metadata.version("accelerate")) > version.parse("0.19.0"):
from accelerate.utils import CustomDtype
target_dtype = CustomDtype.INT4
......
import importlib.metadata
import warnings
from copy import deepcopy
from packaging import version
from ..utils import logging
from .import_utils import importlib_metadata, is_accelerate_available, is_bitsandbytes_available
from .import_utils import is_accelerate_available, is_bitsandbytes_available
if is_bitsandbytes_available():
......@@ -75,7 +76,7 @@ def set_module_quantized_tensor_to_device(module, tensor_name, device, value=Non
elif isinstance(value, torch.Tensor):
new_value = value.to("cpu")
if value.dtype == torch.int8:
is_8bit_serializable = version.parse(importlib_metadata.version("bitsandbytes")) > version.parse(
is_8bit_serializable = version.parse(importlib.metadata.version("bitsandbytes")) > version.parse(
"0.37.2"
)
if not is_8bit_serializable:
......
......@@ -15,6 +15,7 @@
Import utilities: Utilities related to imports and our lazy inits.
"""
import importlib.metadata
import importlib.util
import json
import os
......@@ -31,7 +32,6 @@ from typing import Any, Tuple, Union
from packaging import version
from . import logging
from .versions import importlib_metadata
logger = logging.get_logger(__name__) # pylint: disable=invalid-name
......@@ -44,9 +44,9 @@ def _is_package_available(pkg_name: str, return_version: bool = False) -> Union[
package_version = "N/A"
if package_exists:
try:
package_version = importlib_metadata.version(pkg_name)
package_version = importlib.metadata.version(pkg_name)
package_exists = True
except importlib_metadata.PackageNotFoundError:
except importlib.metadata.PackageNotFoundError:
package_exists = False
logger.debug(f"Detected {pkg_name} version {package_version}")
if return_version:
......@@ -71,7 +71,7 @@ TORCH_FX_REQUIRED_VERSION = version.parse("1.10")
_accelerate_available, _accelerate_version = _is_package_available("accelerate", return_version=True)
_apex_available = _is_package_available("apex")
_bitsandbytes_available = _is_package_available("bitsandbytes")
# `importlib_metadata.version` doesn't work with `bs4` but `beautifulsoup4`. For `importlib.util.find_spec`, reversed.
# `importlib.metadata.version` doesn't work with `bs4` but `beautifulsoup4`. For `importlib.util.find_spec`, reversed.
_bs4_available = importlib.util.find_spec("bs4") is not None
_coloredlogs_available = _is_package_available("coloredlogs")
_datasets_available = _is_package_available("datasets")
......@@ -80,13 +80,13 @@ _detectron2_available = _is_package_available("detectron2")
# We need to check both `faiss` and `faiss-cpu`.
_faiss_available = importlib.util.find_spec("faiss") is not None
try:
_faiss_version = importlib_metadata.version("faiss")
_faiss_version = importlib.metadata.version("faiss")
logger.debug(f"Successfully imported faiss version {_faiss_version}")
except importlib_metadata.PackageNotFoundError:
except importlib.metadata.PackageNotFoundError:
try:
_faiss_version = importlib_metadata.version("faiss-cpu")
_faiss_version = importlib.metadata.version("faiss-cpu")
logger.debug(f"Successfully imported faiss version {_faiss_version}")
except importlib_metadata.PackageNotFoundError:
except importlib.metadata.PackageNotFoundError:
_faiss_available = False
_ftfy_available = _is_package_available("ftfy")
_ipex_available, _ipex_version = _is_package_available("intel_extension_for_pytorch", return_version=True)
......@@ -115,8 +115,8 @@ _sentencepiece_available = _is_package_available("sentencepiece")
_sklearn_available = importlib.util.find_spec("sklearn") is not None
if _sklearn_available:
try:
importlib_metadata.version("scikit-learn")
except importlib_metadata.PackageNotFoundError:
importlib.metadata.version("scikit-learn")
except importlib.metadata.PackageNotFoundError:
_sklearn_available = False
_smdistributed_available = importlib.util.find_spec("smdistributed") is not None
_soundfile_available = _is_package_available("soundfile")
......@@ -168,9 +168,9 @@ else:
# For the metadata, we have to look for both tensorflow and tensorflow-cpu
for pkg in candidates:
try:
_tf_version = importlib_metadata.version(pkg)
_tf_version = importlib.metadata.version(pkg)
break
except importlib_metadata.PackageNotFoundError:
except importlib.metadata.PackageNotFoundError:
pass
_tf_available = _tf_version is not None
if _tf_available:
......@@ -189,9 +189,9 @@ _is_ccl_available = (
or importlib.util.find_spec("oneccl_bindings_for_pytorch") is not None
)
try:
ccl_version = importlib_metadata.version("oneccl_bind_pt")
ccl_version = importlib.metadata.version("oneccl_bind_pt")
logger.debug(f"Detected oneccl_bind_pt version {ccl_version}")
except importlib_metadata.PackageNotFoundError:
except importlib.metadata.PackageNotFoundError:
_is_ccl_available = False
......@@ -530,8 +530,8 @@ def is_vision_available():
_pil_available = importlib.util.find_spec("PIL") is not None
if _pil_available:
try:
package_version = importlib_metadata.version("Pillow")
except importlib_metadata.PackageNotFoundError:
package_version = importlib.metadata.version("Pillow")
except importlib.metadata.PackageNotFoundError:
return False
logger.debug(f"Detected PIL version {package_version}")
return _pil_available
......
......@@ -15,6 +15,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import importlib.metadata
import json
import os
from dataclasses import dataclass
......@@ -23,7 +24,6 @@ from typing import Any, Dict, Union
from packaging import version
from ..utils import is_torch_available, logging
from ..utils.import_utils import importlib_metadata
if is_torch_available():
......@@ -141,7 +141,7 @@ class BitsAndBytesConfig:
if not isinstance(self.bnb_4bit_use_double_quant, bool):
raise ValueError("bnb_4bit_use_double_quant must be a boolean")
if self.load_in_4bit and not version.parse(importlib_metadata.version("bitsandbytes")) >= version.parse(
if self.load_in_4bit and not version.parse(importlib.metadata.version("bitsandbytes")) >= version.parse(
"0.39.0"
):
raise ValueError(
......
......@@ -15,6 +15,7 @@
Utilities for working with package versions
"""
import importlib.metadata
import operator
import re
import sys
......@@ -23,13 +24,6 @@ from typing import Optional
from packaging import version
# The package importlib_metadata is in a different place, depending on the python version.
if sys.version_info < (3, 8):
import importlib_metadata
else:
import importlib.metadata as importlib_metadata
ops = {
"<": operator.lt,
"<=": operator.le,
......@@ -56,7 +50,7 @@ def require_version(requirement: str, hint: Optional[str] = None) -> None:
"""
Perform a runtime check of the dependency versions, using the exact same syntax used by pip.
The installed module version comes from the *site-packages* dir via *importlib_metadata*.
The installed module version comes from the *site-packages* dir via *importlib.metadata*.
Args:
requirement (`str`): pip style definition, e.g., "tokenizers==0.9.4", "tqdm>=4.27", "numpy"
......@@ -105,9 +99,9 @@ def require_version(requirement: str, hint: Optional[str] = None) -> None:
# check if any version is installed
try:
got_ver = importlib_metadata.version(pkg)
except importlib_metadata.PackageNotFoundError:
raise importlib_metadata.PackageNotFoundError(
got_ver = importlib.metadata.version(pkg)
except importlib.metadata.PackageNotFoundError:
raise importlib.metadata.PackageNotFoundError(
f"The '{requirement}' distribution was not found and is required by this application. {hint}"
)
......
......@@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import gc
import importlib.metadata
import tempfile
import unittest
......@@ -36,7 +37,6 @@ from transformers.testing_utils import (
require_torch_multi_gpu,
slow,
)
from transformers.utils.versions import importlib_metadata
def get_some_linear_layer(model):
......@@ -446,7 +446,7 @@ class Bnb4BitTestTraining(Base4bitTest):
super().setUp()
def test_training(self):
if version.parse(importlib_metadata.version("bitsandbytes")) < version.parse("0.37.0"):
if version.parse(importlib.metadata.version("bitsandbytes")) < version.parse("0.37.0"):
return
# Step 1: freeze all parameters
......
......@@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import gc
import importlib.metadata
import tempfile
import unittest
......@@ -38,7 +39,6 @@ from transformers.testing_utils import (
require_torch_multi_gpu,
slow,
)
from transformers.utils.versions import importlib_metadata
def get_some_linear_layer(model):
......@@ -722,7 +722,7 @@ class MixedInt8TestTraining(BaseMixedInt8Test):
super().setUp()
def test_training(self):
if version.parse(importlib_metadata.version("bitsandbytes")) < version.parse("0.37.0"):
if version.parse(importlib.metadata.version("bitsandbytes")) < version.parse("0.37.0"):
return
# Step 1: freeze all parameters
......
......@@ -22,7 +22,7 @@ from argparse import Namespace
from dataclasses import dataclass, field
from enum import Enum
from pathlib import Path
from typing import List, Optional
from typing import List, Literal, Optional
import yaml
......@@ -30,13 +30,6 @@ from transformers import HfArgumentParser, TrainingArguments
from transformers.hf_argparser import make_choice_type_function, string_to_bool
try:
# For Python versions <3.8, Literal is not in typing: https://peps.python.org/pep-0586/
from typing import Literal
except ImportError:
# For Python 3.7
from typing_extensions import Literal
# Since Python 3.10, we can use the builtin `|` operator for Union types
# See PEP 604: https://peps.python.org/pep-0604
is_python_no_less_than_3_10 = sys.version_info >= (3, 10)
......
......@@ -12,13 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import importlib.metadata
import sys
from transformers.testing_utils import TestCasePlus
from transformers.utils.versions import importlib_metadata, require_version, require_version_core
from transformers.utils.versions import require_version, require_version_core
numpy_ver = importlib_metadata.version("numpy")
numpy_ver = importlib.metadata.version("numpy")
python_ver = ".".join([str(x) for x in sys.version_info[:3]])
......@@ -65,7 +66,7 @@ class DependencyVersionCheckTest(TestCasePlus):
for req in ["numpipypie>1", "numpipypie2"]:
try:
require_version_core(req)
except importlib_metadata.PackageNotFoundError as e:
except importlib.metadata.PackageNotFoundError as e:
self.assertIn(f"The '{req}' distribution was not found and is required by this application", str(e))
self.assertIn("Try: pip install transformers -U", str(e))
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment