Unverified Commit f9f1f2ac authored by Younes Belkada, committed by GitHub

[`HFQuantizer`] Remove `check_packages_compatibility` logic (#28789)

remove `check_packages_compatibility` logic
parent ae0c27ad
@@ -15,7 +15,6 @@ from abc import ABC, abstractmethod
 from typing import TYPE_CHECKING, Any, Dict, Optional, Union
 
 from ..utils import is_torch_available
-from ..utils.import_utils import _is_package_available
 from ..utils.quantization_config import QuantizationConfigMixin
@@ -64,8 +63,6 @@ class HfQuantizer(ABC):
                 f"pass `pre_quantized=True` while knowing what you are doing."
             )
-        self.check_packages_compatibility()
 
     def update_torch_dtype(self, torch_dtype: "torch.dtype") -> "torch.dtype":
         """
         Some quantization methods require to explicitly set the dtype of the model to a
@@ -152,25 +149,6 @@ class HfQuantizer(ABC):
         """
         return
 
-    def check_packages_compatibility(self):
-        """
-        Check the compatibility of the quantizer with respect to the current environment. Loops over all packages
-        name under `self.required_packages` and checks if that package is available.
-        """
-        if self.required_packages is not None:
-            non_available_packages = []
-            for package_name in self.required_packages:
-                is_package_available = _is_package_available(package_name)
-                if not is_package_available:
-                    non_available_packages.append(package_name)
-            if len(non_available_packages) > 0:
-                raise ValueError(
-                    f"The packages {self.required_packages} are required to use {self.__class__.__name__}"
-                    f" the following packages are missing in your environment: {non_available_packages}, please make sure"
-                    f" to install them in order to use the quantizer."
-                )
 
     def preprocess_model(self, model: "PreTrainedModel", **kwargs):
         """
         Setting model attributes and/or converting model before weights loading. At this point
...
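Note on the removal: with `check_packages_compatibility` gone from the base class, there is no longer a generic loop over `self.required_packages`; each quantizer subclass is expected to verify its own dependencies. The sketch below is illustrative only, not the library's actual implementation: `MyQuantizer`, its `required_packages` values, and the `validate_environment` hook usage are assumptions, and it uses the standard-library `importlib.util.find_spec` instead of the internal `_is_package_available` helper.

```python
# Minimal sketch (assumed, not the transformers implementation): a quantizer
# subclass checking its own required packages when its environment is validated.
import importlib.util
from typing import List, Optional


class MyQuantizer:
    # Hypothetical dependency list for this example.
    required_packages: Optional[List[str]] = ["bitsandbytes", "accelerate"]

    def validate_environment(self, *args, **kwargs) -> None:
        """Raise if any package this quantizer depends on is not installed."""
        missing = [
            name
            for name in (self.required_packages or [])
            if importlib.util.find_spec(name) is None
        ]
        if missing:
            raise ImportError(
                f"{self.__class__.__name__} requires {self.required_packages}, "
                f"but the following packages are missing: {missing}. "
                "Please install them to use this quantizer."
            )
```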