"docs/source/vscode:/vscode.git/clone" did not exist on "a244de579cbd964912aa46446a0a9b2a2f595c87"
Unverified commit ec5449f3, authored by Lucain, committed by GitHub

Support both huggingface_hub `v0.x` and `v1.x` (#12389)

* Support huggingface_hub 0.x and 1.x

* httpx
parent 310fdaf5
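
The diff below follows a single pattern: code that previously caught `requests.HTTPError` now catches `huggingface_hub.utils.HfHubHTTPError`, which is raised by both the requests-based `v0.x` and the httpx-based `v1.x` of huggingface_hub, and network-level failures are caught for both HTTP backends. A minimal sketch of that pattern, assembled from the exception tuple used in this diff (the helper name `fetch_model_info_or_none` is illustrative, not part of the PR):

import httpx
import requests
from huggingface_hub import model_info
from huggingface_hub.utils import HfHubHTTPError, OfflineModeIsEnabled

def fetch_model_info_or_none(repo_id, token=None, revision=None):
    # Works with huggingface_hub v0.x (requests backend) and v1.x (httpx backend):
    # HfHubHTTPError covers HTTP-level failures from either version, while the
    # requests/httpx connection errors cover network-level failures from each backend.
    try:
        return model_info(repo_id, token=token, revision=revision)
    except (HfHubHTTPError, OfflineModeIsEnabled, requests.ConnectionError, httpx.NetworkError) as e:
        print(f"Couldn't connect to the Hub: {e}. Falling back to the local cache.")
        return None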
@@ -102,7 +102,8 @@ _deps = [
     "filelock",
     "flax>=0.4.1",
     "hf-doc-builder>=0.3.0",
-    "huggingface-hub>=0.34.0",
+    "httpx<1.0.0",
+    "huggingface-hub>=0.34.0,<2.0",
     "requests-mock==1.10.0",
     "importlib_metadata",
     "invisible-watermark>=0.2.0",
@@ -259,6 +260,7 @@ extras["dev"] = (
 install_requires = [
     deps["importlib_metadata"],
     deps["filelock"],
+    deps["httpx"],
     deps["huggingface-hub"],
     deps["numpy"],
     deps["regex"],
...
@@ -30,11 +30,11 @@ import numpy as np
 from huggingface_hub import DDUFEntry, create_repo, hf_hub_download
 from huggingface_hub.utils import (
     EntryNotFoundError,
+    HfHubHTTPError,
     RepositoryNotFoundError,
     RevisionNotFoundError,
     validate_hf_hub_args,
 )
-from requests import HTTPError
 from typing_extensions import Self

 from . import __version__
@@ -419,7 +419,7 @@ class ConfigMixin:
             raise EnvironmentError(
                 f"{pretrained_model_name_or_path} does not appear to have a file named {cls.config_name}."
             )
-        except HTTPError as err:
+        except HfHubHTTPError as err:
             raise EnvironmentError(
                 "There was a specific connection error when trying to load"
                 f" {pretrained_model_name_or_path}:\n{err}"
...
@@ -9,7 +9,8 @@ deps = {
     "filelock": "filelock",
     "flax": "flax>=0.4.1",
     "hf-doc-builder": "hf-doc-builder>=0.3.0",
-    "huggingface-hub": "huggingface-hub>=0.34.0",
+    "httpx": "httpx<1.0.0",
+    "huggingface-hub": "huggingface-hub>=0.34.0,<2.0",
     "requests-mock": "requests-mock==1.10.0",
     "importlib_metadata": "importlib_metadata",
     "invisible-watermark": "invisible-watermark>=0.2.0",
...
@@ -26,11 +26,11 @@ from flax.traverse_util import flatten_dict, unflatten_dict
 from huggingface_hub import create_repo, hf_hub_download
 from huggingface_hub.utils import (
     EntryNotFoundError,
+    HfHubHTTPError,
     RepositoryNotFoundError,
     RevisionNotFoundError,
     validate_hf_hub_args,
 )
-from requests import HTTPError

 from .. import __version__, is_torch_available
 from ..utils import (
@@ -385,7 +385,7 @@ class FlaxModelMixin(PushToHubMixin):
             raise EnvironmentError(
                 f"{pretrained_model_name_or_path} does not appear to have a file named {FLAX_WEIGHTS_NAME}."
             )
-        except HTTPError as err:
+        except HfHubHTTPError as err:
             raise EnvironmentError(
                 f"There was a specific connection error when trying to load {pretrained_model_name_or_path}:\n"
                 f"{err}"
...
@@ -19,12 +19,12 @@ import warnings
 from pathlib import Path
 from typing import Any, Callable, Dict, List, Optional, Union

+import httpx
 import requests
 import torch
 from huggingface_hub import DDUFEntry, ModelCard, model_info, snapshot_download
-from huggingface_hub.utils import OfflineModeIsEnabled, validate_hf_hub_args
+from huggingface_hub.utils import HfHubHTTPError, OfflineModeIsEnabled, validate_hf_hub_args
 from packaging import version
-from requests.exceptions import HTTPError

 from .. import __version__
 from ..utils import (
@@ -1110,7 +1110,7 @@ def _download_dduf_file(
     if not local_files_only:
         try:
             info = model_info(pretrained_model_name, token=token, revision=revision)
-        except (HTTPError, OfflineModeIsEnabled, requests.ConnectionError) as e:
+        except (HfHubHTTPError, OfflineModeIsEnabled, requests.ConnectionError, httpx.NetworkError) as e:
             logger.warning(f"Couldn't connect to the Hub: {e}.\nWill try to load from local cache.")
             local_files_only = True
             model_info_call_error = e  # save error to reraise it if model is not cached locally
...
@@ -23,6 +23,7 @@ from dataclasses import dataclass
 from pathlib import Path
 from typing import Any, Callable, Dict, List, Optional, Union, get_args, get_origin

+import httpx
 import numpy as np
 import PIL.Image
 import requests
@@ -36,9 +37,8 @@ from huggingface_hub import (
     read_dduf_file,
     snapshot_download,
 )
-from huggingface_hub.utils import OfflineModeIsEnabled, validate_hf_hub_args
+from huggingface_hub.utils import HfHubHTTPError, OfflineModeIsEnabled, validate_hf_hub_args
 from packaging import version
-from requests.exceptions import HTTPError
 from tqdm.auto import tqdm
 from typing_extensions import Self
@@ -1616,7 +1616,7 @@ class DiffusionPipeline(ConfigMixin, PushToHubMixin):
         if not local_files_only:
             try:
                 info = model_info(pretrained_model_name, token=token, revision=revision)
-            except (HTTPError, OfflineModeIsEnabled, requests.ConnectionError) as e:
+            except (HfHubHTTPError, OfflineModeIsEnabled, requests.ConnectionError, httpx.NetworkError) as e:
                 logger.warning(f"Couldn't connect to the Hub: {e}.\nWill try to load from local cache.")
                 local_files_only = True
                 model_info_call_error = e  # save error to reraise it if model is not cached locally
...
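A note on the broadened `except` clauses above: under huggingface_hub `v1.x` the underlying HTTP client is httpx, whose connection-level failures all derive from `httpx.NetworkError`, so catching it alongside `requests.ConnectionError` handles the "Hub unreachable" case for both backends. A quick illustrative check (not part of the diff):

import httpx

# ConnectError (e.g. connection refused) and ReadError (dropped connection) are
# subclasses of httpx.NetworkError, so the single catch above covers them.
assert issubclass(httpx.ConnectError, httpx.NetworkError)
assert issubclass(httpx.ReadError, httpx.NetworkError)
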
@@ -38,13 +38,13 @@ from huggingface_hub.constants import HF_HUB_DISABLE_TELEMETRY, HF_HUB_OFFLINE
 from huggingface_hub.file_download import REGEX_COMMIT_HASH
 from huggingface_hub.utils import (
     EntryNotFoundError,
+    HfHubHTTPError,
     RepositoryNotFoundError,
     RevisionNotFoundError,
     is_jinja_available,
     validate_hf_hub_args,
 )
 from packaging import version
-from requests import HTTPError

 from .. import __version__
 from .constants import (
@@ -316,7 +316,7 @@ def _get_model_file(
             raise EnvironmentError(
                 f"{pretrained_model_name_or_path} does not appear to have a file named {weights_name}."
             ) from e
-        except HTTPError as e:
+        except HfHubHTTPError as e:
             raise EnvironmentError(
                 f"There was a specific connection error when trying to load {pretrained_model_name_or_path}:\n{e}"
             ) from e
@@ -432,7 +432,7 @@ def _get_checkpoint_shard_files(
         # We have already dealt with RepositoryNotFoundError and RevisionNotFoundError when getting the index, so
        # we don't have to catch them here. We have also dealt with EntryNotFoundError.
-        except HTTPError as e:
+        except HfHubHTTPError as e:
            raise EnvironmentError(
                f"We couldn't connect to '{HUGGINGFACE_CO_RESOLVE_ENDPOINT}' to load {pretrained_model_name_or_path}. You should try"
                " again after checking your internet connection."
...
@@ -37,9 +37,8 @@ import torch
 import torch.nn as nn
 from accelerate.utils.modeling import _get_proper_dtype, compute_module_sizes, dtype_byte_size
 from huggingface_hub import ModelCard, delete_repo, snapshot_download, try_to_load_from_cache
-from huggingface_hub.utils import is_jinja_available
+from huggingface_hub.utils import HfHubHTTPError, is_jinja_available
 from parameterized import parameterized
-from requests.exceptions import HTTPError

 from diffusers.models import FluxTransformer2DModel, SD3Transformer2DModel, UNet2DConditionModel
 from diffusers.models.attention_processor import (
@@ -272,7 +271,7 @@ class ModelUtilsTest(unittest.TestCase):
         response_mock = mock.Mock()
         response_mock.status_code = 500
         response_mock.headers = {}
-        response_mock.raise_for_status.side_effect = HTTPError
+        response_mock.raise_for_status.side_effect = HfHubHTTPError("Server down", response=mock.Mock())
         response_mock.json.return_value = {}

         # Download this model to make sure it's in the cache.
@@ -296,7 +295,7 @@ class ModelUtilsTest(unittest.TestCase):
         error_response = mock.Mock(
             status_code=500,
             headers={},
-            raise_for_status=mock.Mock(side_effect=HTTPError),
+            raise_for_status=mock.Mock(side_effect=HfHubHTTPError("Server down", response=mock.Mock())),
             json=mock.Mock(return_value={}),
         )
...
@@ -33,9 +33,9 @@ import safetensors.torch
 import torch
 import torch.nn as nn
 from huggingface_hub import snapshot_download
+from huggingface_hub.utils import HfHubHTTPError
 from parameterized import parameterized
 from PIL import Image
-from requests.exceptions import HTTPError
 from transformers import CLIPImageProcessor, CLIPModel, CLIPTextConfig, CLIPTextModel, CLIPTokenizer

 from diffusers import (
@@ -430,7 +430,7 @@ class DownloadTests(unittest.TestCase):
         response_mock = mock.Mock()
         response_mock.status_code = 500
         response_mock.headers = {}
-        response_mock.raise_for_status.side_effect = HTTPError
+        response_mock.raise_for_status.side_effect = HfHubHTTPError("Server down", response=mock.Mock())
         response_mock.json.return_value = {}

         # Download this model to make sure it's in the cache.
@@ -457,7 +457,7 @@ class DownloadTests(unittest.TestCase):
         response_mock = mock.Mock()
         response_mock.status_code = 500
         response_mock.headers = {}
-        response_mock.raise_for_status.side_effect = HTTPError
+        response_mock.raise_for_status.side_effect = HfHubHTTPError("Server down", response=mock.Mock())
         response_mock.json.return_value = {}

         # first check that with local files only the pipeline can only be used if cached
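
A note on the test changes: `requests.HTTPError` could be assigned to `side_effect` as a bare class because it can be raised with no constructor arguments, whereas `HfHubHTTPError` requires a message (the tests also pass a dummy `response`), so the mocks now assign a ready-made instance. A minimal sketch of the same idea outside the test suite (illustrative only):

from unittest import mock

from huggingface_hub.utils import HfHubHTTPError

# Assigning an exception *instance* as side_effect makes the mock raise it when called.
# An instance is needed here because HfHubHTTPError cannot be constructed without a message.
response_mock = mock.Mock()
response_mock.raise_for_status.side_effect = HfHubHTTPError("Server down", response=mock.Mock())

try:
    response_mock.raise_for_status()
except HfHubHTTPError as err:
    print(f"Got the mocked Hub error: {err}")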