Unverified Commit 1a7e9f13 authored by Patrick von Platen's avatar Patrick von Platen Committed by GitHub
Browse files

[Pipeline loading] Remove send_telemetry (#2640)

* [Pipeline loading]

* up
parent c460ef61
...@@ -28,7 +28,6 @@ import numpy as np ...@@ -28,7 +28,6 @@ import numpy as np
import PIL import PIL
import torch import torch
from huggingface_hub import hf_hub_download, model_info, snapshot_download from huggingface_hub import hf_hub_download, model_info, snapshot_download
from huggingface_hub.utils import send_telemetry
from packaging import version from packaging import version
from PIL import Image from PIL import Image
from tqdm.auto import tqdm from tqdm.auto import tqdm
...@@ -1054,17 +1053,10 @@ class DiffusionPipeline(ConfigMixin): ...@@ -1054,17 +1053,10 @@ class DiffusionPipeline(ConfigMixin):
<Tip> <Tip>
It is required to be logged in (`huggingface-cli login`) when you want to use private or [gated It is required to be logged in (`huggingface-cli login`) when you want to use private or [gated
models](https://huggingface.co/docs/hub/models-gated#gated-models), *e.g.* `"runwayml/stable-diffusion-v1-5"` models](https://huggingface.co/docs/hub/models-gated#gated-models)
</Tip> </Tip>
<Tip>
Activate the special
["offline-mode"](https://huggingface.co/diffusers/installation.html#notice-on-telemetry-logging) to use this
method in a firewalled environment.
</Tip>
""" """
cache_dir = kwargs.pop("cache_dir", DIFFUSERS_CACHE) cache_dir = kwargs.pop("cache_dir", DIFFUSERS_CACHE)
resume_download = kwargs.pop("resume_download", False) resume_download = kwargs.pop("resume_download", False)
...@@ -1081,35 +1073,24 @@ class DiffusionPipeline(ConfigMixin): ...@@ -1081,35 +1073,24 @@ class DiffusionPipeline(ConfigMixin):
allow_patterns = None allow_patterns = None
ignore_patterns = None ignore_patterns = None
user_agent = {"pipeline_class": cls.__name__}
if custom_pipeline is not None and not custom_pipeline.endswith(".py"):
user_agent["custom_pipeline"] = custom_pipeline
if not local_files_only: if not local_files_only:
info = model_info(
pretrained_model_name,
use_auth_token=use_auth_token,
revision=revision,
)
user_agent["pretrained_model_name"] = pretrained_model_name
send_telemetry("pipelines", library_name="diffusers", library_version=__version__, user_agent=user_agent)
commit_hash = info.sha
# try loading the config file
config_file = hf_hub_download( config_file = hf_hub_download(
pretrained_model_name, pretrained_model_name,
cls.config_name, cls.config_name,
cache_dir=cache_dir, cache_dir=cache_dir,
revision=commit_hash, revision=revision,
proxies=proxies, proxies=proxies,
force_download=force_download, force_download=force_download,
resume_download=resume_download, resume_download=resume_download,
use_auth_token=use_auth_token, use_auth_token=use_auth_token,
) )
info = model_info(
pretrained_model_name,
use_auth_token=use_auth_token,
revision=revision,
)
config_dict = cls._dict_from_json_file(config_file) config_dict = cls._dict_from_json_file(config_file)
config_is_cached = True
# retrieve all folder_names that contain relevant files # retrieve all folder_names that contain relevant files
folder_names = [k for k, v in config_dict.items() if isinstance(v, list)] folder_names = [k for k, v in config_dict.items() if isinstance(v, list)]
...@@ -1166,7 +1147,6 @@ class DiffusionPipeline(ConfigMixin): ...@@ -1166,7 +1147,6 @@ class DiffusionPipeline(ConfigMixin):
f"\nA mixture of {variant} and non-{variant} filenames will be loaded.\nLoaded {variant} filenames:\n[{', '.join(bin_variant_filenames)}]\nLoaded non-{variant} filenames:\n[{', '.join(bin_model_filenames - bin_variant_filenames)}\nIf this behavior is not expected, please check your folder structure." f"\nA mixture of {variant} and non-{variant} filenames will be loaded.\nLoaded {variant} filenames:\n[{', '.join(bin_variant_filenames)}]\nLoaded non-{variant} filenames:\n[{', '.join(bin_model_filenames - bin_variant_filenames)}\nIf this behavior is not expected, please check your folder structure."
) )
if config_is_cached:
re_ignore_pattern = [re.compile(fnmatch.translate(p)) for p in ignore_patterns] re_ignore_pattern = [re.compile(fnmatch.translate(p)) for p in ignore_patterns]
re_allow_pattern = [re.compile(fnmatch.translate(p)) for p in allow_patterns] re_allow_pattern = [re.compile(fnmatch.translate(p)) for p in allow_patterns]
...@@ -1181,6 +1161,10 @@ class DiffusionPipeline(ConfigMixin): ...@@ -1181,6 +1161,10 @@ class DiffusionPipeline(ConfigMixin):
# else call snapshot_download # else call snapshot_download
return snapshot_folder return snapshot_folder
user_agent = {"pipeline_class": cls.__name__}
if custom_pipeline is not None and not custom_pipeline.endswith(".py"):
user_agent["custom_pipeline"] = custom_pipeline
# download all allow_patterns - ignore_patterns # download all allow_patterns - ignore_patterns
cached_folder = snapshot_download( cached_folder = snapshot_download(
pretrained_model_name, pretrained_model_name,
......
...@@ -74,10 +74,10 @@ class DownloadTests(unittest.TestCase): ...@@ -74,10 +74,10 @@ class DownloadTests(unittest.TestCase):
) )
download_requests = [r.method for r in m.request_history] download_requests = [r.method for r in m.request_history]
assert download_requests.count("HEAD") == 16, "15 calls to files + send_telemetry" assert download_requests.count("HEAD") == 15, "15 calls to files"
assert download_requests.count("GET") == 17, "15 calls to files + model_info + model_index.json" assert download_requests.count("GET") == 17, "15 calls to files + model_info + model_index.json"
assert ( assert (
len(download_requests) == 33 len(download_requests) == 32
), "2 calls per file (15 files) + send_telemetry, model_info and model_index.json" ), "2 calls per file (15 files) + model_info and model_index.json"
with requests_mock.mock(real_http=True) as m: with requests_mock.mock(real_http=True) as m:
...@@ -86,7 +86,7 @@ class DownloadTests(unittest.TestCase): ...@@ -86,7 +86,7 @@ class DownloadTests(unittest.TestCase):
) )
cache_requests = [r.method for r in m.request_history] cache_requests = [r.method for r in m.request_history]
assert cache_requests.count("HEAD") == 1, "send_telemetry is only HEAD" assert cache_requests.count("HEAD") == 1, "model_index.json is only HEAD"
assert cache_requests.count("GET") == 1, "model info is only GET" assert cache_requests.count("GET") == 1, "model info is only GET"
assert ( assert (
len(cache_requests) == 2 len(cache_requests) == 2
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment