pipeline_utils.py
# coding=utf-8
# Copyright 2022 The HuggingFace Inc. team.
# Copyright (c) 2022, NVIDIA CORPORATION.  All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import importlib
import inspect
import os
from typing import Optional, Union

import torch

from huggingface_hub import snapshot_download
from PIL import Image
from tqdm.auto import tqdm

from .configuration_utils import ConfigMixin
from .utils import DIFFUSERS_CACHE, logging


INDEX_FILE = "diffusion_pytorch_model.bin"


logger = logging.get_logger(__name__)


LOADABLE_CLASSES = {
    "diffusers": {
        "ModelMixin": ["save_pretrained", "from_pretrained"],
        "SchedulerMixin": ["save_config", "from_config"],
        "DiffusionPipeline": ["save_pretrained", "from_pretrained"],
    },
    "transformers": {
        "PreTrainedTokenizer": ["save_pretrained", "from_pretrained"],
        "PreTrainedTokenizerFast": ["save_pretrained", "from_pretrained"],
        "PreTrainedModel": ["save_pretrained", "from_pretrained"],
        "FeatureExtractionMixin": ["save_pretrained", "from_pretrained"],
    },
}

ALL_IMPORTABLE_CLASSES = {}
for library in LOADABLE_CLASSES:
    ALL_IMPORTABLE_CLASSES.update(LOADABLE_CLASSES[library])
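# Illustrative note: after the loop above, ALL_IMPORTABLE_CLASSES flattens the per-library mapping
# into a single class-name -> [save_method_name, load_method_name] lookup, roughly:
#   {"ModelMixin": ["save_pretrained", "from_pretrained"],
#    "SchedulerMixin": ["save_config", "from_config"], ...}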


class DiffusionPipeline(ConfigMixin):

    config_name = "model_index.json"

    def register_modules(self, **kwargs):
        # import it here to avoid circular import
        from diffusers import pipelines

        for name, module in kwargs.items():
            # retrieve library
            library = module.__module__.split(".")[0]

            # check if the module is a pipeline module
            pipeline_dir = module.__module__.split(".")[-2]
            path = module.__module__.split(".")
            is_pipeline_module = pipeline_dir in path and hasattr(pipelines, pipeline_dir)

            # if library is not in LOADABLE_CLASSES, then it is a custom module.
            # Or if it's a pipeline module, then the module is inside the pipeline
            # folder, so we set the library to the module name.
            if library not in LOADABLE_CLASSES or is_pipeline_module:
                library = pipeline_dir

            # retrieve class_name
            class_name = module.__class__.__name__

            register_dict = {name: (library, class_name)}

            # save model index config
            self.register_to_config(**register_dict)

            # set models
            setattr(self, name, module)
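        # Usage sketch (class and argument names illustrative): a concrete pipeline calls this from
        # its __init__, e.g.
        #
        #     class DDPMPipeline(DiffusionPipeline):
        #         def __init__(self, unet, scheduler):
        #             super().__init__()
        #             self.register_modules(unet=unet, scheduler=scheduler)
        #
        # which records {"unet": ("diffusers", "<class name>"), ...} in the model index config and
        # exposes the components as `self.unet` / `self.scheduler`.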

    def save_pretrained(self, save_directory: Union[str, os.PathLike]):
        self.save_config(save_directory)

        model_index_dict = dict(self.config)
        model_index_dict.pop("_class_name")
        model_index_dict.pop("_diffusers_version")
        model_index_dict.pop("_module", None)

        for pipeline_component_name in model_index_dict.keys():
            sub_model = getattr(self, pipeline_component_name)
            model_cls = sub_model.__class__

            save_method_name = None
            # search for the model's base class in LOADABLE_CLASSES
            for library_name, library_classes in LOADABLE_CLASSES.items():
                library = importlib.import_module(library_name)
                for base_class, save_load_methods in library_classes.items():
                    class_candidate = getattr(library, base_class)
                    if issubclass(model_cls, class_candidate):
                        # if we found a suitable base class in LOADABLE_CLASSES then grab its save method
                        save_method_name = save_load_methods[0]
                        break
                if save_method_name is not None:
                    break

            save_method = getattr(sub_model, save_method_name)
            save_method(os.path.join(save_directory, pipeline_component_name))
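        # Resulting layout on disk (sketch): `model_index.json` written by `save_config` above, plus
        # one subfolder per registered component, each written by that component's own save method:
        #
        #     save_directory/
        #         model_index.json
        #         unet/ ...
        #         scheduler/ ...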

    def to(self, torch_device: Optional[Union[str, torch.device]] = None):
        if torch_device is None:
            return self

        module_names, _ = self.extract_init_dict(dict(self.config))
        for name in module_names.keys():
            module = getattr(self, name)
            if isinstance(module, torch.nn.Module):
                module.to(torch_device)
        return self

    @property
    def device(self) -> torch.device:
        module_names, _ = self.extract_init_dict(dict(self.config))
        for name in module_names.keys():
            module = getattr(self, name)
            if isinstance(module, torch.nn.Module):
                return module.device
        return torch.device("cpu")

    @classmethod
    def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.PathLike]], **kwargs):
        r"""
        Instantiate a [`DiffusionPipeline`] from pretrained pipeline weights.

        `pretrained_model_name_or_path` is either a repo id on the Hugging Face Hub (fetched with
        `snapshot_download`) or a path to a local directory containing a `model_index.json`. Each component
        listed in that index is loaded with the `from_pretrained`/`from_config` method of its class.

        Recognized keyword arguments include `cache_dir`, `resume_download`, `proxies`, `local_files_only`,
        `use_auth_token`, `revision` and `torch_dtype`. Already instantiated components (e.g. a scheduler)
        can also be passed directly by name and are used as-is instead of being loaded from disk.
        """
        cache_dir = kwargs.pop("cache_dir", DIFFUSERS_CACHE)
        resume_download = kwargs.pop("resume_download", False)
        proxies = kwargs.pop("proxies", None)
        local_files_only = kwargs.pop("local_files_only", False)
        use_auth_token = kwargs.pop("use_auth_token", None)
        revision = kwargs.pop("revision", None)
        torch_dtype = kwargs.pop("torch_dtype", None)

        # 1. Download the checkpoints and configs
        # use snapshot download here to get it working from from_pretrained
        if not os.path.isdir(pretrained_model_name_or_path):
            cached_folder = snapshot_download(
                pretrained_model_name_or_path,
                cache_dir=cache_dir,
                resume_download=resume_download,
                proxies=proxies,
                local_files_only=local_files_only,
                use_auth_token=use_auth_token,
                revision=revision,
            )
        else:
            cached_folder = pretrained_model_name_or_path

        config_dict = cls.get_config_dict(cached_folder)

        # 2. Load the pipeline class, if using custom module then load it from the hub
        # if we load from explicit class, let's use it
        if cls != DiffusionPipeline:
            pipeline_class = cls
        else:
            diffusers_module = importlib.import_module(cls.__module__.split(".")[0])
            pipeline_class = getattr(diffusers_module, config_dict["_class_name"])
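        # E.g. `DiffusionPipeline.from_pretrained(...)` resolves the concrete class from the
        # `_class_name` entry of model_index.json, whereas calling a subclass directly, say
        # `DDPMPipeline.from_pretrained(...)` (illustrative), uses that subclass as-is.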

        # some modules can be passed directly to the init
        # in this case they are already instantiated in `kwargs`
        # extract them here
        expected_modules = set(inspect.signature(pipeline_class.__init__).parameters.keys())
        passed_class_obj = {k: kwargs.pop(k) for k in expected_modules if k in kwargs}

        init_dict, _ = pipeline_class.extract_init_dict(config_dict, **kwargs)

        init_kwargs = {}

        # import it here to avoid circular import
        from diffusers import pipelines

        # 3. Load each module in the pipeline
        for name, (library_name, class_name) in init_dict.items():
            is_pipeline_module = hasattr(pipelines, library_name)
            loaded_sub_model = None

            # if the model is in a pipeline module, then we load it from the pipeline
            if name in passed_class_obj:
                # 1. check that passed_class_obj has correct parent class
                if not is_pipeline_module:
                    library = importlib.import_module(library_name)
                    class_obj = getattr(library, class_name)
                    importable_classes = LOADABLE_CLASSES[library_name]
                    class_candidates = {c: getattr(library, c) for c in importable_classes.keys()}

                    expected_class_obj = None
                    for class_name, class_candidate in class_candidates.items():
                        if issubclass(class_obj, class_candidate):
                            expected_class_obj = class_candidate

                    if not issubclass(passed_class_obj[name].__class__, expected_class_obj):
                        raise ValueError(
                            f"{passed_class_obj[name]} is of type: {type(passed_class_obj[name])}, but should be"
                            f" {expected_class_obj}"
                        )
                else:
                    logger.warning(
                        f"You have passed a non-standard module {passed_class_obj[name]}. We cannot verify whether it"
                        " has the correct type"
                    )

                # set passed class object
                loaded_sub_model = passed_class_obj[name]
            elif is_pipeline_module:
                pipeline_module = getattr(pipelines, library_name)
                class_obj = getattr(pipeline_module, class_name)
                importable_classes = ALL_IMPORTABLE_CLASSES
                class_candidates = {c: class_obj for c in importable_classes.keys()}
            else:
                # else we just import it from the library.
                library = importlib.import_module(library_name)
                class_obj = getattr(library, class_name)
                importable_classes = LOADABLE_CLASSES[library_name]
                class_candidates = {c: getattr(library, c) for c in importable_classes.keys()}

            if loaded_sub_model is None:
                load_method_name = None
                for class_name, class_candidate in class_candidates.items():
                    if issubclass(class_obj, class_candidate):
                        load_method_name = importable_classes[class_name][1]

                load_method = getattr(class_obj, load_method_name)

                loading_kwargs = {}
                if issubclass(class_obj, torch.nn.Module):
                    loading_kwargs["torch_dtype"] = torch_dtype

                # check if the module is in a subdirectory
                if os.path.isdir(os.path.join(cached_folder, name)):
                    loaded_sub_model = load_method(os.path.join(cached_folder, name), **loading_kwargs)
                else:
                    # else load from the root directory
                    loaded_sub_model = load_method(cached_folder, **loading_kwargs)

            init_kwargs[name] = loaded_sub_model  # e.g. a UNet model, a scheduler, a tokenizer, ...

        # 4. Instantiate the pipeline
        model = pipeline_class(**init_kwargs)
        return model
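        # Typical call (sketch; repo id and dtype are illustrative):
        #
        #     pipe = DiffusionPipeline.from_pretrained("google/ddpm-cifar10-32", torch_dtype=torch.float16)
        #
        # An already instantiated component can be swapped in by name, e.g.
        # `DiffusionPipeline.from_pretrained(path, scheduler=my_scheduler)`, which takes the
        # `passed_class_obj` branch above instead of loading that module from disk.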

    @staticmethod
    def numpy_to_pil(images):
        """
        Convert a numpy image or a batch of images to a PIL image.
        """
        if images.ndim == 3:
            images = images[None, ...]
        images = (images * 255).round().astype("uint8")
        pil_images = [Image.fromarray(image) for image in images]

        return pil_images
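        # Example (sketch, with numpy imported as np): a float array in [0, 1] of shape (H, W, C) or
        # (N, H, W, C) becomes a list of PIL images, e.g.
        #     pipe.numpy_to_pil(np.random.rand(2, 64, 64, 3))  # -> [<PIL.Image>, <PIL.Image>]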

    def progress_bar(self, iterable):
        if not hasattr(self, "_progress_bar_config"):
            self._progress_bar_config = {}
        elif not isinstance(self._progress_bar_config, dict):
            raise ValueError(
                f"`self._progress_bar_config` should be of type `dict`, but is {type(self._progress_bar_config)}."
            )

        return tqdm(iterable, **self._progress_bar_config)

    def set_progress_bar_config(self, **kwargs):
        self._progress_bar_config = kwargs
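    # Progress-bar usage sketch: pipelines wrap their denoising loop in `self.progress_bar(...)`, and
    # callers can tune or silence it via keyword arguments forwarded to tqdm, e.g.
    # `pipe.set_progress_bar_config(disable=True)` or `pipe.set_progress_bar_config(desc="Sampling")`.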