# coding=utf-8
# Copyright 2024 The HuggingFace Inc. team.
# Copyright (c) 2022, NVIDIA CORPORATION.  All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" ConfigMixin base class and utilities."""
import dataclasses
import functools
import importlib
import inspect
import json
import os
import re
from collections import OrderedDict
from pathlib import PosixPath
from typing import Any, Dict, Tuple, Union

import numpy as np
from huggingface_hub import create_repo, hf_hub_download
from huggingface_hub.utils import (
    EntryNotFoundError,
    RepositoryNotFoundError,
    RevisionNotFoundError,
    validate_hf_hub_args,
)
from requests import HTTPError

from . import __version__
from .utils import (
    HUGGINGFACE_CO_RESOLVE_ENDPOINT,
    DummyObject,
    deprecate,
    extract_commit_hash,
    http_user_agent,
    logging,
)


logger = logging.get_logger(__name__)

_re_configuration_file = re.compile(r"config\.(.*)\.json")


class FrozenDict(OrderedDict):
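    """
    An `OrderedDict` whose items are also exposed as attributes. The mutating methods `__delitem__`, `setdefault`,
    `pop`, and `update` are disabled and always raise an exception.

    Example (illustrative sketch):

    ```python
    >>> d = FrozenDict(beta_start=0.0001, beta_end=0.02)
    >>> d.beta_start
    0.0001
    >>> d.pop("beta_start")  # raises an Exception because the dict is frozen
    ```
    """
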
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        for key, value in self.items():
            setattr(self, key, value)

        self.__frozen = True

    def __delitem__(self, *args, **kwargs):
        raise Exception(f"You cannot use ``__delitem__`` on a {self.__class__.__name__} instance.")

    def setdefault(self, *args, **kwargs):
        raise Exception(f"You cannot use ``setdefault`` on a {self.__class__.__name__} instance.")

    def pop(self, *args, **kwargs):
        raise Exception(f"You cannot use ``pop`` on a {self.__class__.__name__} instance.")

    def update(self, *args, **kwargs):
        raise Exception(f"You cannot use ``update`` on a {self.__class__.__name__} instance.")

    def __setattr__(self, name, value):
        if hasattr(self, "__frozen") and self.__frozen:
            raise Exception(f"You cannot use ``__setattr__`` on a {self.__class__.__name__} instance.")
        super().__setattr__(name, value)

    def __setitem__(self, name, value):
        if hasattr(self, "__frozen") and self.__frozen:
            raise Exception(f"You cannot use ``__setattr__`` on a {self.__class__.__name__} instance.")
        super().__setitem__(name, value)


class ConfigMixin:
    r"""
    Base class for all configuration classes. All configuration parameters are stored under `self.config`. Also
    provides the [`~ConfigMixin.from_config`] and [`~ConfigMixin.save_config`] methods for loading, downloading, and
    saving classes that inherit from [`ConfigMixin`].

    Class attributes:
        - **config_name** (`str`) -- A filename under which the config should be stored when calling
          [`~ConfigMixin.save_config`] (should be overridden by parent class).
        - **ignore_for_config** (`List[str]`) -- A list of attributes that should not be saved in the config (should be
          overridden by subclass).
        - **has_compatibles** (`bool`) -- Whether the class has compatible classes (should be overridden by subclass).
        - **_deprecated_kwargs** (`List[str]`) -- Keyword arguments that are deprecated. Note that the `init` function
          should only have a `kwargs` argument if at least one argument is deprecated (should be overridden by
          subclass).
    """

    config_name = None
    ignore_for_config = []
    has_compatibles = False

    _deprecated_kwargs = []

    def register_to_config(self, **kwargs):
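        """
        Register `kwargs` into `self.config`. The values passed here are merged with any previously registered
        configuration values and stored as an immutable `FrozenDict` in `self._internal_dict`.
        """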
        if self.config_name is None:
            raise NotImplementedError(f"Make sure that {self.__class__} has defined a class name `config_name`")
        # Special case for `kwargs` used in deprecation warning added to schedulers
        # TODO: remove this when we remove the deprecation warning, and the `kwargs` argument,
        # or solve in a more general way.
        kwargs.pop("kwargs", None)

        if not hasattr(self, "_internal_dict"):
            internal_dict = kwargs
        else:
            previous_dict = dict(self._internal_dict)
            internal_dict = {**self._internal_dict, **kwargs}
            logger.debug(f"Updating config from {previous_dict} to {internal_dict}")

        self._internal_dict = FrozenDict(internal_dict)

    def __getattr__(self, name: str) -> Any:
        """The only reason we overwrite `getattr` here is to gracefully deprecate accessing
        config attributes directly. See https://github.com/huggingface/diffusers/pull/3129

        This function is mostly copied from PyTorch's __getattr__ overwrite:
        https://pytorch.org/docs/stable/_modules/torch/nn/modules/module.html#Module
        """

        is_in_config = "_internal_dict" in self.__dict__ and hasattr(self.__dict__["_internal_dict"], name)
        is_attribute = name in self.__dict__

        if is_in_config and not is_attribute:
            deprecation_message = f"Accessing config attribute `{name}` directly via '{type(self).__name__}' object attribute is deprecated. Please access '{name}' over '{type(self).__name__}'s config object instead, e.g. 'scheduler.config.{name}'."
            deprecate("direct config name access", "1.0.0", deprecation_message, standard_warn=False)
            return self._internal_dict[name]

        raise AttributeError(f"'{type(self).__name__}' object has no attribute '{name}'")

    def save_config(self, save_directory: Union[str, os.PathLike], push_to_hub: bool = False, **kwargs):
        """
        Save a configuration object to the directory specified in `save_directory` so that it can be reloaded using the
        [`~ConfigMixin.from_config`] class method.

        Args:
            save_directory (`str` or `os.PathLike`):
                Directory where the configuration JSON file is saved (will be created if it does not exist).
            push_to_hub (`bool`, *optional*, defaults to `False`):
                Whether or not to push your model to the Hugging Face Hub after saving it. You can specify the
                repository you want to push to with `repo_id` (will default to the name of `save_directory` in your
                namespace).
            kwargs (`Dict[str, Any]`, *optional*):
                Additional keyword arguments passed along to the [`~utils.PushToHubMixin.push_to_hub`] method.
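
        Example (illustrative sketch; `DDPMScheduler` is one of the classes that inherit from [`ConfigMixin`]):

        ```python
        >>> from diffusers import DDPMScheduler

        >>> scheduler = DDPMScheduler.from_pretrained("google/ddpm-cifar10-32")
        >>> scheduler.save_config("./ddpm-scheduler")  # writes the scheduler's configuration JSON into this directory
        ```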
        """
        if os.path.isfile(save_directory):
            raise AssertionError(f"Provided path ({save_directory}) should be a directory, not a file")

        os.makedirs(save_directory, exist_ok=True)

        # If we save using the predefined names, we can load using `from_config`
        output_config_file = os.path.join(save_directory, self.config_name)

        self.to_json_file(output_config_file)
        logger.info(f"Configuration saved in {output_config_file}")

        if push_to_hub:
            commit_message = kwargs.pop("commit_message", None)
            private = kwargs.pop("private", False)
            create_pr = kwargs.pop("create_pr", False)
            token = kwargs.pop("token", None)
            repo_id = kwargs.pop("repo_id", save_directory.split(os.path.sep)[-1])
            repo_id = create_repo(repo_id, exist_ok=True, private=private, token=token).repo_id

            self._upload_folder(
                save_directory,
                repo_id,
                token=token,
                commit_message=commit_message,
                create_pr=create_pr,
            )

    @classmethod
    def from_config(cls, config: Union[FrozenDict, Dict[str, Any]] = None, return_unused_kwargs=False, **kwargs):
        r"""
        Instantiate a Python class from a config dictionary.

        Parameters:
            config (`Dict[str, Any]`):
                A config dictionary from which the Python class is instantiated. Make sure to only load configuration
                files of compatible classes.
            return_unused_kwargs (`bool`, *optional*, defaults to `False`):
                Whether kwargs that are not consumed by the Python class should be returned or not.
            kwargs (remaining dictionary of keyword arguments, *optional*):
                Can be used to update the configuration object (after it is loaded) and instantiate the Python class.
                `**kwargs` are passed directly to the underlying scheduler/model's `__init__` method and eventually
                overwrite the same named arguments in `config`.

        Returns:
            [`ModelMixin`] or [`SchedulerMixin`]:
                A model or scheduler object instantiated from a config dictionary.

        Examples:

        ```python
        >>> from diffusers import DDPMScheduler, DDIMScheduler, PNDMScheduler

        >>> # Download scheduler from huggingface.co and cache.
        >>> scheduler = DDPMScheduler.from_pretrained("google/ddpm-cifar10-32")

        >>> # Instantiate DDIM scheduler class with same config as DDPM
        >>> scheduler = DDIMScheduler.from_config(scheduler.config)

        >>> # Instantiate PNDM scheduler class with same config as DDPM
        >>> scheduler = PNDMScheduler.from_config(scheduler.config)
        ```
        """
        # <===== TO BE REMOVED WITH DEPRECATION
        # TODO(Patrick) - make sure to remove the following lines when config=="model_path" is deprecated
        if "pretrained_model_name_or_path" in kwargs:
            config = kwargs.pop("pretrained_model_name_or_path")

        if config is None:
            raise ValueError("Please make sure to provide a config as the first positional argument.")
        # ======>

        if not isinstance(config, dict):
            deprecation_message = "It is deprecated to pass a pretrained model name or path to `from_config`."
            if "Scheduler" in cls.__name__:
                deprecation_message += (
                    f"If you were trying to load a scheduler, please use {cls}.from_pretrained(...) instead."
                    " Otherwise, please make sure to pass a configuration dictionary instead. This functionality will"
                    " be removed in v1.0.0."
                )
            elif "Model" in cls.__name__:
                deprecation_message += (
                    f"If you were trying to load a model, please use {cls}.load_config(...) followed by"
                    f" {cls}.from_config(...) instead. Otherwise, please make sure to pass a configuration dictionary"
                    " instead. This functionality will be removed in v1.0.0."
                )
            deprecate("config-passed-as-path", "1.0.0", deprecation_message, standard_warn=False)
            config, kwargs = cls.load_config(pretrained_model_name_or_path=config, return_unused_kwargs=True, **kwargs)

        init_dict, unused_kwargs, hidden_dict = cls.extract_init_dict(config, **kwargs)

        # Allow dtype to be specified on initialization
        if "dtype" in unused_kwargs:
            init_dict["dtype"] = unused_kwargs.pop("dtype")

        # add possible deprecated kwargs
        for deprecated_kwarg in cls._deprecated_kwargs:
            if deprecated_kwarg in unused_kwargs:
                init_dict[deprecated_kwarg] = unused_kwargs.pop(deprecated_kwarg)

        # Return model and optionally state and/or unused_kwargs
        model = cls(**init_dict)

        # make sure to also save config parameters that might be used for compatible classes
        model.register_to_config(**hidden_dict)

        # add hidden kwargs of compatible classes to unused_kwargs
        unused_kwargs = {**unused_kwargs, **hidden_dict}

        if return_unused_kwargs:
            return (model, unused_kwargs)
        else:
            return model

    @classmethod
    def get_config_dict(cls, *args, **kwargs):
        deprecation_message = (
            f" The function get_config_dict is deprecated. Please use {cls}.load_config instead. This function will be"
            " removed in version v1.0.0"
        )
        deprecate("get_config_dict", "1.0.0", deprecation_message, standard_warn=False)
        return cls.load_config(*args, **kwargs)

    @classmethod
    @validate_hf_hub_args
    def load_config(
        cls,
        pretrained_model_name_or_path: Union[str, os.PathLike],
        return_unused_kwargs=False,
        return_commit_hash=False,
        **kwargs,
    ) -> Tuple[Dict[str, Any], Dict[str, Any]]:
        r"""
        Load a model or scheduler configuration.

        Parameters:
            pretrained_model_name_or_path (`str` or `os.PathLike`, *optional*):
                Can be either:

                    - A string, the *model id* (for example `google/ddpm-celebahq-256`) of a pretrained model hosted on
                      the Hub.
                    - A path to a *directory* (for example `./my_model_directory`) containing model weights saved with
                      [`~ConfigMixin.save_config`].

            cache_dir (`Union[str, os.PathLike]`, *optional*):
                Path to a directory where a downloaded pretrained model configuration is cached if the standard cache
                is not used.
            force_download (`bool`, *optional*, defaults to `False`):
                Whether or not to force the (re-)download of the model weights and configuration files, overriding the
                cached versions if they exist.
            resume_download (`bool`, *optional*, defaults to `False`):
                Whether or not to resume downloading the model weights and configuration files. If set to `False`, any
                incompletely downloaded files are deleted.
            proxies (`Dict[str, str]`, *optional*):
                A dictionary of proxy servers to use by protocol or endpoint, for example, `{'http': 'foo.bar:3128',
                'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request.
            output_loading_info(`bool`, *optional*, defaults to `False`):
                Whether or not to also return a dictionary containing missing keys, unexpected keys and error messages.
            local_files_only (`bool`, *optional*, defaults to `False`):
                Whether to only load local model weights and configuration files or not. If set to `True`, the model
                won't be downloaded from the Hub.
            token (`str` or *bool*, *optional*):
                The token to use as HTTP bearer authorization for remote files. If `True`, the token generated from
                `diffusers-cli login` (stored in `~/.huggingface`) is used.
            revision (`str`, *optional*, defaults to `"main"`):
                The specific model version to use. It can be a branch name, a tag name, a commit id, or any identifier
                allowed by Git.
            subfolder (`str`, *optional*, defaults to `""`):
                The subfolder location of a model file within a larger model repository on the Hub or locally.
            return_unused_kwargs (`bool`, *optional*, defaults to `False`):
                Whether unused keyword arguments of the config are returned.
            return_commit_hash (`bool`, *optional*, defaults to `False`):
                Whether the `commit_hash` of the loaded configuration is returned.

        Returns:
            `dict`:
                A dictionary of all the parameters stored in a JSON configuration file.
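
        Example (illustrative sketch):

        ```python
        >>> from diffusers import DDPMScheduler

        >>> # Download only the configuration dictionary, then instantiate the class from it.
        >>> config = DDPMScheduler.load_config("google/ddpm-cifar10-32")
        >>> scheduler = DDPMScheduler.from_config(config)
        ```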

        """
        cache_dir = kwargs.pop("cache_dir", None)
        force_download = kwargs.pop("force_download", False)
        resume_download = kwargs.pop("resume_download", False)
        proxies = kwargs.pop("proxies", None)
        token = kwargs.pop("token", None)
        local_files_only = kwargs.pop("local_files_only", False)
        revision = kwargs.pop("revision", None)
        _ = kwargs.pop("mirror", None)
        subfolder = kwargs.pop("subfolder", None)
        user_agent = kwargs.pop("user_agent", {})

        user_agent = {**user_agent, "file_type": "config"}
        user_agent = http_user_agent(user_agent)

        pretrained_model_name_or_path = str(pretrained_model_name_or_path)

        if cls.config_name is None:
            raise ValueError(
                "`self.config_name` is not defined. Note that one should not load a config from "
                "`ConfigMixin`. Please make sure to define `config_name` in a class inheriting from `ConfigMixin`"
            )

        if os.path.isfile(pretrained_model_name_or_path):
            config_file = pretrained_model_name_or_path
        elif os.path.isdir(pretrained_model_name_or_path):
            if os.path.isfile(os.path.join(pretrained_model_name_or_path, cls.config_name)):
                # Load from a PyTorch checkpoint
                config_file = os.path.join(pretrained_model_name_or_path, cls.config_name)
            elif subfolder is not None and os.path.isfile(
                os.path.join(pretrained_model_name_or_path, subfolder, cls.config_name)
            ):
                config_file = os.path.join(pretrained_model_name_or_path, subfolder, cls.config_name)
            else:
                raise EnvironmentError(
                    f"Error no file named {cls.config_name} found in directory {pretrained_model_name_or_path}."
                )
        else:
            try:
                # Load from URL or cache if already cached
                config_file = hf_hub_download(
                    pretrained_model_name_or_path,
                    filename=cls.config_name,
                    cache_dir=cache_dir,
                    force_download=force_download,
                    proxies=proxies,
                    resume_download=resume_download,
                    local_files_only=local_files_only,
                    token=token,
                    user_agent=user_agent,
                    subfolder=subfolder,
                    revision=revision,
                )
            except RepositoryNotFoundError:
                raise EnvironmentError(
                    f"{pretrained_model_name_or_path} is not a local folder and is not a valid model identifier"
                    " listed on 'https://huggingface.co/models'\nIf this is a private repository, make sure to pass a"
                    " token having permission to this repo with `token` or log in with `huggingface-cli login`."
                )
            except RevisionNotFoundError:
                raise EnvironmentError(
                    f"{revision} is not a valid git identifier (branch name, tag name or commit id) that exists for"
                    " this model name. Check the model page at"
                    f" 'https://huggingface.co/{pretrained_model_name_or_path}' for available revisions."
                )
            except EntryNotFoundError:
                raise EnvironmentError(
                    f"{pretrained_model_name_or_path} does not appear to have a file named {cls.config_name}."
                )
            except HTTPError as err:
                raise EnvironmentError(
                    "There was a specific connection error when trying to load"
                    f" {pretrained_model_name_or_path}:\n{err}"
                )
            except ValueError:
                raise EnvironmentError(
                    f"We couldn't connect to '{HUGGINGFACE_CO_RESOLVE_ENDPOINT}' to load this model, couldn't find it"
                    f" in the cached files and it looks like {pretrained_model_name_or_path} is not the path to a"
                    f" directory containing a {cls.config_name} file.\nCheck out your internet connection or see how to"
                    " run the library in offline mode at"
                    " 'https://huggingface.co/docs/diffusers/installation#offline-mode'."
                )
            except EnvironmentError:
                raise EnvironmentError(
                    f"Can't load config for '{pretrained_model_name_or_path}'. If you were trying to load it from "
                    "'https://huggingface.co/models', make sure you don't have a local directory with the same name. "
                    f"Otherwise, make sure '{pretrained_model_name_or_path}' is the correct path to a directory "
                    f"containing a {cls.config_name} file"
                )

        try:
            # Load config dict
            config_dict = cls._dict_from_json_file(config_file)

            commit_hash = extract_commit_hash(config_file)
        except (json.JSONDecodeError, UnicodeDecodeError):
            raise EnvironmentError(f"It looks like the config file at '{config_file}' is not a valid JSON file.")

        if not (return_unused_kwargs or return_commit_hash):
            return config_dict

        outputs = (config_dict,)

        if return_unused_kwargs:
            outputs += (kwargs,)

        if return_commit_hash:
            outputs += (commit_hash,)

        return outputs

    @staticmethod
    def _get_init_keys(cls):
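        """Return the set of argument names accepted by `cls.__init__` (including `self`)."""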
        return set(dict(inspect.signature(cls.__init__).parameters).keys())

    @classmethod
    def extract_init_dict(cls, config_dict, **kwargs):
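        """
        Split `config_dict`, together with overriding `kwargs`, into three dictionaries: the arguments expected by
        `cls.__init__` (`init_dict`), everything that is left over (`unused_kwargs`), and the "hidden" entries of the
        original config that are kept only so they can be re-registered for compatible classes (`hidden_config_dict`).
        """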
        # Skip keys that were not present in the original config, so default __init__ values were used
        used_defaults = config_dict.get("_use_default_values", [])
        config_dict = {k: v for k, v in config_dict.items() if k not in used_defaults and k != "_use_default_values"}

        # 0. Copy original config dict
        original_dict = dict(config_dict.items())

        # 1. Retrieve expected config attributes from __init__ signature
        expected_keys = cls._get_init_keys(cls)
        expected_keys.remove("self")
        # remove general kwargs if present in dict
        if "kwargs" in expected_keys:
            expected_keys.remove("kwargs")
        # remove flax internal keys
        if hasattr(cls, "_flax_internal_args"):
            for arg in cls._flax_internal_args:
                expected_keys.remove(arg)

        # 2. Remove attributes that cannot be expected from expected config attributes
        # remove keys to be ignored
        if len(cls.ignore_for_config) > 0:
            expected_keys = expected_keys - set(cls.ignore_for_config)

        # load diffusers library to import compatible and original scheduler
        diffusers_library = importlib.import_module(__name__.split(".")[0])

        if cls.has_compatibles:
            compatible_classes = [c for c in cls._get_compatibles() if not isinstance(c, DummyObject)]
        else:
            compatible_classes = []

        expected_keys_comp_cls = set()
        for c in compatible_classes:
            expected_keys_c = cls._get_init_keys(c)
            expected_keys_comp_cls = expected_keys_comp_cls.union(expected_keys_c)
        expected_keys_comp_cls = expected_keys_comp_cls - cls._get_init_keys(cls)
        config_dict = {k: v for k, v in config_dict.items() if k not in expected_keys_comp_cls}

        # remove attributes from orig class that cannot be expected
        orig_cls_name = config_dict.pop("_class_name", cls.__name__)
        if (
            isinstance(orig_cls_name, str)
            and orig_cls_name != cls.__name__
            and hasattr(diffusers_library, orig_cls_name)
        ):
            orig_cls = getattr(diffusers_library, orig_cls_name)
            unexpected_keys_from_orig = cls._get_init_keys(orig_cls) - expected_keys
            config_dict = {k: v for k, v in config_dict.items() if k not in unexpected_keys_from_orig}
        elif not isinstance(orig_cls_name, str) and not isinstance(orig_cls_name, (list, tuple)):
            raise ValueError(
                "Make sure that the `_class_name` is of type string or list of string (for custom pipelines)."
            )

        # remove private attributes
        config_dict = {k: v for k, v in config_dict.items() if not k.startswith("_")}

        # 3. Create keyword arguments that will be passed to __init__ from expected keyword arguments
        init_dict = {}
        for key in expected_keys:
            # if config param is passed to kwarg and is present in config dict
            # it should overwrite existing config dict key
            if key in kwargs and key in config_dict:
                config_dict[key] = kwargs.pop(key)

            if key in kwargs:
                # overwrite key
                init_dict[key] = kwargs.pop(key)
            elif key in config_dict:
                # use value from config dict
                init_dict[key] = config_dict.pop(key)

        # 4. Give nice warning if unexpected values have been passed
        if len(config_dict) > 0:
            logger.warning(
                f"The config attributes {config_dict} were passed to {cls.__name__}, "
                "but are not expected and will be ignored. Please verify your "
                f"{cls.config_name} configuration file."
            )

        # 5. Give nice info if config attributes are initialized to default because they have not been passed
        passed_keys = set(init_dict.keys())
        if len(expected_keys - passed_keys) > 0:
            logger.info(
                f"{expected_keys - passed_keys} was not found in config. Values will be initialized to default values."
            )

        # 6. Define unused keyword arguments
        unused_kwargs = {**config_dict, **kwargs}

        # 7. Define "hidden" config parameters that were saved for compatible classes
        hidden_config_dict = {k: v for k, v in original_dict.items() if k not in init_dict}

        return init_dict, unused_kwargs, hidden_config_dict

    @classmethod
    def _dict_from_json_file(cls, json_file: Union[str, os.PathLike]):
        with open(json_file, "r", encoding="utf-8") as reader:
            text = reader.read()
        return json.loads(text)

    def __repr__(self):
        return f"{self.__class__.__name__} {self.to_json_string()}"

    @property
    def config(self) -> Dict[str, Any]:
        """
        Returns the config of the class as a frozen dictionary

        Returns:
            `Dict[str, Any]`: Config of the class.
        """
        return self._internal_dict

    def to_json_string(self) -> str:
        """
        Serializes the configuration instance to a JSON string.

        Returns:
            `str`:
                String containing all the attributes that make up the configuration instance in JSON format.
        """
        config_dict = self._internal_dict if hasattr(self, "_internal_dict") else {}
        config_dict["_class_name"] = self.__class__.__name__
        config_dict["_diffusers_version"] = __version__

        def to_json_saveable(value):
            if isinstance(value, np.ndarray):
                value = value.tolist()
            elif isinstance(value, PosixPath):
                value = str(value)
            return value

        config_dict = {k: to_json_saveable(v) for k, v in config_dict.items()}
        # Don't save "_ignore_files" or "_use_default_values"
        config_dict.pop("_ignore_files", None)
        config_dict.pop("_use_default_values", None)

        return json.dumps(config_dict, indent=2, sort_keys=True) + "\n"

    def to_json_file(self, json_file_path: Union[str, os.PathLike]):
        """
        Save the configuration instance's parameters to a JSON file.

        Args:
            json_file_path (`str` or `os.PathLike`):
                Path to the JSON file to save a configuration instance's parameters.
        """
        with open(json_file_path, "w", encoding="utf-8") as writer:
            writer.write(self.to_json_string())


def register_to_config(init):
    r"""
    Decorator to apply on the init of classes inheriting from [`ConfigMixin`] so that all the arguments are
    automatically sent to `self.register_to_config`. To ignore a specific argument accepted by the init but that
    shouldn't be registered in the config, use the `ignore_for_config` class variable.

    Warning: Once decorated, all private arguments (beginning with an underscore) are trashed and not sent to the init!
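
    Example (illustrative sketch; `MyScheduler` is a hypothetical class, not part of the library):

    ```python
    >>> class MyScheduler(ConfigMixin):
    ...     config_name = "scheduler_config.json"
    ...
    ...     @register_to_config
    ...     def __init__(self, beta_start: float = 0.0001, beta_end: float = 0.02):
    ...         pass

    >>> MyScheduler(beta_end=0.01).config["beta_end"]
    0.01
    ```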
    """

    @functools.wraps(init)
    def inner_init(self, *args, **kwargs):
        # Ignore private kwargs in the init.
        init_kwargs = {k: v for k, v in kwargs.items() if not k.startswith("_")}
        config_init_kwargs = {k: v for k, v in kwargs.items() if k.startswith("_")}
        if not isinstance(self, ConfigMixin):
            raise RuntimeError(
                f"`@register_to_config` was applied to {self.__class__.__name__} init method, but this class does "
                "not inherit from `ConfigMixin`."
            )

        ignore = getattr(self, "ignore_for_config", [])
        # Get positional arguments aligned with kwargs
        new_kwargs = {}
        signature = inspect.signature(init)
        parameters = {
            name: p.default for i, (name, p) in enumerate(signature.parameters.items()) if i > 0 and name not in ignore
        }
        for arg, name in zip(args, parameters.keys()):
            new_kwargs[name] = arg

        # Then add all kwargs
        new_kwargs.update(
            {
                k: init_kwargs.get(k, default)
                for k, default in parameters.items()
                if k not in ignore and k not in new_kwargs
            }
        )

        # Take note of the parameters that were not present in the loaded config
        if len(set(new_kwargs.keys()) - set(init_kwargs)) > 0:
            new_kwargs["_use_default_values"] = list(set(new_kwargs.keys()) - set(init_kwargs))

        new_kwargs = {**config_init_kwargs, **new_kwargs}
        getattr(self, "register_to_config")(**new_kwargs)
        init(self, *args, **init_kwargs)

    return inner_init


def flax_register_to_config(cls):
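    """
    Class decorator for Flax modules inheriting from [`ConfigMixin`]. It wraps the dataclass-generated `__init__` so
    that all field values (except Flax-internal arguments and `dtype`) are automatically registered to `self.config`.
    """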
    original_init = cls.__init__

    @functools.wraps(original_init)
    def init(self, *args, **kwargs):
        if not isinstance(self, ConfigMixin):
            raise RuntimeError(
                f"`@flax_register_to_config` was applied to {self.__class__.__name__} init method, but this class does "
                "not inherit from `ConfigMixin`."
            )

        # Ignore private kwargs in the init. Retrieve all passed attributes
        init_kwargs = dict(kwargs.items())

        # Retrieve default values
        fields = dataclasses.fields(self)
        default_kwargs = {}
        for field in fields:
            # ignore flax specific attributes
            if field.name in self._flax_internal_args:
                continue
            if type(field.default) == dataclasses._MISSING_TYPE:
                default_kwargs[field.name] = None
            else:
                default_kwargs[field.name] = getattr(self, field.name)

        # Make sure init_kwargs override default kwargs
        new_kwargs = {**default_kwargs, **init_kwargs}
        # dtype should be part of `init_kwargs`, but not `new_kwargs`
        if "dtype" in new_kwargs:
            new_kwargs.pop("dtype")

        # Get positional arguments aligned with kwargs
        for i, arg in enumerate(args):
            name = fields[i].name
            new_kwargs[name] = arg

        # Take note of the parameters that were not present in the loaded config
        if len(set(new_kwargs.keys()) - set(init_kwargs)) > 0:
            new_kwargs["_use_default_values"] = list(set(new_kwargs.keys()) - set(init_kwargs))

        getattr(self, "register_to_config")(**new_kwargs)
        original_init(self, *args, **kwargs)

    cls.__init__ = init
    return cls