# coding=utf-8
# Copyright 2024 HuggingFace Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import tempfile
import unittest

from diffusers import (
    DDIMScheduler,
    DDPMScheduler,
    DPMSolverMultistepScheduler,
    EulerAncestralDiscreteScheduler,
    EulerDiscreteScheduler,
    PNDMScheduler,
    logging,
)
from diffusers.configuration_utils import ConfigMixin, register_to_config
from diffusers.utils.testing_utils import CaptureLogger


class SampleObject(ConfigMixin):
    config_name = "config.json"

    @register_to_config
    def __init__(
        self,
        a=2,
        b=5,
        c=(2, 5),
        d="for diffusion",
        e=[1, 3],
    ):
        pass


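# SampleObject2/3/4 mirror SampleObject but rename `e` to `f` (SampleObject2),
# define both `e` and `f` (SampleObject3), or use different defaults (SampleObject4),
# so that loading a config across classes can be tested.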
class SampleObject2(ConfigMixin):
    config_name = "config.json"

    @register_to_config
    def __init__(
        self,
        a=2,
        b=5,
        c=(2, 5),
        d="for diffusion",
        f=[1, 3],
    ):
        pass


class SampleObject3(ConfigMixin):
    config_name = "config.json"

    @register_to_config
    def __init__(
        self,
        a=2,
        b=5,
        c=(2, 5),
        d="for diffusion",
        e=[1, 3],
        f=[1, 3],
    ):
        pass


class SampleObject4(ConfigMixin):
    config_name = "config.json"

    @register_to_config
    def __init__(
        self,
        a=2,
        b=5,
        c=(2, 5),
        d="for diffusion",
        e=[1, 5],
        f=[5, 4],
    ):
        pass


class ConfigTester(unittest.TestCase):
    def test_load_not_from_mixin(self):
        with self.assertRaises(ValueError):
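            # ConfigMixin itself does not define `config_name`, so loading must fail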
            ConfigMixin.load_config("dummy_path")

    def test_register_to_config(self):
        obj = SampleObject()
        config = obj.config
        assert config["a"] == 2
        assert config["b"] == 5
        assert config["c"] == (2, 5)
        assert config["d"] == "for diffusion"
        assert config["e"] == [1, 3]

        # init ignores private (underscore-prefixed) arguments
        obj = SampleObject(_name_or_path="lalala")
        config = obj.config
        assert config["a"] == 2
        assert config["b"] == 5
        assert config["c"] == (2, 5)
        assert config["d"] == "for diffusion"
        assert config["e"] == [1, 3]

        # can override default
        obj = SampleObject(c=6)
        config = obj.config
        assert config["a"] == 2
        assert config["b"] == 5
        assert config["c"] == 6
        assert config["d"] == "for diffusion"
        assert config["e"] == [1, 3]

        # can use positional arguments.
        obj = SampleObject(1, c=6)
        config = obj.config
        assert config["a"] == 1
        assert config["b"] == 5
        assert config["c"] == 6
        assert config["d"] == "for diffusion"
        assert config["e"] == [1, 3]

    def test_save_load(self):
        obj = SampleObject()
        config = obj.config

        assert config["a"] == 2
        assert config["b"] == 5
        assert config["c"] == (2, 5)
        assert config["d"] == "for diffusion"
        assert config["e"] == [1, 3]

        with tempfile.TemporaryDirectory() as tmpdirname:
            obj.save_config(tmpdirname)
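            # round-trip: `load_config` reads the json file back as a dict and
            # `from_config` instantiates a new object from it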
            new_obj = SampleObject.from_config(SampleObject.load_config(tmpdirname))
            new_config = new_obj.config

        # unfreeze configs
        config = dict(config)
        new_config = dict(new_config)

        assert config.pop("c") == (2, 5)  # instantiated as tuple
        assert new_config.pop("c") == [2, 5]  # saved & loaded as list because of json
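        # the reloaded object received every value explicitly, so its config carries no
        # `_use_default_values` entry; drop it from the original config before comparing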
        config.pop("_use_default_values")
        assert config == new_config

    def test_load_ddim_from_pndm(self):
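        # the checkpoint stores a PNDM scheduler config; loading it with a compatible
        # scheduler class (here and in the tests below) should not emit a warning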
        logger = logging.get_logger("diffusers.configuration_utils")
        # 30 = logging.WARNING
        logger.setLevel(30)

        with CaptureLogger(logger) as cap_logger:
            ddim = DDIMScheduler.from_pretrained(
                "hf-internal-testing/tiny-stable-diffusion-torch", subfolder="scheduler"
            )

        assert ddim.__class__ == DDIMScheduler
        # no warning should be thrown
        assert cap_logger.out == ""

    def test_load_euler_from_pndm(self):
        logger = logging.get_logger("diffusers.configuration_utils")
        # 30 = logging.WARNING
        logger.setLevel(30)

        with CaptureLogger(logger) as cap_logger:
            euler = EulerDiscreteScheduler.from_pretrained(
                "hf-internal-testing/tiny-stable-diffusion-torch", subfolder="scheduler"
            )

        assert euler.__class__ == EulerDiscreteScheduler
        # no warning should be thrown
        assert cap_logger.out == ""

    def test_load_euler_ancestral_from_pndm(self):
        logger = logging.get_logger("diffusers.configuration_utils")
        # 30 = logging.WARNING
        logger.setLevel(30)

        with CaptureLogger(logger) as cap_logger:
            euler = EulerAncestralDiscreteScheduler.from_pretrained(
                "hf-internal-testing/tiny-stable-diffusion-torch", subfolder="scheduler"
            )

        assert euler.__class__ == EulerAncestralDiscreteScheduler
        # no warning should be thrown
        assert cap_logger.out == ""

    def test_load_pndm(self):
        logger = logging.get_logger("diffusers.configuration_utils")
        # 30 = logging.WARNING
        logger.setLevel(30)

        with CaptureLogger(logger) as cap_logger:
            pndm = PNDMScheduler.from_pretrained(
                "hf-internal-testing/tiny-stable-diffusion-torch", subfolder="scheduler"
            )

        assert pndm.__class__ == PNDMScheduler
        # no warning should be thrown
        assert cap_logger.out == ""

    def test_overwrite_config_on_load(self):
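        # kwargs passed to `from_pretrained` should overwrite the stored config values
        # and end up in the resulting scheduler's config without any warning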
        logger = logging.get_logger("diffusers.configuration_utils")
        # 30 = logging.WARNING
        logger.setLevel(30)

        with CaptureLogger(logger) as cap_logger:
            ddpm = DDPMScheduler.from_pretrained(
                "hf-internal-testing/tiny-stable-diffusion-torch",
                subfolder="scheduler",
                prediction_type="sample",
                beta_end=8,
            )

        with CaptureLogger(logger) as cap_logger_2:
            ddpm_2 = DDPMScheduler.from_pretrained("google/ddpm-celebahq-256", beta_start=88)

        assert ddpm.__class__ == DDPMScheduler
        assert ddpm.config.prediction_type == "sample"
        assert ddpm.config.beta_end == 8
        assert ddpm_2.config.beta_start == 88

        # no warning should be thrown
        assert cap_logger.out == ""
        assert cap_logger_2.out == ""

    def test_load_dpmsolver(self):
        logger = logging.get_logger("diffusers.configuration_utils")
        # 30 = logging.WARNING
        logger.setLevel(30)

        with CaptureLogger(logger) as cap_logger:
            dpm = DPMSolverMultistepScheduler.from_pretrained(
                "hf-internal-testing/tiny-stable-diffusion-torch", subfolder="scheduler"
            )

        assert dpm.__class__ == DPMSolverMultistepScheduler
        # no warning should be thrown
        assert cap_logger.out == ""

    def test_use_default_values(self):
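        # `_use_default_values` records which config keys were filled from signature
        # defaults rather than passed explicitly; a class loading the config later
        # re-applies *its own* defaults for those keys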
        # let's first save a config that should be in the form
        #    a=2,
        #    b=5,
        #    c=(2, 5),
        #    d="for diffusion",
        #    e=[1, 3],

        config = SampleObject()

        config_dict = {k: v for k, v in config.config.items() if not k.startswith("_")}

        # make sure that default config has all keys in `_use_default_values`
        assert set(config_dict.keys()) == set(config.config._use_default_values)

        with tempfile.TemporaryDirectory() as tmpdirname:
            config.save_config(tmpdirname)

            # now loading it with SampleObject2 should put f into `_use_default_values`
            config = SampleObject2.from_config(SampleObject2.load_config(tmpdirname))

            assert "f" in config._use_default_values
            assert config.f == [1, 3]

        # now loading the config should **NOT** use [1, 3] for `f`, but SampleObject4's
        # default [5, 4] value, **BECAUSE** `f` is part of `config._use_default_values`
        new_config = SampleObject4.from_config(config.config)
        assert new_config.f == [5, 4]

        config.config._use_default_values.pop()
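        # with `f` removed from `_use_default_values`, the stored [1, 3] is honored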
        new_config_2 = SampleObject4.from_config(config.config)
        assert new_config_2.f == [1, 3]

        # Nevertheless, "e" should still be correctly loaded as [1, 3] from SampleObject2 instead of defaulting to [1, 5]
        assert new_config_2.e == [1, 3]