Unverified Commit 20e92586 authored by Patrick von Platen, committed by GitHub

0.20.0dev0 (#4299)

* 0.20.0dev0

* make style
parent 5623ea06
@@ -33,7 +33,7 @@ from diffusers.utils import check_min_version
 # Will error if the minimal version of diffusers is not installed. Remove at your own risks.
-check_min_version("0.19.0.dev0")
+check_min_version("0.20.0.dev0")
 logger = logging.getLogger(__name__)
...
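The same version-pin bump is repeated across the example training scripts below. As a minimal usage sketch (not part of this diff), the guard sits near the top of a script so it fails fast when an older diffusers is installed:

from diffusers.utils import check_min_version

# Raises an ImportError if the installed diffusers is older than the pinned dev version,
# instead of failing later with a harder-to-diagnose API mismatch.
check_min_version("0.20.0.dev0")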
@@ -48,7 +48,7 @@ from diffusers.utils.import_utils import is_xformers_available
 # Will error if the minimal version of diffusers is not installed. Remove at your own risks.
-check_min_version("0.19.0.dev0")
+check_min_version("0.20.0.dev0")
 logger = get_logger(__name__, log_level="INFO")
...
@@ -78,7 +78,7 @@ else:
 # Will error if the minimal version of diffusers is not installed. Remove at your own risks.
-check_min_version("0.19.0.dev0")
+check_min_version("0.20.0.dev0")
 logger = get_logger(__name__)
...
@@ -56,7 +56,7 @@ else:
 # ------------------------------------------------------------------------------
 # Will error if the minimal version of diffusers is not installed. Remove at your own risks.
-check_min_version("0.19.0.dev0")
+check_min_version("0.20.0.dev0")
 logger = logging.getLogger(__name__)
...
@@ -30,7 +30,7 @@ from diffusers.utils.import_utils import is_xformers_available
 # Will error if the minimal version of diffusers is not installed. Remove at your own risks.
-check_min_version("0.19.0.dev0")
+check_min_version("0.20.0.dev0")
 logger = get_logger(__name__, log_level="INFO")
...
@@ -233,7 +233,7 @@ install_requires = [
 setup(
     name="diffusers",
-    version="0.19.0.dev0",  # expected format is one of x.y.z.dev0, or x.y.z.rc1 or x.y.z (no to dashes, yes to dots)
+    version="0.20.0.dev0",  # expected format is one of x.y.z.dev0, or x.y.z.rc1 or x.y.z (no to dashes, yes to dots)
     description="Diffusers",
     long_description=open("README.md", "r", encoding="utf-8").read(),
     long_description_content_type="text/markdown",
...
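As a hypothetical illustration of the version-format comment above (this check is not part of the repository), the accepted formats could be validated like this:

import re

# Accepts x.y.z, x.y.z.devN, and x.y.z.rcN -- dots only, no dashes.
VERSION_RE = re.compile(r"^\d+\.\d+\.\d+(\.dev\d+|\.rc\d+)?$")

assert VERSION_RE.match("0.20.0.dev0")
assert VERSION_RE.match("0.20.0.rc1")
assert VERSION_RE.match("0.20.0")
assert not VERSION_RE.match("0.20.0-dev0")  # dashes are rejected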
__version__ = "0.19.0.dev0" __version__ = "0.20.0.dev0"
from .configuration_utils import ConfigMixin from .configuration_utils import ConfigMixin
from .utils import ( from .utils import (
......
@@ -189,7 +189,7 @@ class UNet2DConditionLoadersMixin:
         r"""
         Load pretrained attention processor layers into [`UNet2DConditionModel`]. Attention processor layers have to be
         defined in
-        [`cross_attention.py`](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py)
+        [`attention_processor.py`](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py)
         and be a `torch.nn.Module` class.
         Parameters:
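A minimal usage sketch of the loader this docstring describes; the model id and the weights directory are placeholders, and the directory is assumed to hold attention-processor (e.g. LoRA) weights saved with `save_attn_procs`:

from diffusers import UNet2DConditionModel

unet = UNet2DConditionModel.from_pretrained(
    "runwayml/stable-diffusion-v1-5", subfolder="unet"  # placeholder base model
)
# Loads the saved attention processors into the UNet's attention layers.
unet.load_attn_procs("./attn_procs")  # placeholder directory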
...
@@ -444,7 +444,6 @@ class UNet2DConditionLoadersMixin:
         weight_name: str = None,
         save_function: Callable = None,
         safe_serialization: bool = False,
-        **kwargs,
     ):
         r"""
         Save an attention processor to a directory so that it can be reloaded using the
...
@@ -468,12 +467,6 @@ class UNet2DConditionLoadersMixin:
             CustomDiffusionXFormersAttnProcessor,
         )
-        weight_name = weight_name or deprecate(
-            "weights_name",
-            "0.20.0",
-            "`weights_name` is deprecated, please use `weight_name` instead.",
-            take_from=kwargs,
-        )
         if os.path.isfile(save_directory):
             logger.error(f"Provided path ({save_directory}) should be a directory, not a file")
             return
...
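A minimal sketch of the updated `save_attn_procs` call, which now takes `weight_name` only (no `**kwargs` and no deprecated `weights_name` alias); `unet` is assumed to be a `UNet2DConditionModel` with custom attention processors attached (e.g. from the loading sketch above), and the directory and file name are placeholders:

unet.save_attn_procs(
    "./attn_procs",                          # placeholder output directory
    weight_name="pytorch_lora_weights.bin",  # placeholder file name
    safe_serialization=False,
)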
# Copyright 2023 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from ..utils import deprecate
from .attention_processor import (  # noqa: F401
    Attention,
    AttentionProcessor,
    AttnAddedKVProcessor,
    AttnProcessor2_0,
    LoRAAttnProcessor,
    LoRALinearLayer,
    LoRAXFormersAttnProcessor,
    SlicedAttnAddedKVProcessor,
    SlicedAttnProcessor,
    XFormersAttnProcessor,
)
from .attention_processor import AttnProcessor as AttnProcessorRename  # noqa: F401


deprecate(
    "cross_attention",
    "0.20.0",
    "Importing from cross_attention is deprecated. Please import from diffusers.models.attention_processor instead.",
    standard_warn=False,
)


AttnProcessor = AttentionProcessor


class CrossAttention(Attention):
    def __init__(self, *args, **kwargs):
        deprecation_message = f"{self.__class__.__name__} is deprecated and will be removed in `0.20.0`. Please use `from diffusers.models.attention_processor import {''.join(self.__class__.__name__.split('Cross'))} instead."
        deprecate("cross_attention", "0.20.0", deprecation_message, standard_warn=False)
        super().__init__(*args, **kwargs)


class CrossAttnProcessor(AttnProcessorRename):
    def __init__(self, *args, **kwargs):
        deprecation_message = f"{self.__class__.__name__} is deprecated and will be removed in `0.20.0`. Please use `from diffusers.models.attention_processor import {''.join(self.__class__.__name__.split('Cross'))} instead."
        deprecate("cross_attention", "0.20.0", deprecation_message, standard_warn=False)
        super().__init__(*args, **kwargs)


class LoRACrossAttnProcessor(LoRAAttnProcessor):
    def __init__(self, *args, **kwargs):
        deprecation_message = f"{self.__class__.__name__} is deprecated and will be removed in `0.20.0`. Please use `from diffusers.models.attention_processor import {''.join(self.__class__.__name__.split('Cross'))} instead."
        deprecate("cross_attention", "0.20.0", deprecation_message, standard_warn=False)
        super().__init__(*args, **kwargs)


class CrossAttnAddedKVProcessor(AttnAddedKVProcessor):
    def __init__(self, *args, **kwargs):
        deprecation_message = f"{self.__class__.__name__} is deprecated and will be removed in `0.20.0`. Please use `from diffusers.models.attention_processor import {''.join(self.__class__.__name__.split('Cross'))} instead."
        deprecate("cross_attention", "0.20.0", deprecation_message, standard_warn=False)
        super().__init__(*args, **kwargs)


class XFormersCrossAttnProcessor(XFormersAttnProcessor):
    def __init__(self, *args, **kwargs):
        deprecation_message = f"{self.__class__.__name__} is deprecated and will be removed in `0.20.0`. Please use `from diffusers.models.attention_processor import {''.join(self.__class__.__name__.split('Cross'))} instead."
        deprecate("cross_attention", "0.20.0", deprecation_message, standard_warn=False)
        super().__init__(*args, **kwargs)


class LoRAXFormersCrossAttnProcessor(LoRAXFormersAttnProcessor):
    def __init__(self, *args, **kwargs):
        deprecation_message = f"{self.__class__.__name__} is deprecated and will be removed in `0.20.0`. Please use `from diffusers.models.attention_processor import {''.join(self.__class__.__name__.split('Cross'))} instead."
        deprecate("cross_attention", "0.20.0", deprecation_message, standard_warn=False)
        super().__init__(*args, **kwargs)


class SlicedCrossAttnProcessor(SlicedAttnProcessor):
    def __init__(self, *args, **kwargs):
        deprecation_message = f"{self.__class__.__name__} is deprecated and will be removed in `0.20.0`. Please use `from diffusers.models.attention_processor import {''.join(self.__class__.__name__.split('Cross'))} instead."
        deprecate("cross_attention", "0.20.0", deprecation_message, standard_warn=False)
        super().__init__(*args, **kwargs)


class SlicedCrossAttnAddedKVProcessor(SlicedAttnAddedKVProcessor):
    def __init__(self, *args, **kwargs):
        deprecation_message = f"{self.__class__.__name__} is deprecated and will be removed in `0.20.0`. Please use `from diffusers.models.attention_processor import {''.join(self.__class__.__name__.split('Cross'))} instead."
        deprecate("cross_attention", "0.20.0", deprecation_message, standard_warn=False)
        super().__init__(*args, **kwargs)
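The shim above only re-exports classes from `attention_processor` behind a deprecation warning, so migrating code is a one-line import change. A minimal sketch (the processor class is just one example):

# Deprecated path (goes away together with this shim):
# from diffusers.models.cross_attention import CrossAttnProcessor

# Current path:
from diffusers.models.attention_processor import AttnProcessor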
@@ -585,7 +585,7 @@ class AltDiffusionPipeline(DiffusionPipeline, TextualInversionLoaderMixin, LoraL
                 every step.
             cross_attention_kwargs (`dict`, *optional*):
                 A kwargs dictionary that if specified is passed along to the [`AttentionProcessor`] as defined in
-                [`self.processor`](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).
+                [`self.processor`](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py).
             guidance_rescale (`float`, *optional*, defaults to 0.7):
                 Guidance rescale factor from [Common Diffusion Noise Schedules and Sample Steps are
                 Flawed](https://arxiv.org/pdf/2305.08891.pdf). Guidance rescale factor should fix overexposure when
...
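A minimal call-time sketch of `cross_attention_kwargs`; the checkpoint id is an example, and `"scale"` is the commonly used key, read by the LoRA attention processors if any are loaded:

import torch
from diffusers import AltDiffusionPipeline

pipe = AltDiffusionPipeline.from_pretrained(
    "BAAI/AltDiffusion-m9", torch_dtype=torch.float16  # example checkpoint
).to("cuda")

# The dict is forwarded to the attention processors at every denoising step.
image = pipe(
    "a photograph of an astronaut riding a horse",
    cross_attention_kwargs={"scale": 0.5},
).images[0]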
@@ -634,7 +634,7 @@ class AltDiffusionImg2ImgPipeline(
                 every step.
             cross_attention_kwargs (`dict`, *optional*):
                 A kwargs dictionary that if specified is passed along to the [`AttentionProcessor`] as defined in
-                [`self.processor`](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).
+                [`self.processor`](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py).
         Examples:
...
@@ -428,7 +428,7 @@ class AudioLDMPipeline(DiffusionPipeline):
                 every step.
             cross_attention_kwargs (`dict`, *optional*):
                 A kwargs dictionary that if specified is passed along to the [`AttentionProcessor`] as defined in
-                [`self.processor`](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).
+                [`self.processor`](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py).
             output_type (`str`, *optional*, defaults to `"np"`):
                 The output format of the generated image. Choose between `"np"` to return a NumPy `np.ndarray` or
                 `"pt"` to return a PyTorch `torch.Tensor` object.
...
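A minimal sketch of the `output_type` options described above for AudioLDM; the checkpoint id is an example:

from diffusers import AudioLDMPipeline

pipe = AudioLDMPipeline.from_pretrained("cvssp/audioldm-s-full-v2")  # example checkpoint

# output_type="np" (the default) yields NumPy waveforms under `.audios`;
# output_type="pt" would return a torch.Tensor instead.
audio = pipe(
    "a hammer hitting a wooden surface",
    num_inference_steps=10,
    audio_length_in_s=5.0,
    output_type="np",
).audios[0]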
@@ -760,7 +760,7 @@ class StableDiffusionControlNetPipeline(
             cross_attention_kwargs (`dict`, *optional*):
                 A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under
                 `self.processor` in
-                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).
+                [diffusers.models.attention_processor](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py).
             controlnet_conditioning_scale (`float` or `List[float]`, *optional*, defaults to 1.0):
                 The outputs of the controlnet are multiplied by `controlnet_conditioning_scale` before they are added
                 to the residual in the original unet. If multiple ControlNets are specified in init, you can set the
...
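A minimal sketch of per-ControlNet scaling as described above; the checkpoint ids are public examples and the conditioning images are stand-ins (real use passes detector outputs such as Canny edges and OpenPose maps):

import torch
from PIL import Image
from diffusers import ControlNetModel, StableDiffusionControlNetPipeline

canny_image = Image.new("RGB", (512, 512))     # stand-in conditioning images
openpose_image = Image.new("RGB", (512, 512))

controlnets = [
    ControlNetModel.from_pretrained("lllyasviel/sd-controlnet-canny", torch_dtype=torch.float16),
    ControlNetModel.from_pretrained("lllyasviel/sd-controlnet-openpose", torch_dtype=torch.float16),
]
pipe = StableDiffusionControlNetPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5", controlnet=controlnets, torch_dtype=torch.float16
).to("cuda")

# One scale per ControlNet: each ControlNet's residuals are multiplied by its scale
# before being added to the UNet residuals.
image = pipe(
    "a futuristic city at night",
    image=[canny_image, openpose_image],
    controlnet_conditioning_scale=[1.0, 0.5],
).images[0]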
@@ -851,7 +851,7 @@ class StableDiffusionControlNetImg2ImgPipeline(
             cross_attention_kwargs (`dict`, *optional*):
                 A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under
                 `self.processor` in
-                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).
+                [diffusers.models.attention_processor](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py).
             controlnet_conditioning_scale (`float` or `List[float]`, *optional*, defaults to 1.0):
                 The outputs of the controlnet are multiplied by `controlnet_conditioning_scale` before they are added
                 to the residual in the original unet. If multiple ControlNets are specified in init, you can set the
...
@@ -1048,7 +1048,7 @@ class StableDiffusionControlNetInpaintPipeline(
             cross_attention_kwargs (`dict`, *optional*):
                 A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under
                 `self.processor` in
-                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).
+                [diffusers.models.attention_processor](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py).
             controlnet_conditioning_scale (`float` or `List[float]`, *optional*, defaults to 0.5):
                 The outputs of the controlnet are multiplied by `controlnet_conditioning_scale` before they are added
                 to the residual in the original unet. If multiple ControlNets are specified in init, you can set the
...
@@ -741,7 +741,7 @@ class StableDiffusionXLControlNetPipeline(DiffusionPipeline, TextualInversionLoa
             cross_attention_kwargs (`dict`, *optional*):
                 A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under
                 `self.processor` in
-                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).
+                [diffusers.models.attention_processor](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py).
             controlnet_conditioning_scale (`float` or `List[float]`, *optional*, defaults to 1.0):
                 The outputs of the controlnet are multiplied by `controlnet_conditioning_scale` before they are added
                 to the residual in the original unet. If multiple ControlNets are specified in init, you can set the
...
@@ -662,7 +662,7 @@ class IFPipeline(DiffusionPipeline, LoraLoaderMixin):
             cross_attention_kwargs (`dict`, *optional*):
                 A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under
                 `self.processor` in
-                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).
+                [diffusers.models.attention_processor](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py).
         Examples:
...
@@ -783,7 +783,7 @@ class IFImg2ImgPipeline(DiffusionPipeline, LoraLoaderMixin):
             cross_attention_kwargs (`dict`, *optional*):
                 A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under
                 `self.processor` in
-                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).
+                [diffusers.models.attention_processor](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py).
         Examples:
...
@@ -865,7 +865,7 @@ class IFImg2ImgSuperResolutionPipeline(DiffusionPipeline, LoraLoaderMixin):
             cross_attention_kwargs (`dict`, *optional*):
                 A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under
                 `self.processor` in
-                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).
+                [diffusers.models.attention_processor](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py).
             noise_level (`int`, *optional*, defaults to 250):
                 The amount of noise to add to the upscaled image. Must be in the range `[0, 1000)`
             clean_caption (`bool`, *optional*, defaults to `True`):
...
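A minimal sketch of `noise_level` for the IF stage-II img2img pipeline; the checkpoint id is the public DeepFloyd example, and the input images are stand-ins for a real stage-I output and the original image:

import torch
from PIL import Image
from diffusers import IFImg2ImgSuperResolutionPipeline

stage1_output = Image.new("RGB", (64, 64))      # stand-in for the 64x64 stage-I result
original_image = Image.new("RGB", (256, 256))   # stand-in for the original input image

pipe = IFImg2ImgSuperResolutionPipeline.from_pretrained(
    "DeepFloyd/IF-II-L-v1.0", variant="fp16", torch_dtype=torch.float16  # example checkpoint
)

# noise_level controls how much noise is added to the upscaled image; valid range is [0, 1000).
upscaled = pipe(
    prompt="a photo of a red panda",
    image=stage1_output,
    original_image=original_image,
    noise_level=250,
).images[0]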
@@ -883,7 +883,7 @@ class IFInpaintingPipeline(DiffusionPipeline, LoraLoaderMixin):
             cross_attention_kwargs (`dict`, *optional*):
                 A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under
                 `self.processor` in
-                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).
+                [diffusers.models.attention_processor](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py).
         Examples:
...