"src/git@developer.sourcefind.cn:renzhc/diffusers_dcu.git" did not exist on "eef2327a47aabd9c8f5d5a26d73b8c842509b55a"
Commit 9a95414e authored by Patrick von Platen

Bump to v0.5.0dev0

parent 91ddd2a2
@@ -211,7 +211,7 @@ install_requires = [

 setup(
     name="diffusers",
-    version="0.4.1",  # expected format is one of x.y.z.dev0, or x.y.z.rc1 or x.y.z (no to dashes, yes to dots)
+    version="0.5.0.dev0",  # expected format is one of x.y.z.dev0, or x.y.z.rc1 or x.y.z (no to dashes, yes to dots)
     description="Diffusers",
     long_description=open("README.md", "r", encoding="utf-8").read(),
     long_description_content_type="text/markdown",
...
@@ -9,7 +9,7 @@ from .utils import (
 )


-__version__ = "0.4.1"
+__version__ = "0.5.0.dev0"

 from .configuration_utils import ConfigMixin
 from .onnx_utils import OnnxRuntimeModel
...
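For reference, a quick way to confirm the bump in an installed (e.g. editable) copy of the library; this snippet is illustrative and not part of the commit:

```python
import diffusers

# After this commit, a source install reports the new dev version string.
print(diffusers.__version__)  # expected: "0.5.0.dev0"
```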
@@ -123,7 +123,7 @@ class DDIMScheduler(SchedulerMixin, ConfigMixin):
     ):
         deprecate(
             "tensor_format",
-            "0.5.0",
+            "0.6.0",
             "If you're running your code in PyTorch, you can safely remove this argument.",
             take_from=kwargs,
         )
@@ -192,7 +192,7 @@ class DDIMScheduler(SchedulerMixin, ConfigMixin):
                 the number of diffusion steps used when generating samples with a pre-trained model.

         """
         deprecated_offset = deprecate(
-            "offset", "0.5.0", "Please pass `steps_offset` to `__init__` instead.", take_from=kwargs
+            "offset", "0.7.0", "Please pass `steps_offset` to `__init__` instead.", take_from=kwargs
         )
         offset = deprecated_offset or self.config.steps_offset
...
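The deprecation message above tells callers to configure `steps_offset` at construction time rather than passing `offset` to `set_timesteps`. A minimal migration sketch; the beta values below are illustrative, not taken from this commit:

```python
from diffusers import DDIMScheduler

# old (deprecated): scheduler.set_timesteps(50, offset=1)
# new: pass `steps_offset` once, when the scheduler is created.
scheduler = DDIMScheduler(
    beta_start=0.00085,
    beta_end=0.012,
    beta_schedule="scaled_linear",
    steps_offset=1,  # replaces the old `offset` kwarg of set_timesteps()
)
scheduler.set_timesteps(50)
```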
@@ -116,7 +116,7 @@ class DDPMScheduler(SchedulerMixin, ConfigMixin):
     ):
         deprecate(
             "tensor_format",
-            "0.5.0",
+            "0.6.0",
             "If you're running your code in PyTorch, you can safely remove this argument.",
             take_from=kwargs,
         )
...
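For user code, the suggested fix in these `tensor_format` deprecations (here and in the scheduler diffs below) is the same: drop the argument, since the schedulers are PyTorch-only. A minimal sketch; the scheduler and its arguments are just an example:

```python
from diffusers import DDPMScheduler

# old (deprecated, removal now scheduled for 0.6.0):
# scheduler = DDPMScheduler(num_train_timesteps=1000, tensor_format="pt")

# new: simply omit `tensor_format`; the schedulers always work with PyTorch tensors.
scheduler = DDPMScheduler(num_train_timesteps=1000)
```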
@@ -90,7 +90,7 @@ class KarrasVeScheduler(SchedulerMixin, ConfigMixin):
     ):
         deprecate(
             "tensor_format",
-            "0.5.0",
+            "0.6.0",
             "If you're running your code in PyTorch, you can safely remove this argument.",
             take_from=kwargs,
         )
...
@@ -78,7 +78,7 @@ class LMSDiscreteScheduler(SchedulerMixin, ConfigMixin):
     ):
         deprecate(
             "tensor_format",
-            "0.5.0",
+            "0.6.0",
             "If you're running your code in PyTorch, you can safely remove this argument.",
             take_from=kwargs,
         )
@@ -217,7 +217,7 @@ class LMSDiscreteScheduler(SchedulerMixin, ConfigMixin):
         ):
             deprecate(
                 "timestep as an index",
-                "0.5.0",
+                "0.7.0",
                 "Passing integer indices (e.g. from `enumerate(timesteps)`) as timesteps to"
                 " `LMSDiscreteScheduler.step()` will not be supported in future versions. Make sure to pass"
                 " one of the `scheduler.timesteps` as a timestep.",
@@ -263,7 +263,7 @@ class LMSDiscreteScheduler(SchedulerMixin, ConfigMixin):
         if isinstance(timesteps, torch.IntTensor) or isinstance(timesteps, torch.LongTensor):
             deprecate(
                 "timesteps as indices",
-                "0.5.0",
+                "0.7.0",
                 "Passing integer indices (e.g. from `enumerate(timesteps)`) as timesteps to"
                 " `LMSDiscreteScheduler.add_noise()` will not be supported in future versions. Make sure to"
                 " pass values from `scheduler.timesteps` as timesteps.",
...
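The two `LMSDiscreteScheduler` warnings above ask callers to pass the actual timestep values rather than `enumerate()` indices. A minimal sketch of the recommended loop, using random tensors in place of a real model (all shapes and values are illustrative):

```python
import torch

from diffusers import LMSDiscreteScheduler

scheduler = LMSDiscreteScheduler(beta_start=0.00085, beta_end=0.012, beta_schedule="scaled_linear")
scheduler.set_timesteps(10)

sample = torch.randn(1, 4, 8, 8)
for t in scheduler.timesteps:  # iterate over the timestep values themselves...
    model_output = torch.randn_like(sample)  # stand-in for a real model prediction
    # ...and pass `t`, not the loop index, to step()
    sample = scheduler.step(model_output, t, sample).prev_sample
```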
@@ -104,7 +104,7 @@ class PNDMScheduler(SchedulerMixin, ConfigMixin):
     ):
         deprecate(
             "tensor_format",
-            "0.5.0",
+            "0.6.0",
             "If you're running your code in PyTorch, you can safely remove this argument.",
             take_from=kwargs,
         )
@@ -159,7 +159,7 @@ class PNDMScheduler(SchedulerMixin, ConfigMixin):
                 the number of diffusion steps used when generating samples with a pre-trained model.

         """
         deprecated_offset = deprecate(
-            "offset", "0.5.0", "Please pass `steps_offset` to `__init__` instead.", take_from=kwargs
+            "offset", "0.7.0", "Please pass `steps_offset` to `__init__` instead.", take_from=kwargs
         )
         offset = deprecated_offset or self.config.steps_offset
...
@@ -79,7 +79,7 @@ class ScoreSdeVeScheduler(SchedulerMixin, ConfigMixin):
     ):
         deprecate(
             "tensor_format",
-            "0.5.0",
+            "0.6.0",
             "If you're running your code in PyTorch, you can safely remove this argument.",
             take_from=kwargs,
         )
@@ -156,10 +156,6 @@ class ScoreSdeVeScheduler(SchedulerMixin, ConfigMixin):
             self.discrete_sigmas[timesteps - 1].to(timesteps.device),
         )

-    def set_seed(self, seed):
-        deprecate("set_seed", "0.5.0", "Please consider passing a generator instead.")
-        torch.manual_seed(seed)
-
     def step_pred(
         self,
         model_output: torch.FloatTensor,
@@ -167,7 +163,6 @@ class ScoreSdeVeScheduler(SchedulerMixin, ConfigMixin):
         sample: torch.FloatTensor,
         generator: Optional[torch.Generator] = None,
         return_dict: bool = True,
-        **kwargs,
     ) -> Union[SdeVeOutput, Tuple]:
         """
         Predict the sample at the previous timestep by reversing the SDE. Core function to propagate the diffusion
@@ -186,9 +181,6 @@ class ScoreSdeVeScheduler(SchedulerMixin, ConfigMixin):
             `return_dict` is True, otherwise a `tuple`. When returning a tuple, the first element is the sample tensor.

         """
-        if "seed" in kwargs and kwargs["seed"] is not None:
-            self.set_seed(kwargs["seed"])
-
         if self.timesteps is None:
             raise ValueError(
                 "`self.timesteps` is not set, you need to run 'set_timesteps' after creating the scheduler"
@@ -231,7 +223,6 @@ class ScoreSdeVeScheduler(SchedulerMixin, ConfigMixin):
         sample: torch.FloatTensor,
         generator: Optional[torch.Generator] = None,
         return_dict: bool = True,
-        **kwargs,
     ) -> Union[SchedulerOutput, Tuple]:
         """
         Correct the predicted sample based on the output model_output of the network. This is often run repeatedly
@@ -249,9 +240,6 @@ class ScoreSdeVeScheduler(SchedulerMixin, ConfigMixin):
             `return_dict` is True, otherwise a `tuple`. When returning a tuple, the first element is the sample tensor.

         """
-        if "seed" in kwargs and kwargs["seed"] is not None:
-            self.set_seed(kwargs["seed"])
-
         if self.timesteps is None:
             raise ValueError(
                 "`self.timesteps` is not set, you need to run 'set_timesteps' after creating the scheduler"
...
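With the `seed` kwarg and `set_seed` helper removed from `ScoreSdeVeScheduler`, reproducibility goes through the existing `generator` argument instead. A minimal sketch with a dummy model output; shapes and step counts are arbitrary assumptions:

```python
import torch

from diffusers import ScoreSdeVeScheduler

scheduler = ScoreSdeVeScheduler()
scheduler.set_timesteps(5)
scheduler.set_sigmas(5)

generator = torch.Generator().manual_seed(0)  # replaces the removed `seed=...` kwarg
sample = torch.randn(1, 3, 32, 32)
model_output = torch.randn_like(sample)  # stand-in for a real score model output

output = scheduler.step_pred(model_output, scheduler.timesteps[0], sample, generator=generator)
sample = output.prev_sample
```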
@@ -43,7 +43,7 @@ class ScoreSdeVpScheduler(SchedulerMixin, ConfigMixin):
     def __init__(self, num_train_timesteps=2000, beta_min=0.1, beta_max=20, sampling_eps=1e-3, **kwargs):
         deprecate(
             "tensor_format",
-            "0.5.0",
+            "0.6.0",
             "If you're running your code in PyTorch, you can safely remove this argument.",
             take_from=kwargs,
         )
...
@@ -45,7 +45,7 @@ class SchedulerMixin:
     def set_format(self, tensor_format="pt"):
         deprecate(
             "set_format",
-            "0.5.0",
+            "0.6.0",
             "If you're running your code in PyTorch, you can safely remove this function as the schedulers are always"
             " in Pytorch",
         )
...
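`set_format` itself is now scheduled for removal in 0.6.0; downstream code that still calls it can simply drop the call. An illustrative before/after (the scheduler choice is arbitrary):

```python
from diffusers import PNDMScheduler

scheduler = PNDMScheduler()

# old (deprecated): scheduler = scheduler.set_format("pt")
# new: nothing to do -- schedulers always operate on PyTorch tensors.
scheduler.set_timesteps(50)
```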
@@ -11,7 +11,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import warnings
 from dataclasses import dataclass

 import jax.numpy as jnp
@@ -42,12 +41,3 @@ class FlaxSchedulerMixin:
     """

     config_name = SCHEDULER_CONFIG_NAME
-
-    def set_format(self, tensor_format="pt"):
-        warnings.warn(
-            "The method `set_format` is deprecated and will be removed in version `0.5.0`."
-            "If you're running your code in PyTorch, you can safely remove this function as the schedulers"
-            "are always in Pytorch",
-            DeprecationWarning,
-        )
-        return self