Commit 9a95414e authored by Patrick von Platen

Bump to v0.5.0dev0

parent 91ddd2a2
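The hunks below bump the package version to `0.5.0.dev0` and push the removal targets of several `deprecate(...)` calls out to `0.6.0` or `0.7.0`. For context, here is a rough, illustrative stand-in for such a helper, inferred only from the call sites visible in this diff (deprecated name, removal version, message, optional `take_from=kwargs`); it is not the actual diffusers implementation.

```python
import warnings


def deprecate(name, removal_version, message, take_from=None):
    """Illustrative sketch only; not the real diffusers helper.

    Warns that `name` is deprecated and, if `take_from` is given, pops and
    returns the deprecated keyword argument so the caller can keep honoring it.
    """
    value = None
    if take_from is not None:
        value = take_from.pop(name, None)
        if value is None:
            # The deprecated argument was not passed at all: stay silent.
            return None
    warnings.warn(
        f"`{name}` is deprecated and will be removed in version {removal_version}. {message}",
        FutureWarning,
    )
    return value
```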
@@ -211,7 +211,7 @@ install_requires = [
 setup(
     name="diffusers",
-    version="0.4.1",  # expected format is one of x.y.z.dev0, or x.y.z.rc1 or x.y.z (no to dashes, yes to dots)
+    version="0.5.0.dev0",  # expected format is one of x.y.z.dev0, or x.y.z.rc1 or x.y.z (no to dashes, yes to dots)
     description="Diffusers",
     long_description=open("README.md", "r", encoding="utf-8").read(),
     long_description_content_type="text/markdown",
......
@@ -9,7 +9,7 @@ from .utils import (
 )
-__version__ = "0.4.1"
+__version__ = "0.5.0.dev0"
 from .configuration_utils import ConfigMixin
 from .onnx_utils import OnnxRuntimeModel
......
@@ -123,7 +123,7 @@ class DDIMScheduler(SchedulerMixin, ConfigMixin):
     ):
         deprecate(
             "tensor_format",
-            "0.5.0",
+            "0.6.0",
             "If you're running your code in PyTorch, you can safely remove this argument.",
             take_from=kwargs,
         )
@@ -192,7 +192,7 @@ class DDIMScheduler(SchedulerMixin, ConfigMixin):
                 the number of diffusion steps used when generating samples with a pre-trained model.
         """
         deprecated_offset = deprecate(
-            "offset", "0.5.0", "Please pass `steps_offset` to `__init__` instead.", take_from=kwargs
+            "offset", "0.7.0", "Please pass `steps_offset` to `__init__` instead.", take_from=kwargs
         )
         offset = deprecated_offset or self.config.steps_offset
......
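The `offset` deprecation above (and the identical one in `PNDMScheduler` further down) points callers at the `steps_offset` constructor argument instead. A minimal sketch of that migration, assuming default values for the remaining constructor arguments:

```python
from diffusers import DDIMScheduler

# Previously the shift was passed per call: scheduler.set_timesteps(50, offset=1)
scheduler = DDIMScheduler(steps_offset=1)
scheduler.set_timesteps(50)
```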
@@ -116,7 +116,7 @@ class DDPMScheduler(SchedulerMixin, ConfigMixin):
     ):
         deprecate(
             "tensor_format",
-            "0.5.0",
+            "0.6.0",
             "If you're running your code in PyTorch, you can safely remove this argument.",
             take_from=kwargs,
         )
......
@@ -90,7 +90,7 @@ class KarrasVeScheduler(SchedulerMixin, ConfigMixin):
     ):
         deprecate(
             "tensor_format",
-            "0.5.0",
+            "0.6.0",
             "If you're running your code in PyTorch, you can safely remove this argument.",
             take_from=kwargs,
         )
......
@@ -78,7 +78,7 @@ class LMSDiscreteScheduler(SchedulerMixin, ConfigMixin):
     ):
         deprecate(
             "tensor_format",
-            "0.5.0",
+            "0.6.0",
             "If you're running your code in PyTorch, you can safely remove this argument.",
             take_from=kwargs,
         )
@@ -217,7 +217,7 @@ class LMSDiscreteScheduler(SchedulerMixin, ConfigMixin):
         ):
             deprecate(
                 "timestep as an index",
-                "0.5.0",
+                "0.7.0",
                 "Passing integer indices (e.g. from `enumerate(timesteps)`) as timesteps to"
                 " `LMSDiscreteScheduler.step()` will not be supported in future versions. Make sure to pass"
                 " one of the `scheduler.timesteps` as a timestep.",
@@ -263,7 +263,7 @@ class LMSDiscreteScheduler(SchedulerMixin, ConfigMixin):
         if isinstance(timesteps, torch.IntTensor) or isinstance(timesteps, torch.LongTensor):
             deprecate(
                 "timesteps as indices",
-                "0.5.0",
+                "0.7.0",
                 "Passing integer indices (e.g. from `enumerate(timesteps)`) as timesteps to"
                 " `LMSDiscreteScheduler.add_noise()` will not be supported in future versions. Make sure to"
                 " pass values from `scheduler.timesteps` as timesteps.",
......
@@ -104,7 +104,7 @@ class PNDMScheduler(SchedulerMixin, ConfigMixin):
     ):
         deprecate(
             "tensor_format",
-            "0.5.0",
+            "0.6.0",
             "If you're running your code in PyTorch, you can safely remove this argument.",
             take_from=kwargs,
         )
@@ -159,7 +159,7 @@ class PNDMScheduler(SchedulerMixin, ConfigMixin):
                 the number of diffusion steps used when generating samples with a pre-trained model.
         """
         deprecated_offset = deprecate(
-            "offset", "0.5.0", "Please pass `steps_offset` to `__init__` instead.", take_from=kwargs
+            "offset", "0.7.0", "Please pass `steps_offset` to `__init__` instead.", take_from=kwargs
         )
         offset = deprecated_offset or self.config.steps_offset
......
@@ -79,7 +79,7 @@ class ScoreSdeVeScheduler(SchedulerMixin, ConfigMixin):
     ):
         deprecate(
             "tensor_format",
-            "0.5.0",
+            "0.6.0",
             "If you're running your code in PyTorch, you can safely remove this argument.",
             take_from=kwargs,
         )
@@ -156,10 +156,6 @@ class ScoreSdeVeScheduler(SchedulerMixin, ConfigMixin):
             self.discrete_sigmas[timesteps - 1].to(timesteps.device),
         )
-    def set_seed(self, seed):
-        deprecate("set_seed", "0.5.0", "Please consider passing a generator instead.")
-        torch.manual_seed(seed)
     def step_pred(
         self,
         model_output: torch.FloatTensor,
@@ -167,7 +163,6 @@ class ScoreSdeVeScheduler(SchedulerMixin, ConfigMixin):
         sample: torch.FloatTensor,
         generator: Optional[torch.Generator] = None,
         return_dict: bool = True,
-        **kwargs,
     ) -> Union[SdeVeOutput, Tuple]:
         """
         Predict the sample at the previous timestep by reversing the SDE. Core function to propagate the diffusion
@@ -186,9 +181,6 @@ class ScoreSdeVeScheduler(SchedulerMixin, ConfigMixin):
            `return_dict` is True, otherwise a `tuple`. When returning a tuple, the first element is the sample tensor.
         """
-        if "seed" in kwargs and kwargs["seed"] is not None:
-            self.set_seed(kwargs["seed"])
         if self.timesteps is None:
             raise ValueError(
                 "`self.timesteps` is not set, you need to run 'set_timesteps' after creating the scheduler"
@@ -231,7 +223,6 @@ class ScoreSdeVeScheduler(SchedulerMixin, ConfigMixin):
         sample: torch.FloatTensor,
         generator: Optional[torch.Generator] = None,
         return_dict: bool = True,
-        **kwargs,
     ) -> Union[SchedulerOutput, Tuple]:
         """
         Correct the predicted sample based on the output model_output of the network. This is often run repeatedly
@@ -249,9 +240,6 @@ class ScoreSdeVeScheduler(SchedulerMixin, ConfigMixin):
            `return_dict` is True, otherwise a `tuple`. When returning a tuple, the first element is the sample tensor.
         """
-        if "seed" in kwargs and kwargs["seed"] is not None:
-            self.set_seed(kwargs["seed"])
         if self.timesteps is None:
             raise ValueError(
                 "`self.timesteps` is not set, you need to run 'set_timesteps' after creating the scheduler"
......
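The `ScoreSdeVeScheduler` hunks above drop the `seed`/`set_seed` path in favor of the `generator` argument that `step_pred` and `step_correct` already accept. A minimal sketch of the replacement pattern; only the `generator` construction is taken from this diff, and the commented call shapes are placeholders rather than the exact signatures.

```python
import torch

# Build an explicit RNG instead of relying on the removed global torch.manual_seed(seed).
generator = torch.Generator(device="cpu").manual_seed(0)

# Placeholder call shapes; consult the real signatures for the positional arguments:
# output = scheduler.step_pred(model_output, t, sample, generator=generator)
# output = scheduler.step_correct(model_output, sample, generator=generator)
```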
@@ -43,7 +43,7 @@ class ScoreSdeVpScheduler(SchedulerMixin, ConfigMixin):
     def __init__(self, num_train_timesteps=2000, beta_min=0.1, beta_max=20, sampling_eps=1e-3, **kwargs):
         deprecate(
             "tensor_format",
-            "0.5.0",
+            "0.6.0",
             "If you're running your code in PyTorch, you can safely remove this argument.",
             take_from=kwargs,
         )
......
@@ -45,7 +45,7 @@ class SchedulerMixin:
     def set_format(self, tensor_format="pt"):
         deprecate(
             "set_format",
-            "0.5.0",
+            "0.6.0",
             "If you're running your code in PyTorch, you can safely remove this function as the schedulers are always"
             " in Pytorch",
         )
......
@@ -11,7 +11,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import warnings
 from dataclasses import dataclass
 import jax.numpy as jnp
@@ -42,12 +41,3 @@ class FlaxSchedulerMixin:
     """
     config_name = SCHEDULER_CONFIG_NAME
-    def set_format(self, tensor_format="pt"):
-        warnings.warn(
-            "The method `set_format` is deprecated and will be removed in version `0.5.0`."
-            "If you're running your code in PyTorch, you can safely remove this function as the schedulers"
-            "are always in Pytorch",
-            DeprecationWarning,
-        )
-        return self