Unverified Commit 628f2c54 authored by hlky, committed by GitHub

Use Pipelines without scheduler (#10439)


Co-authored-by: Sayak Paul <spsayakpaul@gmail.com>
parent 811560b1
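
The change is the same in every community pipeline touched below: the scheduler deprecation checks in `__init__` are guarded so the pipeline can be constructed with `scheduler=None`. As a rough usage sketch only — the model ID, the `custom_pipeline` name, and passing `scheduler=None` through `from_pretrained` are illustrative assumptions, not something this commit documents:

import torch
from diffusers import DiffusionPipeline, DDIMScheduler

# Hypothetical example: load a community pipeline without a scheduler.
# After this change the __init__ checks no longer dereference `scheduler.config`,
# so a None scheduler does not raise AttributeError during construction.
pipe = DiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5",          # illustrative model ID
    custom_pipeline="stable_diffusion_mega",   # one of the pipelines touched here
    scheduler=None,
    torch_dtype=torch.float16,
)

# Attach a scheduler before running inference.
pipe.scheduler = DDIMScheduler.from_pretrained(
    "runwayml/stable-diffusion-v1-5", subfolder="scheduler"
)
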
@@ -372,7 +372,7 @@ class AdaptiveMaskInpaintPipeline(
         self.register_adaptive_mask_model()
         self.register_adaptive_mask_settings()
 
-        if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1:
+        if scheduler is not None and getattr(scheduler.config, "steps_offset", 1) != 1:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`"
                 f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure "
@@ -386,7 +386,7 @@ class AdaptiveMaskInpaintPipeline(
             new_config["steps_offset"] = 1
             scheduler._internal_dict = FrozenDict(new_config)
 
-        if hasattr(scheduler.config, "skip_prk_steps") and scheduler.config.skip_prk_steps is False:
+        if scheduler is not None and getattr(scheduler.config, "skip_prk_steps", True) is False:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} has not set the configuration"
                 " `skip_prk_steps`. `skip_prk_steps` should be set to True in the configuration file. Please make"
...
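
Every hunk repeats this two-part rewrite: an explicit `scheduler is not None` check plus `getattr` with the non-deprecated default replaces `hasattr(scheduler.config, ...)`, which would raise before `hasattr` ever ran when no scheduler is passed. A small self-contained sketch of the difference (plain Python, no diffusers imports; the SimpleNamespace stand-in is illustrative):

from types import SimpleNamespace

scheduler = None

# Old guard: evaluating `scheduler.config` raises AttributeError when the
# scheduler is None, before hasattr() is even called.
try:
    hasattr(scheduler.config, "steps_offset")
except AttributeError as exc:
    print(exc)  # 'NoneType' object has no attribute 'config'

# New guard: `is not None` short-circuits, and getattr() supplies the default,
# so a missing attribute no longer needs a separate hasattr() check.
print(scheduler is not None and getattr(scheduler.config, "steps_offset", 1) != 1)  # False

# With a config present, the behaviour matches the old check.
scheduler = SimpleNamespace(config=SimpleNamespace(steps_offset=0))
print(scheduler is not None and getattr(scheduler.config, "steps_offset", 1) != 1)  # True
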
@@ -89,7 +89,7 @@ class ComposableStableDiffusionPipeline(DiffusionPipeline, StableDiffusionMixin)
     ):
         super().__init__()
 
-        if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1:
+        if scheduler is not None and getattr(scheduler.config, "steps_offset", 1) != 1:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`"
                 f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure "
@@ -103,7 +103,7 @@ class ComposableStableDiffusionPipeline(DiffusionPipeline, StableDiffusionMixin)
             new_config["steps_offset"] = 1
             scheduler._internal_dict = FrozenDict(new_config)
 
-        if hasattr(scheduler.config, "clip_sample") and scheduler.config.clip_sample is True:
+        if scheduler is not None and getattr(scheduler.config, "clip_sample", False) is True:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`."
                 " `clip_sample` should be set to False in the configuration file. Please make sure to update the"
...
@@ -95,7 +95,7 @@ class ImageToImageInpaintingPipeline(DiffusionPipeline):
     ):
         super().__init__()
 
-        if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1:
+        if scheduler is not None and getattr(scheduler.config, "steps_offset", 1) != 1:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`"
                 f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure "
...
@@ -109,7 +109,7 @@ class InstaFlowPipeline(
     ):
         super().__init__()
 
-        if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1:
+        if scheduler is not None and getattr(scheduler.config, "steps_offset", 1) != 1:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`"
                 f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure "
@@ -123,7 +123,7 @@ class InstaFlowPipeline(
             new_config["steps_offset"] = 1
             scheduler._internal_dict = FrozenDict(new_config)
 
-        if hasattr(scheduler.config, "clip_sample") and scheduler.config.clip_sample is True:
+        if scheduler is not None and getattr(scheduler.config, "clip_sample", False) is True:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`."
                 " `clip_sample` should be set to False in the configuration file. Please make sure to update the"
...
@@ -86,7 +86,7 @@ class StableDiffusionWalkPipeline(DiffusionPipeline, StableDiffusionMixin):
     ):
         super().__init__()
 
-        if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1:
+        if scheduler is not None and getattr(scheduler.config, "steps_offset", 1) != 1:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`"
                 f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure "
...
@@ -191,7 +191,7 @@ class IPAdapterFaceIDStableDiffusionPipeline(
     ):
         super().__init__()
 
-        if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1:
+        if scheduler is not None and getattr(scheduler.config, "steps_offset", 1) != 1:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`"
                 f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure "
@@ -205,7 +205,7 @@ class IPAdapterFaceIDStableDiffusionPipeline(
             new_config["steps_offset"] = 1
             scheduler._internal_dict = FrozenDict(new_config)
 
-        if hasattr(scheduler.config, "clip_sample") and scheduler.config.clip_sample is True:
+        if scheduler is not None and getattr(scheduler.config, "clip_sample", False) is True:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`."
                 " `clip_sample` should be set to False in the configuration file. Please make sure to update the"
...
@@ -336,7 +336,7 @@ class LLMGroundedDiffusionPipeline(
         # This is copied from StableDiffusionPipeline, with hook initizations for LMD+.
         super().__init__()
 
-        if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1:
+        if scheduler is not None and getattr(scheduler.config, "steps_offset", 1) != 1:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`"
                 f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure "
@@ -350,7 +350,7 @@ class LLMGroundedDiffusionPipeline(
             new_config["steps_offset"] = 1
             scheduler._internal_dict = FrozenDict(new_config)
 
-        if hasattr(scheduler.config, "clip_sample") and scheduler.config.clip_sample is True:
+        if scheduler is not None and getattr(scheduler.config, "clip_sample", False) is True:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`."
                 " `clip_sample` should be set to False in the configuration file. Please make sure to update the"
...
@@ -496,7 +496,7 @@ class StableDiffusionLongPromptWeightingPipeline(
     ):
         super().__init__()
 
-        if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1:
+        if scheduler is not None and getattr(scheduler.config, "steps_offset", 1) != 1:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`"
                 f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure "
@@ -510,7 +510,7 @@ class StableDiffusionLongPromptWeightingPipeline(
             new_config["steps_offset"] = 1
             scheduler._internal_dict = FrozenDict(new_config)
 
-        if hasattr(scheduler.config, "clip_sample") and scheduler.config.clip_sample is True:
+        if scheduler is not None and getattr(scheduler.config, "clip_sample", False) is True:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`."
                 " `clip_sample` should be set to False in the configuration file. Please make sure to update the"
...
@@ -3766,7 +3766,7 @@ class MatryoshkaPipeline(
         else:
             raise ValueError("Currently, nesting levels 0, 1, and 2 are supported.")
 
-        if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1:
+        if scheduler is not None and getattr(scheduler.config, "steps_offset", 1) != 1:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`"
                 f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure "
@@ -3780,7 +3780,7 @@ class MatryoshkaPipeline(
             new_config["steps_offset"] = 1
             scheduler._internal_dict = FrozenDict(new_config)
 
-        # if hasattr(scheduler.config, "clip_sample") and scheduler.config.clip_sample is True:
+        # if scheduler is not None and getattr(scheduler.config, "clip_sample", False) is True:
         # deprecation_message = (
         # f"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`."
         # " `clip_sample` should be set to False in the configuration file. Please make sure to update the"
...
@@ -98,7 +98,7 @@ class MultilingualStableDiffusion(DiffusionPipeline, StableDiffusionMixin):
     ):
         super().__init__()
 
-        if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1:
+        if scheduler is not None and getattr(scheduler.config, "steps_offset", 1) != 1:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`"
                 f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure "
...
@@ -131,7 +131,7 @@ class Prompt2PromptPipeline(
     ):
         super().__init__()
 
-        if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1:
+        if scheduler is not None and getattr(scheduler.config, "steps_offset", 1) != 1:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`"
                 f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure "
@@ -145,7 +145,7 @@ class Prompt2PromptPipeline(
             new_config["steps_offset"] = 1
             scheduler._internal_dict = FrozenDict(new_config)
 
-        if hasattr(scheduler.config, "clip_sample") and scheduler.config.clip_sample is True:
+        if scheduler is not None and getattr(scheduler.config, "clip_sample", False) is True:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`."
                 " `clip_sample` should be set to False in the configuration file. Please make sure to update the"
...
@@ -417,7 +417,7 @@ class StableDiffusionBoxDiffPipeline(
     ):
         super().__init__()
 
-        if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1:
+        if scheduler is not None and getattr(scheduler.config, "steps_offset", 1) != 1:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`"
                 f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure "
@@ -431,7 +431,7 @@ class StableDiffusionBoxDiffPipeline(
             new_config["steps_offset"] = 1
             scheduler._internal_dict = FrozenDict(new_config)
 
-        if hasattr(scheduler.config, "clip_sample") and scheduler.config.clip_sample is True:
+        if scheduler is not None and getattr(scheduler.config, "clip_sample", False) is True:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`."
                 " `clip_sample` should be set to False in the configuration file. Please make sure to update the"
...
@@ -384,7 +384,7 @@ class StableDiffusionPAGPipeline(
     ):
         super().__init__()
 
-        if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1:
+        if scheduler is not None and getattr(scheduler.config, "steps_offset", 1) != 1:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`"
                 f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure "
@@ -398,7 +398,7 @@ class StableDiffusionPAGPipeline(
             new_config["steps_offset"] = 1
             scheduler._internal_dict = FrozenDict(new_config)
 
-        if hasattr(scheduler.config, "clip_sample") and scheduler.config.clip_sample is True:
+        if scheduler is not None and getattr(scheduler.config, "clip_sample", False) is True:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`."
                 " `clip_sample` should be set to False in the configuration file. Please make sure to update the"
...
@@ -108,7 +108,7 @@ class Zero1to3StableDiffusionPipeline(DiffusionPipeline, StableDiffusionMixin):
     ):
         super().__init__()
 
-        if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1:
+        if scheduler is not None and getattr(scheduler.config, "steps_offset", 1) != 1:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`"
                 f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure "
@@ -122,7 +122,7 @@ class Zero1to3StableDiffusionPipeline(DiffusionPipeline, StableDiffusionMixin):
             new_config["steps_offset"] = 1
             scheduler._internal_dict = FrozenDict(new_config)
 
-        if hasattr(scheduler.config, "clip_sample") and scheduler.config.clip_sample is True:
+        if scheduler is not None and getattr(scheduler.config, "clip_sample", False) is True:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`."
                 " `clip_sample` should be set to False in the configuration file. Please make sure to update the"
...
@@ -105,7 +105,7 @@ class StableDiffusionIPEXPipeline(
     ):
         super().__init__()
 
-        if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1:
+        if scheduler is not None and getattr(scheduler.config, "steps_offset", 1) != 1:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`"
                 f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure "
@@ -119,7 +119,7 @@ class StableDiffusionIPEXPipeline(
             new_config["steps_offset"] = 1
             scheduler._internal_dict = FrozenDict(new_config)
 
-        if hasattr(scheduler.config, "clip_sample") and scheduler.config.clip_sample is True:
+        if scheduler is not None and getattr(scheduler.config, "clip_sample", False) is True:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`."
                 " `clip_sample` should be set to False in the configuration file. Please make sure to update the"
...
@@ -66,7 +66,7 @@ class StableDiffusionMegaPipeline(DiffusionPipeline, StableDiffusionMixin):
         requires_safety_checker: bool = True,
     ):
         super().__init__()
-        if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1:
+        if scheduler is not None and getattr(scheduler.config, "steps_offset", 1) != 1:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`"
                 f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure "
...
@@ -132,7 +132,7 @@ class StableDiffusionReferencePipeline(
     ):
         super().__init__()
 
-        if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1:
+        if scheduler is not None and getattr(scheduler.config, "steps_offset", 1) != 1:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`"
                 f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure "
@@ -146,7 +146,7 @@ class StableDiffusionReferencePipeline(
             new_config["steps_offset"] = 1
             scheduler._internal_dict = FrozenDict(new_config)
 
-        if hasattr(scheduler.config, "skip_prk_steps") and scheduler.config.skip_prk_steps is False:
+        if scheduler is not None and getattr(scheduler.config, "skip_prk_steps", True) is False:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} has not set the configuration"
                 " `skip_prk_steps`. `skip_prk_steps` should be set to True in the configuration file. Please make"
...
@@ -187,7 +187,7 @@ class StableDiffusionRepaintPipeline(
     ):
         super().__init__()
 
-        if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1:
+        if scheduler is not None and getattr(scheduler.config, "steps_offset", 1) != 1:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`"
                 f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure "
@@ -201,7 +201,7 @@ class StableDiffusionRepaintPipeline(
             new_config["steps_offset"] = 1
             scheduler._internal_dict = FrozenDict(new_config)
 
-        if hasattr(scheduler.config, "skip_prk_steps") and scheduler.config.skip_prk_steps is False:
+        if scheduler is not None and getattr(scheduler.config, "skip_prk_steps", True) is False:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} has not set the configuration"
                 " `skip_prk_steps`. `skip_prk_steps` should be set to True in the configuration file. Please make"
...
@@ -710,7 +710,7 @@ class TensorRTStableDiffusionImg2ImgPipeline(DiffusionPipeline):
     ):
         super().__init__()
 
-        if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1:
+        if scheduler is not None and getattr(scheduler.config, "steps_offset", 1) != 1:
            deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`"
                 f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure "
@@ -724,7 +724,7 @@ class TensorRTStableDiffusionImg2ImgPipeline(DiffusionPipeline):
             new_config["steps_offset"] = 1
             scheduler._internal_dict = FrozenDict(new_config)
 
-        if hasattr(scheduler.config, "clip_sample") and scheduler.config.clip_sample is True:
+        if scheduler is not None and getattr(scheduler.config, "clip_sample", False) is True:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`."
                 " `clip_sample` should be set to False in the configuration file. Please make sure to update the"
...
@@ -714,7 +714,7 @@ class TensorRTStableDiffusionInpaintPipeline(DiffusionPipeline):
     ):
         super().__init__()
 
-        if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1:
+        if scheduler is not None and getattr(scheduler.config, "steps_offset", 1) != 1:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`"
                 f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure "
@@ -728,7 +728,7 @@ class TensorRTStableDiffusionInpaintPipeline(DiffusionPipeline):
             new_config["steps_offset"] = 1
             scheduler._internal_dict = FrozenDict(new_config)
 
-        if hasattr(scheduler.config, "clip_sample") and scheduler.config.clip_sample is True:
+        if scheduler is not None and getattr(scheduler.config, "clip_sample", False) is True:
             deprecation_message = (
                 f"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`."
                 " `clip_sample` should be set to False in the configuration file. Please make sure to update the"
...