Unverified Commit 55ac1dbd authored by hlky, committed by GitHub

Default values in SD3 pipelines when submodules are not loaded (#10393)

Guard attribute setup in the SD3 img2img and inpaint pipelines with hasattr/None checks so that, when a submodule is not loaded, the derived attributes fall back to SD3's standard defaults.
parent 83da817f
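The change repeats one guard pattern across both pipelines: read a value from a submodule's config when the submodule is loaded, otherwise fall back to SD3's standard value (VAE downscale factor 8, 16 latent channels, CLIP tokenizer max length 77, transformer sample size 128, all taken from the fallback literals in the diff below). A minimal, self-contained sketch of the pattern follows; the class name and direct attribute assignment are illustrative, not the pipelines' real signatures.

class PipelineSketch:
    def __init__(self, vae=None, tokenizer=None, transformer=None):
        self.vae = vae
        self.tokenizer = tokenizer
        self.transformer = transformer
        # Fall back to SD3's standard values when a submodule is missing.
        self.vae_scale_factor = (
            2 ** (len(self.vae.config.block_out_channels) - 1)
            if self.vae is not None
            else 8  # SD3's VAE downscales by a factor of 8
        )
        self.tokenizer_max_length = (
            self.tokenizer.model_max_length if self.tokenizer is not None else 77
        )
        self.default_sample_size = (
            self.transformer.config.sample_size if self.transformer is not None else 128
        )

sketch = PipelineSketch()  # nothing loaded, so every default applies
assert (sketch.vae_scale_factor, sketch.tokenizer_max_length, sketch.default_sample_size) == (8, 77, 128)

The real pipelines additionally check hasattr, presumably because the attribute may not have been registered at all; the sketch assigns each attribute directly, so plain None checks suffice.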
@@ -226,12 +226,21 @@ class StableDiffusion3Img2ImgPipeline(DiffusionPipeline, SD3LoraLoaderMixin, Fro
             transformer=transformer,
             scheduler=scheduler,
         )
-        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)
+        self.vae_scale_factor = (
+            2 ** (len(self.vae.config.block_out_channels) - 1) if hasattr(self, "vae") and self.vae is not None else 8
+        )
+        latent_channels = self.vae.config.latent_channels if hasattr(self, "vae") and self.vae is not None else 16
         self.image_processor = VaeImageProcessor(
-            vae_scale_factor=self.vae_scale_factor, vae_latent_channels=self.vae.config.latent_channels
+            vae_scale_factor=self.vae_scale_factor, vae_latent_channels=latent_channels
         )
-        self.tokenizer_max_length = self.tokenizer.model_max_length
-        self.default_sample_size = self.transformer.config.sample_size
+        self.tokenizer_max_length = (
+            self.tokenizer.model_max_length if hasattr(self, "tokenizer") and self.tokenizer is not None else 77
+        )
+        self.default_sample_size = (
+            self.transformer.config.sample_size
+            if hasattr(self, "transformer") and self.transformer is not None
+            else 128
+        )
         self.patch_size = (
             self.transformer.config.patch_size if hasattr(self, "transformer") and self.transformer is not None else 2
         )
...
@@ -225,19 +225,28 @@ class StableDiffusion3InpaintPipeline(DiffusionPipeline, SD3LoraLoaderMixin, Fro
             transformer=transformer,
             scheduler=scheduler,
         )
-        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)
+        self.vae_scale_factor = (
+            2 ** (len(self.vae.config.block_out_channels) - 1) if hasattr(self, "vae") and self.vae is not None else 8
+        )
+        latent_channels = self.vae.config.latent_channels if hasattr(self, "vae") and self.vae is not None else 16
         self.image_processor = VaeImageProcessor(
-            vae_scale_factor=self.vae_scale_factor, vae_latent_channels=self.vae.config.latent_channels
+            vae_scale_factor=self.vae_scale_factor, vae_latent_channels=latent_channels
         )
         self.mask_processor = VaeImageProcessor(
             vae_scale_factor=self.vae_scale_factor,
-            vae_latent_channels=self.vae.config.latent_channels,
+            vae_latent_channels=latent_channels,
             do_normalize=False,
             do_binarize=True,
             do_convert_grayscale=True,
         )
-        self.tokenizer_max_length = self.tokenizer.model_max_length
-        self.default_sample_size = self.transformer.config.sample_size
+        self.tokenizer_max_length = (
+            self.tokenizer.model_max_length if hasattr(self, "tokenizer") and self.tokenizer is not None else 77
+        )
+        self.default_sample_size = (
+            self.transformer.config.sample_size
+            if hasattr(self, "transformer") and self.transformer is not None
+            else 128
+        )
         self.patch_size = (
             self.transformer.config.patch_size if hasattr(self, "transformer") and self.transformer is not None else 2
         )
...
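Downstream, these guards let a pipeline be instantiated with submodules deliberately skipped. A hedged usage sketch, assuming the same kwarg-override convention diffusers documents for dropping text_encoder_3; the model id and keyword arguments are illustrative, not part of this commit:

from diffusers import StableDiffusion3Img2ImgPipeline

# Skip loading the VAE and transformer, e.g. to reuse only the text
# encoders; __init__ no longer raises on the missing submodules.
pipe = StableDiffusion3Img2ImgPipeline.from_pretrained(
    "stabilityai/stable-diffusion-3-medium-diffusers",
    vae=None,
    transformer=None,
)
print(pipe.vae_scale_factor)     # 8, the fallback used when vae is None
print(pipe.default_sample_size)  # 128, the fallback used when transformer is None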