"git@developer.sourcefind.cn:OpenDAS/torchaudio.git" did not exist on "0cf4b8a9ec35ba0048051d0bc9d0ca8de5f085fe"
Unverified Commit 0d96a894 authored by Aryan, committed by GitHub
Browse files

Fix copied from comment in Mochi lora loader (#10255)

update
parent 6fb94d51
...@@ -3104,7 +3104,7 @@ class Mochi1LoraLoaderMixin(LoraBaseMixin): ...@@ -3104,7 +3104,7 @@ class Mochi1LoraLoaderMixin(LoraBaseMixin):
) )
@classmethod @classmethod
# Copied from diffusers.loaders.lora_pipeline.SD3LoraLoaderMixin.load_lora_into_transformer with SD3Transformer2DModel->CogVideoXTransformer3DModel # Copied from diffusers.loaders.lora_pipeline.SD3LoraLoaderMixin.load_lora_into_transformer with SD3Transformer2DModel->MochiTransformer3DModel
def load_lora_into_transformer( def load_lora_into_transformer(
cls, state_dict, transformer, adapter_name=None, _pipeline=None, low_cpu_mem_usage=False cls, state_dict, transformer, adapter_name=None, _pipeline=None, low_cpu_mem_usage=False
): ):
...@@ -3116,7 +3116,7 @@ class Mochi1LoraLoaderMixin(LoraBaseMixin): ...@@ -3116,7 +3116,7 @@ class Mochi1LoraLoaderMixin(LoraBaseMixin):
A standard state dict containing the lora layer parameters. The keys can either be indexed directly A standard state dict containing the lora layer parameters. The keys can either be indexed directly
into the unet or prefixed with an additional `unet` which can be used to distinguish between text into the unet or prefixed with an additional `unet` which can be used to distinguish between text
encoder lora layers. encoder lora layers.
transformer (`CogVideoXTransformer3DModel`): transformer (`MochiTransformer3DModel`):
The Transformer model to load the LoRA layers into. The Transformer model to load the LoRA layers into.
adapter_name (`str`, *optional*): adapter_name (`str`, *optional*):
Adapter name to be used for referencing the loaded adapter model. If not specified, it will use Adapter name to be used for referencing the loaded adapter model. If not specified, it will use
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment