Unverified Commit 66829563 authored by Ju Hoon Park, committed by GitHub

Add AttentionMixin to WanVACETransformer3DModel (#12268)

* Add AttentionMixin to WanVACETransformer3DModel

to enable methods like `set_attn_processor()`.

* Import AttentionMixin in transformer_wan_vace.py

Special thanks to @tolgacangoz πŸ™‡β€β™‚οΈ
parent ffc8c0c1
@@ -21,7 +21,7 @@ import torch.nn as nn
 from ...configuration_utils import ConfigMixin, register_to_config
 from ...loaders import FromOriginalModelMixin, PeftAdapterMixin
 from ...utils import USE_PEFT_BACKEND, logging, scale_lora_layers, unscale_lora_layers
-from ..attention import FeedForward
+from ..attention import AttentionMixin, FeedForward
 from ..cache_utils import CacheMixin
 from ..modeling_outputs import Transformer2DModelOutput
 from ..modeling_utils import ModelMixin
@@ -134,7 +134,9 @@ class WanVACETransformerBlock(nn.Module):
         return conditioning_states, control_hidden_states


-class WanVACETransformer3DModel(ModelMixin, ConfigMixin, PeftAdapterMixin, FromOriginalModelMixin, CacheMixin):
+class WanVACETransformer3DModel(
+    ModelMixin, ConfigMixin, PeftAdapterMixin, FromOriginalModelMixin, CacheMixin, AttentionMixin
+):
     r"""
     A Transformer model for video-like data used in the Wan model.
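With the mixin in place, `WanVACETransformer3DModel` picks up the attention-processor helpers such as `attn_processors` and `set_attn_processor()`. A minimal sketch of the call shape after this change; the checkpoint repo id and subfolder below are illustrative assumptions, not taken from this commit:

```python
# Sketch only: assumes a diffusers-format Wan VACE checkpoint is available.
import torch
from diffusers import WanVACETransformer3DModel

transformer = WanVACETransformer3DModel.from_pretrained(
    "Wan-AI/Wan2.1-VACE-1.3B-diffusers",  # assumed repo id, for illustration
    subfolder="transformer",
    torch_dtype=torch.bfloat16,
)

# AttentionMixin exposes the mapping of attention processors by module name...
print(list(transformer.attn_processors.keys())[:3])

# ...and set_attn_processor(), which accepts a single processor or a dict
# keyed by those names. Re-applying the current processors is a no-op,
# but demonstrates the method enabled by this commit.
transformer.set_attn_processor(dict(transformer.attn_processors))
```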