Unverified Commit fbe29c62 authored by Aryan, committed by GitHub

[refactor] create modeling blocks specific to AnimateDiff (#8979)



* animatediff specific transformer model

* make style

* make fix-copies

* move blocks to unet motion model

* make style

* remove dummy object

* fix incorrectly passed param causing test failures

* rename model and output class

* fix sparsectrl imports

* remove todo comments

* remove temporal double self attn param from controlnet sparsectrl

* add deprecated versions of blocks

* apply suggestions from review

* update

---------
Co-authored-by: Dhruv Nair <dhruv.nair@gmail.com>
parent 7071b746
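
The "move blocks to unet motion model" and "add deprecated versions of blocks" steps above mean that CrossAttnDownBlockMotion and DownBlockMotion now live in unet_motion_model, while unet_3d_blocks keeps backward-compatible re-exports. Below is a minimal sketch of that re-export pattern, assuming diffusers' deprecate helper and an illustrative removal version and message; it is not the exact code from this commit.

# Illustrative sketch of a deprecated re-export in unet_3d_blocks.py (not the exact code).
from diffusers.utils import deprecate
from diffusers.models.unets.unet_motion_model import DownBlockMotion as _DownBlockMotion


class DownBlockMotion(_DownBlockMotion):
    def __init__(self, *args, **kwargs):
        # Warn callers that the block now lives in unet_motion_model.
        deprecate(
            "DownBlockMotion",
            "1.0.0",  # assumed removal version
            "Importing `DownBlockMotion` from `unet_3d_blocks` is deprecated; "
            "import it from `unet_motion_model` instead.",
        )
        super().__init__(*args, **kwargs)
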
@@ -32,10 +32,7 @@ from .embeddings import TimestepEmbedding, Timesteps
 from .modeling_utils import ModelMixin
 from .unets.unet_2d_blocks import UNetMidBlock2DCrossAttn
 from .unets.unet_2d_condition import UNet2DConditionModel
-from .unets.unet_3d_blocks import (
-    CrossAttnDownBlockMotion,
-    DownBlockMotion,
-)
+from .unets.unet_motion_model import CrossAttnDownBlockMotion, DownBlockMotion


 logger = logging.get_logger(__name__)  # pylint: disable=invalid-name
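
For downstream code, the practical effect of the hunk above is a new canonical import path; the old unet_3d_blocks path should keep working through the deprecated re-exports for a while. A short usage sketch, assuming a diffusers version that includes this refactor:

# New canonical import location; the old unet_3d_blocks path should still work but warn.
from diffusers.models.unets.unet_motion_model import (
    CrossAttnDownBlockMotion,
    DownBlockMotion,
)

print(CrossAttnDownBlockMotion.__module__)  # diffusers.models.unets.unet_motion_model
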
@@ -317,7 +314,6 @@ class SparseControlNetModel(ModelMixin, ConfigMixin):
                     temporal_num_attention_heads=motion_num_attention_heads[i],
                     temporal_max_seq_length=motion_max_seq_length,
                     temporal_transformer_layers_per_block=temporal_transformer_layers_per_block[i],
-                    temporal_double_self_attention=False,
                 )
             elif down_block_type == "DownBlockMotion":
                 down_block = DownBlockMotion(
@@ -334,7 +330,6 @@ class SparseControlNetModel(ModelMixin, ConfigMixin):
                     add_downsample=not is_final_block,
                     temporal_num_attention_heads=motion_num_attention_heads[i],
                     temporal_max_seq_length=motion_max_seq_length,
-                    temporal_double_self_attention=False,
                     temporal_transformer_layers_per_block=temporal_transformer_layers_per_block[i],
                 )
             else:
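
The two hunks above stop passing temporal_double_self_attention=False to the motion blocks, in line with the "remove temporal double self attn param" and "fix incorrectly passed param causing test failures" notes. A quick smoke-test sketch that the public model is unaffected; the checkpoint id is an assumption taken from the AnimateDiff SparseCtrl releases, not from this commit:

# Hedged smoke test: loading a SparseCtrl ControlNet should behave the same after
# the refactor, since only internal import paths and a constructor kwarg changed.
import torch
from diffusers import SparseControlNetModel

controlnet = SparseControlNetModel.from_pretrained(
    "guoyww/animatediff-sparsectrl-scribble",  # assumed public checkpoint
    torch_dtype=torch.float16,
)
print(controlnet.config.down_block_types)
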