Unverified Commit 6f2b310a authored by Xiaodong Wang's avatar Xiaodong Wang Committed by GitHub
Browse files

[UNet_Spatio_Temporal_Condition] fix default num_attention_heads in unet_spatio_temporal_condition


[UNet_Spatio_Temporal_Condition] fix default num_attention_heads in unet_spatio_temporal_condition (#7205)

fix default num_attention_heads in unet_spatio_temporal_condition
Co-authored-by: Sayak Paul <spsayakpaul@gmail.com>
parent e3cd6cae
...@@ -90,7 +90,7 @@ class UNetSpatioTemporalConditionModel(ModelMixin, ConfigMixin, UNet2DConditionL ...@@ -90,7 +90,7 @@ class UNetSpatioTemporalConditionModel(ModelMixin, ConfigMixin, UNet2DConditionL
layers_per_block: Union[int, Tuple[int]] = 2, layers_per_block: Union[int, Tuple[int]] = 2,
cross_attention_dim: Union[int, Tuple[int]] = 1024, cross_attention_dim: Union[int, Tuple[int]] = 1024,
transformer_layers_per_block: Union[int, Tuple[int], Tuple[Tuple]] = 1, transformer_layers_per_block: Union[int, Tuple[int], Tuple[Tuple]] = 1,
num_attention_heads: Union[int, Tuple[int]] = (5, 10, 10, 20), num_attention_heads: Union[int, Tuple[int]] = (5, 10, 20, 20),
num_frames: int = 25, num_frames: int = 25,
): ):
super().__init__() super().__init__()
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment