Unverified Commit debc74f4 authored by Patrick von Platen's avatar Patrick von Platen Committed by GitHub
Browse files

[Versatile Diffusion] Fix cross_attention_kwargs (#1849)

fix versatile
parent 2ba42aa9
...@@ -703,7 +703,13 @@ class DualTransformer2DModel(nn.Module): ...@@ -703,7 +703,13 @@ class DualTransformer2DModel(nn.Module):
self.transformer_index_for_condition = [1, 0] self.transformer_index_for_condition = [1, 0]
def forward( def forward(
self, hidden_states, encoder_hidden_states, timestep=None, attention_mask=None, return_dict: bool = True self,
hidden_states,
encoder_hidden_states,
timestep=None,
attention_mask=None,
cross_attention_kwargs=None,
return_dict: bool = True,
): ):
""" """
Args: Args:
...@@ -738,6 +744,7 @@ class DualTransformer2DModel(nn.Module): ...@@ -738,6 +744,7 @@ class DualTransformer2DModel(nn.Module):
input_states, input_states,
encoder_hidden_states=condition_state, encoder_hidden_states=condition_state,
timestep=timestep, timestep=timestep,
cross_attention_kwargs=cross_attention_kwargs,
return_dict=False, return_dict=False,
)[0] )[0]
encoded_states.append(encoded_state - input_states) encoded_states.append(encoded_state - input_states)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment