Unverified Commit 414d7c49 authored by Dhruv Nair's avatar Dhruv Nair Committed by GitHub
Browse files

Fix Basic Transformer Block (#5683)



* fix

* Update src/diffusers/models/attention.py
Co-authored-by: default avatarPatrick von Platen <patrick.v.platen@gmail.com>

---------
Co-authored-by: default avatarPatrick von Platen <patrick.v.platen@gmail.com>
parent 8ca179a0
...@@ -287,7 +287,7 @@ class BasicTransformerBlock(nn.Module): ...@@ -287,7 +287,7 @@ class BasicTransformerBlock(nn.Module):
else: else:
raise ValueError("Incorrect norm") raise ValueError("Incorrect norm")
if self.pos_embed is not None and self.use_ada_layer_norm_single is None: if self.pos_embed is not None and self.use_ada_layer_norm_single is False:
norm_hidden_states = self.pos_embed(norm_hidden_states) norm_hidden_states = self.pos_embed(norm_hidden_states)
attn_output = self.attn2( attn_output = self.attn2(
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment