Unverified commit 32a5d70c authored by Anton Lozhkov, committed by GitHub

Support attn2==None for xformers (#1759)

parent 429e5449
@@ -473,6 +473,7 @@ class BasicTransformerBlock(nn.Module):
             except Exception as e:
                 raise e
             self.attn1._use_memory_efficient_attention_xformers = use_memory_efficient_attention_xformers
-            self.attn2._use_memory_efficient_attention_xformers = use_memory_efficient_attention_xformers
+            if self.attn2 is not None:
+                self.attn2._use_memory_efficient_attention_xformers = use_memory_efficient_attention_xformers

     def forward(self, hidden_states, encoder_hidden_states=None, timestep=None, attention_mask=None):
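
For context, the pattern this diff introduces can be illustrated with a minimal, self-contained sketch. The MiniBlock class, its set_use_memory_efficient_attention_xformers method, and the use of nn.MultiheadAttention below are hypothetical stand-ins, not the diffusers API: they only show why setting the flag unconditionally fails when a block is built without a second attention layer (attn2 is None), and how the None guard avoids the AttributeError.

import torch.nn as nn


class MiniBlock(nn.Module):
    """Hypothetical stand-in for a transformer block with an optional second attention layer."""

    def __init__(self, dim: int, use_cross_attention: bool = True):
        super().__init__()
        self.attn1 = nn.MultiheadAttention(dim, num_heads=4)
        # attn2 only exists when cross-attention is enabled; otherwise it stays None.
        self.attn2 = nn.MultiheadAttention(dim, num_heads=4) if use_cross_attention else None

    def set_use_memory_efficient_attention_xformers(self, enabled: bool):
        # attn1 is always present, so no guard is needed.
        self.attn1._use_memory_efficient_attention_xformers = enabled
        # Guard the optional layer: without this check, the assignment runs on None
        # and raises AttributeError, which is what the commit fixes.
        if self.attn2 is not None:
            self.attn2._use_memory_efficient_attention_xformers = enabled


block = MiniBlock(dim=64, use_cross_attention=False)
block.set_use_memory_efficient_attention_xformers(True)  # no longer raises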