renzhc / diffusers_dcu · Commits

Unverified commit de71fa59, authored Oct 25, 2023 by AnyISalIn, committed by GitHub on Oct 25, 2023.

fix error of peft lora when xformers enabled (#5506)

Signed-off-by: AnyISalIn <anyisalin@gmail.com>

Parent: dcbfe662

Showing 1 changed file with 6 additions and 4 deletions:

src/diffusers/models/attention_processor.py (+6 / -4)
@@ -909,6 +909,8 @@ class XFormersAttnProcessor:
     ):
         residual = hidden_states

+        args = () if USE_PEFT_BACKEND else (scale,)
+
         if attn.spatial_norm is not None:
             hidden_states = attn.spatial_norm(hidden_states, temb)
@@ -936,15 +938,15 @@ class XFormersAttnProcessor:
         if attn.group_norm is not None:
             hidden_states = attn.group_norm(hidden_states.transpose(1, 2)).transpose(1, 2)

-        query = attn.to_q(hidden_states, scale=scale)
+        query = attn.to_q(hidden_states, *args)

         if encoder_hidden_states is None:
             encoder_hidden_states = hidden_states
         elif attn.norm_cross:
             encoder_hidden_states = attn.norm_encoder_hidden_states(encoder_hidden_states)

-        key = attn.to_k(encoder_hidden_states, scale=scale)
-        value = attn.to_v(encoder_hidden_states, scale=scale)
+        key = attn.to_k(encoder_hidden_states, *args)
+        value = attn.to_v(encoder_hidden_states, *args)

         query = attn.head_to_batch_dim(query).contiguous()
         key = attn.head_to_batch_dim(key).contiguous()
@@ -957,7 +959,7 @@ class XFormersAttnProcessor:
         hidden_states = attn.batch_to_head_dim(hidden_states)

         # linear proj
-        hidden_states = attn.to_out[0](hidden_states, scale=scale)
+        hidden_states = attn.to_out[0](hidden_states, *args)
         # dropout
         hidden_states = attn.to_out[1](hidden_states)
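The change itself is small: with the PEFT LoRA backend enabled, the attention projections (to_q, to_k, to_v, to_out[0]) are plain nn.Linear modules rather than the legacy LoRACompatibleLinear layers, so passing scale=scale to them fails under the xFormers processor. Building args = () if USE_PEFT_BACKEND else (scale,) and splatting it keeps the scale argument for the legacy layers while dropping it for PEFT. A minimal sketch of the pattern follows; the stub class, the project helper, the module-level flag, and the tensor shapes are illustrative assumptions, not diffusers code.

import torch
import torch.nn as nn

# Assumption: mirrors diffusers' module-level USE_PEFT_BACKEND switch.
USE_PEFT_BACKEND = True


class LoRACompatibleLinearStub(nn.Linear):
    """Stand-in for the legacy LoRA-compatible layer whose forward() accepts `scale`."""

    def forward(self, hidden_states, scale: float = 1.0):
        # Real LoRA scaling omitted; this only models the extra-argument signature.
        return super().forward(hidden_states)


def project(layer: nn.Module, hidden_states: torch.Tensor, scale: float = 1.0):
    # The commit's pattern: forward `scale` only when the legacy layers are in use.
    # Under the PEFT backend the projections are plain nn.Linear modules, and
    # calling them with scale=scale raises
    # "forward() got an unexpected keyword argument 'scale'".
    args = () if USE_PEFT_BACKEND else (scale,)
    return layer(hidden_states, *args)


hidden = torch.randn(2, 77, 64)
print(project(nn.Linear(64, 64), hidden).shape)                  # PEFT-style plain Linear: ok
print(project(LoRACompatibleLinearStub(64, 64), hidden).shape)   # legacy-style layer: also ok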