ComfyUI commit 8b65f5de
Authored Oct 22, 2023 by comfyanonymous
Parent: e6bc42df

attention_basic now works with hypertile.

Showing 1 changed file with 18 additions and 3 deletions:

comfy/ldm/modules/attention.py (+18, -3)
@@ -95,9 +95,19 @@ def Normalize(in_channels, dtype=None, device=None):
     return torch.nn.GroupNorm(num_groups=32, num_channels=in_channels, eps=1e-6, affine=True, dtype=dtype, device=device)
 
 def attention_basic(q, k, v, heads, mask=None):
+    b, _, dim_head = q.shape
+    dim_head //= heads
+    scale = dim_head ** -0.5
+
     h = heads
-    scale = (q.shape[-1] // heads) ** -0.5
-    q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> (b h) n d', h=h), (q, k, v))
+    q, k, v = map(
+        lambda t: t.unsqueeze(3)
+        .reshape(b, -1, heads, dim_head)
+        .permute(0, 2, 1, 3)
+        .reshape(b * heads, -1, dim_head)
+        .contiguous(),
+        (q, k, v),
+    )
 
     # force cast to fp32 to avoid overflowing
     if _ATTN_PRECISION == "fp32":
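The first hunk rewrites the head-splitting step of attention_basic: instead of einops' rearrange, q, k, and v are now split into per-head batches with plain unsqueeze/reshape/permute calls, which (per the commit message) lets the function work when HyperTile patches it. The two formulations move data identically; below is a minimal sketch, not from the repository, checking that equivalence.

# Sketch (not repository code): verify that the new torch-only head
# split matches the einops rearrange it replaces.
import torch
from einops import rearrange

b, n, heads, dim_head = 2, 64, 8, 40
t = torch.randn(b, n, heads * dim_head)

old = rearrange(t, 'b n (h d) -> (b h) n d', h=heads)
new = (t.unsqueeze(3)
       .reshape(b, -1, heads, dim_head)
       .permute(0, 2, 1, 3)
       .reshape(b * heads, -1, dim_head)
       .contiguous())

assert torch.equal(old, new)  # same shape (b*heads, n, dim_head), same values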
@@ -119,7 +129,12 @@ def attention_basic(q, k, v, heads, mask=None):
 
     sim = sim.softmax(dim=-1)
 
     out = einsum('b i j, b j d -> b i d', sim.to(v.dtype), v)
-    out = rearrange(out, '(b h) n d -> b n (h d)', h=h)
+    out = (
+        out.unsqueeze(0)
+        .reshape(b, heads, -1, dim_head)
+        .permute(0, 2, 1, 3)
+        .reshape(b, -1, heads * dim_head)
+    )
     return out
 
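The second hunk applies the same treatment to the inverse step, merging the per-head outputs back into the channel dimension. A matching sketch of that equivalence (again an illustration, not repository code):

# Sketch: the inverse merge, matching rearrange('(b h) n d -> b n (h d)').
import torch
from einops import rearrange

b, n, heads, dim_head = 2, 64, 8, 40
out = torch.randn(b * heads, n, dim_head)

old = rearrange(out, '(b h) n d -> b n (h d)', h=heads)
new = (out.unsqueeze(0)
       .reshape(b, heads, -1, dim_head)
       .permute(0, 2, 1, 3)
       .reshape(b, -1, heads * dim_head))

assert torch.equal(old, new)  # same shape (b, n, heads*dim_head), same values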
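Taken together, the function now round-trips through the head dimension using only core torch ops. The sketch below reproduces attention_basic as it reads after this commit; the fp32-upcast branch and mask handling are elided, and the similarity computation, which sits in the unchanged middle of the function and is not shown in the diff, is reconstructed here as a standard scaled dot product.

# Self-contained sketch of attention_basic after this commit (assumptions
# noted above; torch.einsum stands in for the module's einsum import).
import torch

def attention_basic_sketch(q, k, v, heads):
    b, _, dim_head = q.shape
    dim_head //= heads
    scale = dim_head ** -0.5

    # split heads: (b, n, heads*dim_head) -> (b*heads, n, dim_head)
    q, k, v = map(
        lambda t: t.unsqueeze(3)
        .reshape(b, -1, heads, dim_head)
        .permute(0, 2, 1, 3)
        .reshape(b * heads, -1, dim_head)
        .contiguous(),
        (q, k, v),
    )

    # assumed standard scaled dot-product similarity (elided in the diff)
    sim = torch.einsum('b i d, b j d -> b i j', q, k) * scale
    sim = sim.softmax(dim=-1)
    out = torch.einsum('b i j, b j d -> b i d', sim.to(v.dtype), v)

    # merge heads: (b*heads, n, dim_head) -> (b, n, heads*dim_head)
    return (out.unsqueeze(0)
            .reshape(b, heads, -1, dim_head)
            .permute(0, 2, 1, 3)
            .reshape(b, -1, heads * dim_head))

q = k = v = torch.randn(1, 64, 320)
print(attention_basic_sketch(q, k, v, heads=8).shape)  # torch.Size([1, 64, 320])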