chenpangpang / ComfyUI · Commit e46b1c30

Disable xformers in VAE when xformers == 0.0.18

Authored Apr 04, 2023 by comfyanonymous
Parent: af291e6f
Showing 2 changed files with 16 additions and 2 deletions:

    comfy/ldm/modules/diffusionmodules/model.py   +2   -2
    comfy/model_management.py                     +14   -0
comfy/ldm/modules/diffusionmodules/model.py

```diff
@@ -9,7 +9,7 @@ from typing import Optional, Any
 from ldm.modules.attention import MemoryEfficientCrossAttention
 import model_management
 
-if model_management.xformers_enabled():
+if model_management.xformers_enabled_vae():
     import xformers
     import xformers.ops
@@ -364,7 +364,7 @@ class MemoryEfficientCrossAttentionWrapper(MemoryEfficientCrossAttention):
 def make_attn(in_channels, attn_type="vanilla", attn_kwargs=None):
     assert attn_type in ["vanilla", "vanilla-xformers", "memory-efficient-cross-attn", "linear", "none"], f'attn_type {attn_type} unknown'
-    if model_management.xformers_enabled() and attn_type == "vanilla":
+    if model_management.xformers_enabled_vae() and attn_type == "vanilla":
         attn_type = "vanilla-xformers"
     if model_management.pytorch_attention_enabled() and attn_type == "vanilla":
         attn_type = "vanilla-pytorch"
```
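Both hunks swap the generic `model_management.xformers_enabled()` check for the new VAE-specific `xformers_enabled_vae()`, so the VAE can fall back to another attention backend while the rest of the model keeps using xformers. A minimal sketch of that fall-through, with hypothetical stub functions standing in for `comfy.model_management` (not ComfyUI's actual module):

```python
# Sketch of make_attn's backend selection: checks run in order, and a
# later check only fires if attn_type is still "vanilla".

def xformers_enabled_vae():
    # Hypothetical stand-in: pretend xformers 0.0.18 is installed,
    # so the VAE opts out of xformers attention.
    return False

def pytorch_attention_enabled():
    # Hypothetical stand-in: PyTorch attention is available.
    return True

def pick_attn_type(attn_type="vanilla"):
    if xformers_enabled_vae() and attn_type == "vanilla":
        attn_type = "vanilla-xformers"
    if pytorch_attention_enabled() and attn_type == "vanilla":
        attn_type = "vanilla-pytorch"
    return attn_type

print(pick_attn_type())  # -> "vanilla-pytorch": xformers is ruled out for the VAE
```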
comfy/model_management.py

```diff
@@ -199,11 +199,25 @@ def get_autocast_device(dev):
         return dev.type
     return "cuda"
 
 def xformers_enabled():
     if vram_state == CPU:
         return False
     return XFORMERS_IS_AVAILBLE
 
+def xformers_enabled_vae():
+    enabled = xformers_enabled()
+    if not enabled:
+        return False
+
+    try:
+        #0.0.18 has a bug where Nan is returned when inputs are too big (1152x1920 res images and above)
+        if xformers.version.__version__ == "0.0.18":
+            return False
+    except:
+        pass
+
+    return enabled
+
 def pytorch_attention_enabled():
     return ENABLE_PYTORCH_ATTENTION
 
```
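The new `xformers_enabled_vae()` gate rules out only the single known-bad release, and the broad `try`/`except` means a missing or unreadable version attribute never blocks xformers. A standalone sketch of the same check, assuming only that xformers is importable (this helper is illustrative, not part of the committed file):

```python
# Standalone sketch of the version gate: refuse xformers for the VAE when
# exactly 0.0.18 is installed, since that release can return NaN for large
# inputs (around 1152x1920 resolution and above).

def xformers_usable_for_vae():
    try:
        import xformers
        # Same comparison as the commit; any failure simply skips the check.
        if xformers.version.__version__ == "0.0.18":
            return False
    except Exception:
        pass
    return True

if __name__ == "__main__":
    print("xformers usable for VAE:", xformers_usable_for_vae())
```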