chenpangpang / ComfyUI

Commit 861fd588
Authored Aug 13, 2023 by comfyanonymous
Parent: 585a0629

Add a warning if a card that doesn't support cuda malloc has it enabled.
Showing 2 changed files with 20 additions and 6 deletions:

    cuda_malloc.py   +6  -6
    main.py         +14  -0
cuda_malloc.py  (view file @ 861fd588)

@@ -36,13 +36,13 @@ def get_gpu_names():
     else:
         return set()
 
-def cuda_malloc_supported():
-    blacklist = {"GeForce GTX TITAN X", "GeForce GTX 980", "GeForce GTX 970", "GeForce GTX 960", "GeForce GTX 950", "GeForce 945M",
-                 "GeForce 940M", "GeForce 930M", "GeForce 920M", "GeForce 910M", "GeForce GTX 750", "GeForce GTX 745", "Quadro K620",
-                 "Quadro K1200", "Quadro K2200", "Quadro M500", "Quadro M520", "Quadro M600", "Quadro M620", "Quadro M1000",
-                 "Quadro M1200", "Quadro M2000", "Quadro M2200", "Quadro M3000", "Quadro M4000", "Quadro M5000", "Quadro M5500", "Quadro M6000",
-                 "GeForce MX110", "GeForce MX130", "GeForce 830M", "GeForce 840M", "GeForce GTX 850M", "GeForce GTX 860M"}
+blacklist = {"GeForce GTX TITAN X", "GeForce GTX 980", "GeForce GTX 970", "GeForce GTX 960", "GeForce GTX 950", "GeForce 945M",
+             "GeForce 940M", "GeForce 930M", "GeForce 920M", "GeForce 910M", "GeForce GTX 750", "GeForce GTX 745", "Quadro K620",
+             "Quadro K1200", "Quadro K2200", "Quadro M500", "Quadro M520", "Quadro M600", "Quadro M620", "Quadro M1000",
+             "Quadro M1200", "Quadro M2000", "Quadro M2200", "Quadro M3000", "Quadro M4000", "Quadro M5000", "Quadro M5500", "Quadro M6000",
+             "GeForce MX110", "GeForce MX130", "GeForce 830M", "GeForce 840M", "GeForce GTX 850M", "GeForce GTX 860M"}
 
+def cuda_malloc_supported():
     try:
         names = get_gpu_names()
     except:
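This hunk only moves the blacklist from inside cuda_malloc_supported() to module scope, so other modules can refer to it as cuda_malloc.blacklist. Below is a rough, self-contained sketch of how the blacklist is used by cuda_malloc_supported(); the abbreviated set, the stubbed get_gpu_names(), and the example GPU name are illustrative assumptions, not values from this commit.

    # Rough sketch of the blacklist check in cuda_malloc_supported().
    # The set is abbreviated and get_gpu_names() is stubbed with a made-up value.
    blacklist = {"GeForce GTX TITAN X", "GeForce GTX 980", "GeForce GTX 860M"}

    def get_gpu_names():
        return {"NVIDIA GeForce GTX 980"}  # hypothetical detected GPU

    def cuda_malloc_supported():
        try:
            names = get_gpu_names()
        except:
            names = set()
        # If any detected GPU name contains a blacklisted model string,
        # cuda-malloc is treated as unsupported.
        for name in names:
            if any(b in name for b in blacklist):
                return False
        return True

    print(cuda_malloc_supported())  # False for the example card above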
main.py  (view file @ 861fd588)

@@ -72,6 +72,17 @@ from server import BinaryEventTypes
 from nodes import init_custom_nodes
 import comfy.model_management
 
+def cuda_malloc_warning():
+    device = comfy.model_management.get_torch_device()
+    device_name = comfy.model_management.get_torch_device_name(device)
+    cuda_malloc_warning = False
+    if "cudaMallocAsync" in device_name:
+        for b in cuda_malloc.blacklist:
+            if b in device_name:
+                cuda_malloc_warning = True
+        if cuda_malloc_warning:
+            print("\nWARNING: this card most likely does not support cuda-malloc, if you get \"CUDA error\" please run ComfyUI with: --disable-cuda-malloc\n")
+
 def prompt_worker(q, server):
     e = execution.PromptExecutor(server)
     while True:

@@ -147,6 +158,9 @@ if __name__ == "__main__":
             load_extra_path_config(config_path)
 
     init_custom_nodes()
+
+    cuda_malloc_warning()
+
     server.add_routes()
     hijack_progress(server)
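The new cuda_malloc_warning() only prints the warning when the reported device name contains "cudaMallocAsync" (i.e. the async allocator is actually enabled) and also matches an entry in cuda_malloc.blacklist. A stand-alone sketch of that condition follows; the device-name strings are hypothetical stand-ins for whatever comfy.model_management.get_torch_device_name() returns, and the blacklist is an abbreviated copy. As the warning text itself says, an affected user would restart ComfyUI with --disable-cuda-malloc.

    # Sketch of the warning condition from cuda_malloc_warning(), with stub inputs.
    # Abbreviated blacklist; the device-name strings below are hypothetical examples.
    blacklist = {"GeForce GTX 970", "Quadro K620"}

    def should_warn(device_name):
        if "cudaMallocAsync" not in device_name:
            return False  # cuda-malloc (async allocator) is not in use for this device
        # Warn only if the device also looks like a blacklisted (unsupported) card.
        return any(b in device_name for b in blacklist)

    print(should_warn("cuda:0 NVIDIA GeForce GTX 970 : cudaMallocAsync"))   # True -> warning printed
    print(should_warn("cuda:0 NVIDIA GeForce RTX 4090 : cudaMallocAsync"))  # False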