Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
chenpangpang
ComfyUI
Commits
3a1f4776
"extra_model_paths.yaml.example" did not exist on "8bad32258e65a3a7e4c634e4cbb2a53a62212880"
Commit
3a1f4776
authored
May 13, 2023
by
comfyanonymous
Browse files
Print the torch device that is used on startup.
parent
b0505eb7
Changes
1
Show whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
26 additions
and
16 deletions
+26
-16
comfy/model_management.py
comfy/model_management.py
+26
-16
No files found.
comfy/model_management.py
View file @
3a1f4776
...
@@ -127,6 +127,32 @@ if args.cpu:
...
@@ -127,6 +127,32 @@ if args.cpu:
# Log the resolved VRAM strategy at startup (e.g. NORMAL_VRAM, LOW_VRAM).
print("Set vram state to: {}".format(vram_state.name))
def get_torch_device():
    """Return the device that torch computation should run on.

    Selection order: the DirectML device when DirectML is enabled, then
    "mps"/"cpu" as dictated by the module-level ``vram_state``, then "xpu"
    when available, and finally the current CUDA device.

    NOTE(review): the CUDA branch returns ``torch.cuda.current_device()``,
    which is an int index rather than a ``torch.device`` — callers
    apparently accept both.
    """
    global xpu_available
    global directml_enabled
    if directml_enabled:
        global directml_device
        # DirectML supplies its own pre-built device object.
        return directml_device
    device_name = None
    if vram_state == VRAMState.MPS:
        device_name = "mps"
    elif vram_state == VRAMState.CPU:
        device_name = "cpu"
    elif xpu_available:
        device_name = "xpu"
    if device_name is not None:
        return torch.device(device_name)
    return torch.cuda.current_device()
def get_torch_device_name(device):
    """Return a human-readable name for *device*.

    ``torch.device`` objects expose a ``type`` attribute ("cpu", "mps",
    "cuda", ...), which is reported directly. Anything without a ``type``
    attribute is treated as a CUDA device ordinal and described via
    ``torch.cuda.get_device_name``.
    """
    device_type = getattr(device, 'type', None)
    if device_type is not None:
        return "{}".format(device_type)
    # No `.type` attribute: assume a raw CUDA index (see get_torch_device).
    return "CUDA {}: {}".format(device, torch.cuda.get_device_name(device))
# Report the selected compute device at startup. Failing to resolve a
# device here is non-fatal (the real selection is re-done per model load),
# so errors are reported rather than raised.
try:
    print("Using device:", get_torch_device_name(get_torch_device()))
except Exception:
    # Was a bare `except:`, which also swallowed KeyboardInterrupt and
    # SystemExit; `Exception` keeps those propagating while still making
    # this print best-effort.
    print("Could not pick default device.")
# Model currently resident on the compute device (None until first load).
current_loaded_model = None
# ControlNet models currently resident on the compute device.
current_gpu_controlnets = []
...
@@ -233,22 +259,6 @@ def unload_if_low_vram(model):
...
@@ -233,22 +259,6 @@ def unload_if_low_vram(model):
return
model
.
cpu
()
return
model
.
cpu
()
return
model
return
model
def get_torch_device():
    """Return the device torch computation should run on.

    Checks DirectML first, then the module-level ``vram_state`` for
    MPS/CPU, then XPU availability, and falls back to the current CUDA
    device.
    """
    global xpu_available
    global directml_enabled
    if directml_enabled:
        global directml_device
        # DirectML supplies its own pre-built device object.
        return directml_device
    if vram_state == VRAMState.MPS:
        return torch.device("mps")
    if vram_state == VRAMState.CPU:
        return torch.device("cpu")
    else:
        if xpu_available:
            return torch.device("xpu")
        else:
            # NOTE(review): returns an int device index, not a
            # torch.device — callers appear to accept both.
            return torch.cuda.current_device()
def
get_autocast_device
(
dev
):
def
get_autocast_device
(
dev
):
if
hasattr
(
dev
,
'type'
):
if
hasattr
(
dev
,
'type'
):
return
dev
.
type
return
dev
.
type
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment