chenpangpang / ComfyUI
Commit 1b103e0c authored Dec 30, 2023 by comfyanonymous

Add argument to run the VAE on the CPU.

parent 144e6580
Showing 2 changed files with 7 additions and 0 deletions (+7 -0)

comfy/cli_args.py          +2 -0
comfy/model_management.py  +5 -0
comfy/cli_args.py
@@ -66,6 +66,8 @@ fpvae_group.add_argument("--fp16-vae", action="store_true", help="Run the VAE in
 fpvae_group.add_argument("--fp32-vae", action="store_true", help="Run the VAE in full precision fp32.")
 fpvae_group.add_argument("--bf16-vae", action="store_true", help="Run the VAE in bf16.")
 
+parser.add_argument("--cpu-vae", action="store_true", help="Run the VAE on the CPU.")
+
 fpte_group = parser.add_mutually_exclusive_group()
 fpte_group.add_argument("--fp8_e4m3fn-text-enc", action="store_true", help="Store text encoder weights in fp8 (e4m3fn variant).")
 fpte_group.add_argument("--fp8_e5m2-text-enc", action="store_true", help="Store text encoder weights in fp8 (e5m2 variant).")
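For readers unfamiliar with argparse store_true flags, here is a minimal, self-contained sketch of how a flag defined like --cpu-vae behaves; this is illustrative only, not ComfyUI's actual parser module:

import argparse

# A store_true flag defaults to False and flips to True when passed on the
# command line; argparse exposes "--cpu-vae" as the attribute args.cpu_vae.
parser = argparse.ArgumentParser()
parser.add_argument("--cpu-vae", action="store_true", help="Run the VAE on the CPU.")

print(parser.parse_args([]).cpu_vae)             # False (flag absent)
print(parser.parse_args(["--cpu-vae"]).cpu_vae)  # True  (flag present)

This args.cpu_vae attribute is what the model_management.py changes below check.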
comfy/model_management.py
@@ -186,6 +186,9 @@ except:
 if is_intel_xpu():
     VAE_DTYPE = torch.bfloat16
 
+if args.cpu_vae:
+    VAE_DTYPE = torch.float32
+
 if args.fp16_vae:
     VAE_DTYPE = torch.float16
 elif args.bf16_vae:
@@ -555,6 +558,8 @@ def intermediate_device():
         return torch.device("cpu")
 
 def vae_device():
+    if args.cpu_vae:
+        return torch.device("cpu")
     return get_torch_device()
 
 def vae_offload_device():
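Taken together, the two hunks mean that passing --cpu-vae (e.g. when launching ComfyUI's main.py) forces VAE encode/decode onto the CPU in fp32 while the rest of the pipeline keeps its usual device. Below is a standalone sketch of that selection logic; the _Args class and get_torch_device stub are stand-ins for this illustration, not ComfyUI code:

import torch

class _Args:
    # Stand-in for the parsed CLI arguments; only the fields used here.
    cpu_vae = True      # what passing --cpu-vae sets
    fp16_vae = False
    bf16_vae = False

args = _Args()

def get_torch_device():
    # Stub for illustration; the real helper picks CUDA/XPU/MPS/... as available.
    return torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Mirrors the dtype selection in the first hunk (initial value stubbed for the sketch).
VAE_DTYPE = torch.float32
if args.cpu_vae:
    VAE_DTYPE = torch.float32   # CPU VAE stays in full precision

if args.fp16_vae:
    VAE_DTYPE = torch.float16
elif args.bf16_vae:
    VAE_DTYPE = torch.bfloat16

# Mirrors vae_device() from the second hunk.
def vae_device():
    if args.cpu_vae:
        return torch.device("cpu")  # force VAE work onto the CPU
    return get_torch_device()

print(vae_device(), VAE_DTYPE)      # -> cpu torch.float32 when cpu_vae is set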