Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
chenpangpang
ComfyUI
Commits
b0aab1e4
"git@developer.sourcefind.cn:wangsen/paddle_dbnet.git" did not exist on "b6a21419d6930001fba0a80bcb0b0dbc5529bcbc"
Commit
b0aab1e4
authored
Dec 11, 2023
by
comfyanonymous
Browse files
Add an option --fp16-unet to force using fp16 for the unet.
parent
ba07cb74
Changes
2
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
3 additions
and
0 deletions
+3
-0
comfy/cli_args.py
comfy/cli_args.py
+1
-0
comfy/model_management.py
comfy/model_management.py
+2
-0
No files found.
comfy/cli_args.py
View file @
b0aab1e4
...
# Mutually exclusive command-line flags choosing the dtype used to store the
# UNET weights.  Exactly one (or none) of these may be passed; argparse
# enforces the exclusivity.  NOTE(review): `parser` is the module-level
# argparse.ArgumentParser defined earlier in comfy/cli_args.py.
fpunet_group = parser.add_mutually_exclusive_group()

# bf16 is intended for testing only, per the upstream help text.
fpunet_group.add_argument(
    "--bf16-unet",
    action="store_true",
    help="Run the UNET in bf16. This should only be used for testing stuff.",
)
# Force fp16 storage for the UNET weights (the option added by this commit).
fpunet_group.add_argument(
    "--fp16-unet",
    action="store_true",
    help="Store unet weights in fp16.",
)
# 8-bit float variants: e4m3fn (more mantissa) and e5m2 (more exponent).
fpunet_group.add_argument(
    "--fp8_e4m3fn-unet",
    action="store_true",
    help="Store unet weights in fp8_e4m3fn.",
)
fpunet_group.add_argument(
    "--fp8_e5m2-unet",
    action="store_true",
    help="Store unet weights in fp8_e5m2.",
)
...
...
comfy/model_management.py
View file @
b0aab1e4
...
@@ -466,6 +466,8 @@ def unet_inital_load_device(parameters, dtype):
...
@@ -466,6 +466,8 @@ def unet_inital_load_device(parameters, dtype):
def
unet_dtype
(
device
=
None
,
model_params
=
0
):
def
unet_dtype
(
device
=
None
,
model_params
=
0
):
if
args
.
bf16_unet
:
if
args
.
bf16_unet
:
return
torch
.
bfloat16
return
torch
.
bfloat16
if
args
.
fp16_unet
:
return
torch
.
float16
if
args
.
fp8_e4m3fn_unet
:
if
args
.
fp8_e4m3fn_unet
:
return
torch
.
float8_e4m3fn
return
torch
.
float8_e4m3fn
if
args
.
fp8_e5m2_unet
:
if
args
.
fp8_e5m2_unet
:
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment