Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
chenpangpang
ComfyUI
Commits
fed315a7
"tests/vscode:/vscode.git/clone" did not exist on "89459a5d561b9c0bf1316f1be955154275d9d24a"
Commit
fed315a7
authored
Mar 03, 2023
by
comfyanonymous
Browse files
To be really simple CheckpointLoaderSimple should pick the right type.
parent
94bb0375
Changes
3
Hide whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
29 additions
and
7 deletions
+29
-7
comfy/model_management.py
comfy/model_management.py
+24
-0
comfy/sd.py
comfy/sd.py
+3
-1
nodes.py
nodes.py
+2
-6
No files found.
comfy/model_management.py
View file @
fed315a7
...
@@ -173,6 +173,30 @@ def maximum_batch_area():
...
@@ -173,6 +173,30 @@ def maximum_batch_area():
memory_free
=
get_free_memory
()
/
(
1024
*
1024
)
memory_free
=
get_free_memory
()
/
(
1024
*
1024
)
area
=
((
memory_free
-
1024
)
*
0.9
)
/
(
0.6
)
area
=
((
memory_free
-
1024
)
*
0.9
)
/
(
0.6
)
return
int
(
max
(
area
,
0
))
return
int
(
max
(
area
,
0
))
def cpu_mode():
    """Return True when model management has fallen back to CPU-only mode."""
    # Plain read of the module-level state; no `global` declaration is needed
    # for a lookup, only for assignment.
    return vram_state == CPU
def should_use_fp16():
    """Heuristically decide whether fp16 weights are worthwhile on this machine.

    Returns:
        False on CPU-only setups and on CUDA devices known to run fp16
        poorly; True otherwise.
    """
    if cpu_mode():
        return False

    if torch.cuda.is_bf16_supported():
        # bf16-capable (Ampere and newer) devices handle fp16 well too.
        return True

    # BUG FIX: the original called torch.cuda.get_device_properties() with no
    # argument, which raises TypeError because the device parameter is
    # required.  Query the currently active CUDA device instead.
    props = torch.cuda.get_device_properties(torch.cuda.current_device())
    if props.major < 7:
        # Pre-Volta cards lack fast fp16 tensor paths.
        return False

    # FP32 is reportedly faster on the GTX 16xx series despite Turing arch.
    nvidia_16_series = ["1660", "1650", "1630"]
    if any(x in props.name for x in nvidia_16_series):
        return False
    return True
#TODO: might be cleaner to put this somewhere else
#TODO: might be cleaner to put this somewhere else
import
threading
import
threading
...
...
comfy/sd.py
View file @
fed315a7
...
@@ -656,12 +656,14 @@ def load_checkpoint(config_path, ckpt_path, output_vae=True, output_clip=True, e
...
@@ -656,12 +656,14 @@ def load_checkpoint(config_path, ckpt_path, output_vae=True, output_clip=True, e
return
(
ModelPatcher
(
model
),
clip
,
vae
)
return
(
ModelPatcher
(
model
),
clip
,
vae
)
def
load_checkpoint_guess_config
(
ckpt_path
,
fp16
=
False
,
output_vae
=
True
,
output_clip
=
True
,
embedding_directory
=
None
):
def
load_checkpoint_guess_config
(
ckpt_path
,
output_vae
=
True
,
output_clip
=
True
,
embedding_directory
=
None
):
sd
=
load_torch_file
(
ckpt_path
)
sd
=
load_torch_file
(
ckpt_path
)
sd_keys
=
sd
.
keys
()
sd_keys
=
sd
.
keys
()
clip
=
None
clip
=
None
vae
=
None
vae
=
None
fp16
=
model_management
.
should_use_fp16
()
class
WeightsLoader
(
torch
.
nn
.
Module
):
class
WeightsLoader
(
torch
.
nn
.
Module
):
pass
pass
...
...
nodes.py
View file @
fed315a7
...
@@ -209,19 +209,15 @@ class CheckpointLoaderSimple:
...
@@ -209,19 +209,15 @@ class CheckpointLoaderSimple:
@classmethod
def INPUT_TYPES(s):
    """Declare the node's inputs: a single checkpoint file picked from ckpt_dir.

    NOTE(review): reconstructed from a side-by-side diff view; this commit
    removed the explicit "type" (fp16/fp32) and "stop_at_clip_layer" inputs.
    """
    ckpt_names = filter_files_extensions(recursive_search(s.ckpt_dir),
                                         supported_ckpt_extensions)
    return {"required": {"ckpt_name": (ckpt_names, )}}
# ComfyUI node wiring: output sockets, entry-point method name, UI category.
RETURN_TYPES = ("MODEL", "CLIP", "VAE")
FUNCTION = "load_checkpoint"
CATEGORY = "_for_testing"
def load_checkpoint(self, ckpt_name, output_vae=True, output_clip=True):
    """Load a checkpoint by file name, letting the loader guess its config.

    NOTE(review): reconstructed from a side-by-side diff view; this commit
    dropped the explicit fp16 "type" and "stop_at_clip_layer" inputs in
    favour of auto-detection inside load_checkpoint_guess_config.

    Args:
        ckpt_name: file name relative to ``self.ckpt_dir``.
        output_vae: whether to also build the VAE.
        output_clip: whether to also build the CLIP model.

    Returns:
        Whatever ``comfy.sd.load_checkpoint_guess_config`` returns
        (model, clip, vae tuple per RETURN_TYPES).
    """
    ckpt_path = os.path.join(self.ckpt_dir, ckpt_name)
    # BUG FIX: the original hard-coded output_vae=True, output_clip=True in
    # the call, silently ignoring the method's own parameters; forward them.
    out = comfy.sd.load_checkpoint_guess_config(
        ckpt_path,
        output_vae=output_vae,
        output_clip=output_clip,
        embedding_directory=CheckpointLoader.embedding_directory)
    return out
class
LoraLoader
:
class
LoraLoader
:
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment