chenpangpang / ComfyUI · Commits

Commit 20d3852a, authored Oct 11, 2023 by comfyanonymous

    Pull some small changes from the other repo.

Parent: ac7d8cfa
Showing 6 changed files with 22 additions and 13 deletions (+22 -13).
comfy/model_management.py               +3  -2
comfy/utils.py                          +4  -0
comfy_extras/nodes_custom_sampler.py    +2  -1
execution.py                            +10 -9
folder_paths.py                         +2  -0
nodes.py                                +1  -1
comfy/model_management.py

@@ -354,6 +354,8 @@ def load_models_gpu(models, memory_required=0):
             current_loaded_models.insert(0, current_loaded_models.pop(index))
             models_already_loaded.append(loaded_model)
         else:
+            if hasattr(x, "model"):
+                print(f"Requested to load {x.model.__class__.__name__}")
             models_to_load.append(loaded_model)

     if len(models_to_load) == 0:
@@ -363,7 +365,7 @@ def load_models_gpu(models, memory_required=0):
                 free_memory(extra_mem, d, models_already_loaded)
         return

-    print("loading new")
+    print(f"Loading {len(models_to_load)} new model{'s' if len(models_to_load) > 1 else ''}")

     total_memory_required = {}
     for loaded_model in models_to_load:
@@ -405,7 +407,6 @@ def load_model_gpu(model):
 def cleanup_models():
     to_delete = []
     for i in range(len(current_loaded_models)):
-        print(sys.getrefcount(current_loaded_models[i].model))
         if sys.getrefcount(current_loaded_models[i].model) <= 2:
             to_delete = [i] + to_delete
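The cleanup_models() hunk only drops a debug print; the refcount test it surrounded stays. That test relies on standard CPython behaviour: sys.getrefcount() counts one extra reference for its own argument, so a model held only by the current_loaded_models list reports 2 and can be released. A minimal illustration (plain Python, not ComfyUI-specific):

    import sys

    class Model:
        pass

    loaded = [Model()]
    print(sys.getrefcount(loaded[0]))   # 2: the list entry + getrefcount's own argument

    external = loaded[0]                # something else still references the model
    print(sys.getrefcount(loaded[0]))   # 3, so a check like `<= 2` would keep it loaded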
comfy/utils.py

@@ -408,6 +408,10 @@ def tiled_scale(samples, function, tile_x=64, tile_y=64, overlap = 8, upscale_am
             output[b:b+1] = out/out_div
     return output

+PROGRESS_BAR_ENABLED = True
+def set_progress_bar_enabled(enabled):
+    global PROGRESS_BAR_ENABLED
+    PROGRESS_BAR_ENABLED = enabled
 PROGRESS_BAR_HOOK = None
 def set_progress_bar_global_hook(function):
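The set_progress_bar_enabled() toggle added here is what the sampler call sites further down read through comfy.utils.PROGRESS_BAR_ENABLED. A minimal sketch of how a headless script might use it; run_workflow() is a hypothetical placeholder for whatever queues and executes prompts:

    import comfy.utils

    comfy.utils.set_progress_bar_enabled(False)    # call sites compute disable_pbar = not PROGRESS_BAR_ENABLED
    try:
        run_workflow()                             # hypothetical: execute prompts without terminal progress bars
    finally:
        comfy.utils.set_progress_bar_enabled(True) # restore the default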
comfy_extras/nodes_custom_sampler.py

@@ -3,6 +3,7 @@ import comfy.sample
 from comfy.k_diffusion import sampling as k_diffusion_sampling
 import latent_preview
 import torch
+import comfy.utils

 class BasicScheduler:
@@ -219,7 +220,7 @@ class SamplerCustom:
         x0_output = {}
         callback = latent_preview.prepare_callback(model, sigmas.shape[-1] - 1, x0_output)
-        disable_pbar = False
+        disable_pbar = not comfy.utils.PROGRESS_BAR_ENABLED
         samples = comfy.sample.sample_custom(model, noise, cfg, sampler, sigmas, positive, negative, latent_image, noise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, seed=noise_seed)

         out = latent.copy()
execution.py

@@ -2,6 +2,7 @@ import os
 import sys
 import copy
 import json
+import logging
 import threading
 import heapq
 import traceback
@@ -156,7 +157,7 @@ def recursive_execute(server, prompt, outputs, current_item, extra_data, execute
             if server.client_id is not None:
                 server.send_sync("executed", { "node": unique_id, "output": output_ui, "prompt_id": prompt_id }, server.client_id)
     except comfy.model_management.InterruptProcessingException as iex:
-        print("Processing interrupted")
+        logging.info("Processing interrupted")

         # skip formatting inputs/outputs
         error_details = {
@@ -177,8 +178,8 @@ def recursive_execute(server, prompt, outputs, current_item, extra_data, execute
         for node_id, node_outputs in outputs.items():
             output_data_formatted[node_id] = [[format_value(x) for x in l] for l in node_outputs]

-        print("!!! Exception during processing !!!")
-        print(traceback.format_exc())
+        logging.error("!!! Exception during processing !!!")
+        logging.error(traceback.format_exc())

         error_details = {
             "node_id": unique_id,
@@ -636,11 +637,11 @@ def validate_prompt(prompt):
         if valid is True:
             good_outputs.add(o)
         else:
-            print(f"Failed to validate prompt for output {o}:")
+            logging.error(f"Failed to validate prompt for output {o}:")
             if len(reasons) > 0:
-                print("* (prompt):")
+                logging.error("* (prompt):")
                 for reason in reasons:
-                    print(f"  - {reason['message']}: {reason['details']}")
+                    logging.error(f"  - {reason['message']}: {reason['details']}")
             errors += [(o, reasons)]

     for node_id, result in validated.items():
         valid = result[0]
@@ -656,11 +657,11 @@ def validate_prompt(prompt):
                 "dependent_outputs": [],
                 "class_type": class_type
             }
-            print(f"* {class_type} {node_id}:")
+            logging.error(f"* {class_type} {node_id}:")
             for reason in reasons:
-                print(f"  - {reason['message']}: {reason['details']}")
+                logging.error(f"  - {reason['message']}: {reason['details']}")
             node_errors[node_id]["dependent_outputs"].append(o)
-            print("Output will be ignored")
+            logging.error("Output will be ignored")

     if len(good_outputs) == 0:
         errors_list = []
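Because these messages now go through the standard logging module instead of print(), an application embedding ComfyUI can decide where they end up. A minimal sketch using only the standard library; the level and file name are illustrative choices, not part of this commit:

    import logging

    # Show info-level messages such as "Processing interrupted" and mirror everything to a file.
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s %(levelname)s %(message)s",
        handlers=[logging.StreamHandler(), logging.FileHandler("comfyui.log")],
    )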
folder_paths.py

@@ -29,6 +29,8 @@ folder_names_and_paths["custom_nodes"] = ([os.path.join(base_path, "custom_nodes
 folder_names_and_paths["hypernetworks"] = ([os.path.join(models_dir, "hypernetworks")], supported_pt_extensions)

+folder_names_and_paths["classifiers"] = ([os.path.join(models_dir, "classifiers")], {""})
+
 output_directory = os.path.join(os.path.dirname(os.path.realpath(__file__)), "output")
 temp_directory = os.path.join(os.path.dirname(os.path.realpath(__file__)), "temp")
 input_directory = os.path.join(os.path.dirname(os.path.realpath(__file__)), "input")
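The new "classifiers" entry registers models/classifiers with the same (search paths, extension filter) tuple layout as the other model folders. A sketch of how a custom node might query it, assuming the usual folder_paths helpers (get_filename_list, get_full_path) that the other folder names are queried with:

    import folder_paths

    # Hypothetical lookup of a file placed under models/classifiers/
    names = folder_paths.get_filename_list("classifiers")
    if names:
        print(folder_paths.get_full_path("classifiers", names[0]))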
nodes.py

@@ -1202,7 +1202,7 @@ def common_ksampler(model, seed, steps, cfg, sampler_name, scheduler, positive,
         noise_mask = latent["noise_mask"]

     callback = latent_preview.prepare_callback(model, steps)
-    disable_pbar = False
+    disable_pbar = not comfy.utils.PROGRESS_BAR_ENABLED
     samples = comfy.sample.sample(model, noise, steps, cfg, sampler_name, scheduler, positive, negative, latent_image,
                                   denoise=denoise, disable_noise=disable_noise, start_step=start_step, last_step=last_step,
                                   force_full_denoise=force_full_denoise, noise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, seed=seed)