chenpangpang / ComfyUI
Commit 1a0486bb authored Apr 04, 2024 by comfyanonymous

Fix model needing to be loaded on GPU to generate the sigmas.

Parent: 1f8d8e6c
Showing 3 changed files with 21 additions and 16 deletions (+21 -16):

    comfy/model_patcher.py (+6, -0)
    comfy/samplers.py (+14, -14)
    comfy_extras/nodes_custom_sampler.py (+1, -2)
comfy/model_patcher.py
@@ -150,6 +150,12 @@ class ModelPatcher:
     def add_object_patch(self, name, obj):
         self.object_patches[name] = obj
 
+    def get_model_object(self, name):
+        if name in self.object_patches:
+            return self.object_patches[name]
+        else:
+            return comfy.utils.get_attr(self.model, name)
+
     def model_patches_to(self, device):
         to = self.model_options["transformer_options"]
         if "patches" in to:
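The added get_model_object helper returns a patched object if one has been registered via add_object_patch, and otherwise falls back to comfy.utils.get_attr on the wrapped model. A minimal sketch of that lookup order, using hypothetical Fake* stand-ins rather than the real ComfyUI classes:

    # Hypothetical stand-ins for illustration only; the real implementation is
    # the ModelPatcher method added in comfy/model_patcher.py above.
    import functools

    def get_attr(obj, attr):
        # dotted-path attribute lookup, analogous to comfy.utils.get_attr
        return functools.reduce(getattr, attr.split("."), obj)

    class FakeSampling:
        sigma_min, sigma_max = 0.03, 14.6

    class FakeModel:
        model_sampling = FakeSampling()

    class FakePatcher:
        def __init__(self, model):
            self.model = model
            self.object_patches = {}

        def get_model_object(self, name):
            # a registered object patch takes precedence over the model attribute
            if name in self.object_patches:
                return self.object_patches[name]
            return get_attr(self.model, name)

    patcher = FakePatcher(FakeModel())
    print(patcher.get_model_object("model_sampling").sigma_max)  # 14.6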
comfy/samplers.py
@@ -274,8 +274,8 @@ class KSamplerX0Inpaint:
             out = out * denoise_mask + self.latent_image * latent_mask
         return out
 
-def simple_scheduler(model, steps):
-    s = model.model_sampling
+def simple_scheduler(model_sampling, steps):
+    s = model_sampling
     sigs = []
     ss = len(s.sigmas) / steps
     for x in range(steps):
@@ -283,8 +283,8 @@ def simple_scheduler(model, steps):
...
@@ -283,8 +283,8 @@ def simple_scheduler(model, steps):
sigs
+=
[
0.0
]
sigs
+=
[
0.0
]
return
torch
.
FloatTensor
(
sigs
)
return
torch
.
FloatTensor
(
sigs
)
def
ddim_scheduler
(
model
,
steps
):
def
ddim_scheduler
(
model
_sampling
,
steps
):
s
=
model
.
model_sampling
s
=
model_sampling
sigs
=
[]
sigs
=
[]
ss
=
max
(
len
(
s
.
sigmas
)
//
steps
,
1
)
ss
=
max
(
len
(
s
.
sigmas
)
//
steps
,
1
)
x
=
1
x
=
1
...
@@ -295,8 +295,8 @@ def ddim_scheduler(model, steps):
...
@@ -295,8 +295,8 @@ def ddim_scheduler(model, steps):
sigs
+=
[
0.0
]
sigs
+=
[
0.0
]
return
torch
.
FloatTensor
(
sigs
)
return
torch
.
FloatTensor
(
sigs
)
def
normal_scheduler
(
model
,
steps
,
sgm
=
False
,
floor
=
False
):
def
normal_scheduler
(
model
_sampling
,
steps
,
sgm
=
False
,
floor
=
False
):
s
=
model
.
model_sampling
s
=
model_sampling
start
=
s
.
timestep
(
s
.
sigma_max
)
start
=
s
.
timestep
(
s
.
sigma_max
)
end
=
s
.
timestep
(
s
.
sigma_min
)
end
=
s
.
timestep
(
s
.
sigma_min
)
...
@@ -660,19 +660,19 @@ def sample(model, noise, positive, negative, cfg, device, sampler, sigmas, model
...
@@ -660,19 +660,19 @@ def sample(model, noise, positive, negative, cfg, device, sampler, sigmas, model
SCHEDULER_NAMES
=
[
"normal"
,
"karras"
,
"exponential"
,
"sgm_uniform"
,
"simple"
,
"ddim_uniform"
]
SCHEDULER_NAMES
=
[
"normal"
,
"karras"
,
"exponential"
,
"sgm_uniform"
,
"simple"
,
"ddim_uniform"
]
SAMPLER_NAMES
=
KSAMPLER_NAMES
+
[
"ddim"
,
"uni_pc"
,
"uni_pc_bh2"
]
SAMPLER_NAMES
=
KSAMPLER_NAMES
+
[
"ddim"
,
"uni_pc"
,
"uni_pc_bh2"
]
def
calculate_sigmas
_scheduler
(
model
,
scheduler_name
,
steps
):
def
calculate_sigmas
(
model_sampling
,
scheduler_name
,
steps
):
if
scheduler_name
==
"karras"
:
if
scheduler_name
==
"karras"
:
sigmas
=
k_diffusion_sampling
.
get_sigmas_karras
(
n
=
steps
,
sigma_min
=
float
(
model
.
model
_sampling
.
sigma_min
),
sigma_max
=
float
(
model
.
model
_sampling
.
sigma_max
))
sigmas
=
k_diffusion_sampling
.
get_sigmas_karras
(
n
=
steps
,
sigma_min
=
float
(
model_sampling
.
sigma_min
),
sigma_max
=
float
(
model_sampling
.
sigma_max
))
elif
scheduler_name
==
"exponential"
:
elif
scheduler_name
==
"exponential"
:
sigmas
=
k_diffusion_sampling
.
get_sigmas_exponential
(
n
=
steps
,
sigma_min
=
float
(
model
.
model
_sampling
.
sigma_min
),
sigma_max
=
float
(
model
.
model
_sampling
.
sigma_max
))
sigmas
=
k_diffusion_sampling
.
get_sigmas_exponential
(
n
=
steps
,
sigma_min
=
float
(
model_sampling
.
sigma_min
),
sigma_max
=
float
(
model_sampling
.
sigma_max
))
elif
scheduler_name
==
"normal"
:
elif
scheduler_name
==
"normal"
:
sigmas
=
normal_scheduler
(
model
,
steps
)
sigmas
=
normal_scheduler
(
model
_sampling
,
steps
)
elif
scheduler_name
==
"simple"
:
elif
scheduler_name
==
"simple"
:
sigmas
=
simple_scheduler
(
model
,
steps
)
sigmas
=
simple_scheduler
(
model
_sampling
,
steps
)
elif
scheduler_name
==
"ddim_uniform"
:
elif
scheduler_name
==
"ddim_uniform"
:
sigmas
=
ddim_scheduler
(
model
,
steps
)
sigmas
=
ddim_scheduler
(
model
_sampling
,
steps
)
elif
scheduler_name
==
"sgm_uniform"
:
elif
scheduler_name
==
"sgm_uniform"
:
sigmas
=
normal_scheduler
(
model
,
steps
,
sgm
=
True
)
sigmas
=
normal_scheduler
(
model
_sampling
,
steps
,
sgm
=
True
)
else
:
else
:
logging
.
error
(
"error invalid scheduler {}"
.
format
(
scheduler_name
))
logging
.
error
(
"error invalid scheduler {}"
.
format
(
scheduler_name
))
return
sigmas
return
sigmas
...
@@ -714,7 +714,7 @@ class KSampler:
...
@@ -714,7 +714,7 @@ class KSampler:
steps
+=
1
steps
+=
1
discard_penultimate_sigma
=
True
discard_penultimate_sigma
=
True
sigmas
=
calculate_sigmas
_scheduler
(
self
.
model
.
model
,
self
.
scheduler
,
steps
)
sigmas
=
calculate_sigmas
(
self
.
model
.
get_
model
_object
(
"model_sampling"
)
,
self
.
scheduler
,
steps
)
if
discard_penultimate_sigma
:
if
discard_penultimate_sigma
:
sigmas
=
torch
.
cat
([
sigmas
[:
-
2
],
sigmas
[
-
1
:]])
sigmas
=
torch
.
cat
([
sigmas
[:
-
2
],
sigmas
[
-
1
:]])
...
...
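After this commit every scheduler helper in comfy/samplers.py takes a model_sampling object (anything exposing sigmas, sigma_min, sigma_max and timestep) instead of the full model, and KSampler fetches that object via model.get_model_object("model_sampling"). That is what lets the sigmas be computed without loading the model onto the GPU. A hedged usage sketch with a hypothetical DummySampling stand-in in place of a real ComfyUI model:

    import torch
    import comfy.samplers

    class DummySampling:
        # Illustrative 1000-entry sigma table; a real model_sampling object
        # would come from model.get_model_object("model_sampling").
        sigmas = torch.linspace(0.03, 14.6, 1000)
        sigma_min, sigma_max = sigmas[0], sigmas[-1]

        def timestep(self, sigma):
            # rough inverse lookup; not exercised by the "simple" path below
            return torch.argmin((self.sigmas - sigma).abs())

    # The "simple" scheduler only reads .sigmas, so the dummy is sufficient here;
    # before this change the whole (GPU-loaded) model had to be passed instead.
    sigmas = comfy.samplers.calculate_sigmas(DummySampling(), "simple", 20)
    print(sigmas.shape)  # torch.Size([21]) -- the 20 steps plus a trailing 0.0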
comfy_extras/nodes_custom_sampler.py
@@ -28,8 +28,7 @@ class BasicScheduler:
                 return (torch.FloatTensor([]),)
             total_steps = int(steps/denoise)
 
-        comfy.model_management.load_models_gpu([model])
-        sigmas = comfy.samplers.calculate_sigmas_scheduler(model.model, scheduler, total_steps).cpu()
+        sigmas = comfy.samplers.calculate_sigmas(model.get_model_object("model_sampling"), scheduler, total_steps).cpu()
         sigmas = sigmas[-(steps + 1):]
         return (sigmas, )
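For custom node authors, the pattern this enables is to ask the ModelPatcher for its model_sampling object instead of forcing a GPU load first. A hedged sketch (model is assumed to be the ModelPatcher a node receives, as in BasicScheduler above; the helper name is illustrative):

    import comfy.samplers

    def get_sigmas_without_gpu_load(model, scheduler, steps):
        # No comfy.model_management.load_models_gpu([model]) call needed anymore:
        # the schedule comes straight from the patcher's model_sampling object.
        model_sampling = model.get_model_object("model_sampling")
        return comfy.samplers.calculate_sigmas(model_sampling, scheduler, steps).cpu()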