chenpangpang/ComfyUI · commit bbdcf0b7
Authored Feb 08, 2023 by comfyanonymous · parent 3e22815a

    Use relative imports for k_diffusion.

1 changed file, 8 additions and 8 deletions.
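What the commit does: comfy/samplers.py previously imported the globally installed k_diffusion package; it now imports the copy bundled inside the comfy package via relative imports, aliased so every call site is a one-token edit (k_diffusion.sampling -> k_diffusion_sampling, k_diffusion.external -> k_diffusion_external). A minimal sketch of the pattern, assuming the package layout implied by the leading dot in `.k_diffusion` (both the layout and the vendoring motivation are inferred, not stated in the commit):

    # Assumed layout (inferred from the relative import, not part of this commit):
    #
    #   comfy/
    #       __init__.py
    #       samplers.py        <- the file changed here
    #       k_diffusion/       <- bundled copy of k_diffusion
    #           __init__.py
    #           sampling.py
    #           external.py
    #
    # Inside comfy/samplers.py the leading dot resolves against the comfy
    # package, so these imports pick up the bundled copy instead of any
    # pip-installed k_diffusion:
    from .k_diffusion import sampling as k_diffusion_sampling   # was: import k_diffusion.sampling
    from .k_diffusion import external as k_diffusion_external   # was: import k_diffusion.external

    # Note: relative imports only work when the module is loaded as part of
    # the package (import comfy.samplers); running samplers.py directly as a
    # script raises "ImportError: attempted relative import with no known
    # parent package".

Because the alias reproduces the old dotted name with an underscore, the rest of the file only needs the dot swapped for an underscore at each call site, which is exactly what the hunks below do.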
comfy/samplers.py (+8, -8)
-import k_diffusion.sampling
-import k_diffusion.external
+from .k_diffusion import sampling as k_diffusion_sampling
+from .k_diffusion import external as k_diffusion_external
 import torch
 import contextlib
 import model_management
@@ -185,9 +185,9 @@ class KSampler:
     def __init__(self, model, steps, device, sampler=None, scheduler=None, denoise=None):
         self.model = model
         if self.model.parameterization == "v":
-            self.model_wrap = k_diffusion.external.CompVisVDenoiser(self.model, quantize=True)
+            self.model_wrap = k_diffusion_external.CompVisVDenoiser(self.model, quantize=True)
         else:
-            self.model_wrap = k_diffusion.external.CompVisDenoiser(self.model, quantize=True)
+            self.model_wrap = k_diffusion_external.CompVisDenoiser(self.model, quantize=True)
         self.model_k = CFGDenoiserComplex(self.model_wrap)
         self.device = device
         if scheduler not in self.SCHEDULERS:
@@ -209,7 +209,7 @@ class KSampler:
             discard_penultimate_sigma = True
         if self.scheduler == "karras":
-            sigmas = k_diffusion.sampling.get_sigmas_karras(n=steps, sigma_min=self.sigma_min, sigma_max=self.sigma_max, device=self.device)
+            sigmas = k_diffusion_sampling.get_sigmas_karras(n=steps, sigma_min=self.sigma_min, sigma_max=self.sigma_max, device=self.device)
         elif self.scheduler == "normal":
             sigmas = self.model_wrap.get_sigmas(steps).to(self.device)
         elif self.scheduler == "simple":
@@ -269,9 +269,9 @@ class KSampler:
         with precision_scope(self.device):
             if self.sampler == "sample_dpm_fast":
-                samples = k_diffusion.sampling.sample_dpm_fast(self.model_k, noise, sigma_min, sigmas[0], self.steps, extra_args={"cond":positive, "uncond":negative, "cond_scale": cfg})
+                samples = k_diffusion_sampling.sample_dpm_fast(self.model_k, noise, sigma_min, sigmas[0], self.steps, extra_args={"cond":positive, "uncond":negative, "cond_scale": cfg})
             elif self.sampler == "sample_dpm_adaptive":
-                samples = k_diffusion.sampling.sample_dpm_adaptive(self.model_k, noise, sigma_min, sigmas[0], extra_args={"cond":positive, "uncond":negative, "cond_scale": cfg})
+                samples = k_diffusion_sampling.sample_dpm_adaptive(self.model_k, noise, sigma_min, sigmas[0], extra_args={"cond":positive, "uncond":negative, "cond_scale": cfg})
             else:
-                samples = getattr(k_diffusion.sampling, self.sampler)(self.model_k, noise, sigmas, extra_args={"cond":positive, "uncond":negative, "cond_scale": cfg})
+                samples = getattr(k_diffusion_sampling, self.sampler)(self.model_k, noise, sigmas, extra_args={"cond":positive, "uncond":negative, "cond_scale": cfg})
         return samples.to(torch.float32)
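The final hunk keeps the name-based dispatch working through the new alias: getattr(k_diffusion_sampling, self.sampler) looks the sampler function up by its string name on the module object, exactly as getattr(k_diffusion.sampling, ...) did before. A small self-contained sketch of that dispatch shape, using a toy stand-in module (the sampler names and signatures here are illustrative, not ComfyUI's or k_diffusion's API):

    # Toy illustration of getattr-by-name dispatch on a module-like object.
    from types import SimpleNamespace

    # Stand-in for the sampling module; real k_diffusion samplers have
    # different signatures.
    sampling = SimpleNamespace(
        sample_euler=lambda model, noise, sigmas, extra_args=None: "euler",
        sample_heun=lambda model, noise, sigmas, extra_args=None: "heun",
    )

    sampler_name = "sample_heun"                  # analogous to self.sampler
    sampler_fn = getattr(sampling, sampler_name)  # analogous to getattr(k_diffusion_sampling, self.sampler)
    print(sampler_fn(None, None, None, extra_args={"cond_scale": 8.0}))  # -> heun

Because only the module object changed (an aliased submodule instead of a dotted attribute lookup), every sampler name that resolved before the commit keeps resolving the same way after it.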