ComfyUI

Commit c910b4a0
Authored Jul 28, 2023 by comfyanonymous

Remove unused code and torchdiffeq dependency.

Parent: 1141029a
Showing 2 changed files with 0 additions and 26 deletions:

  comfy/k_diffusion/sampling.py   +0 -25
  requirements.txt                +0 -1
comfy/k_diffusion/sampling.py
@@ -3,7 +3,6 @@ import math
 from scipy import integrate
 import torch
 from torch import nn
-from torchdiffeq import odeint
 import torchsde
 from tqdm.auto import trange, tqdm
@@ -287,30 +286,6 @@ def sample_lms(model, x, sigmas, extra_args=None, callback=None, disable=None, o
     return x


-@torch.no_grad()
-def log_likelihood(model, x, sigma_min, sigma_max, extra_args=None, atol=1e-4, rtol=1e-4):
-    extra_args = {} if extra_args is None else extra_args
-    s_in = x.new_ones([x.shape[0]])
-    v = torch.randint_like(x, 2) * 2 - 1
-    fevals = 0
-    def ode_fn(sigma, x):
-        nonlocal fevals
-        with torch.enable_grad():
-            x = x[0].detach().requires_grad_()
-            denoised = model(x, sigma * s_in, **extra_args)
-            d = to_d(x, sigma, denoised)
-            fevals += 1
-            grad = torch.autograd.grad((d * v).sum(), x)[0]
-            d_ll = (v * grad).flatten(1).sum(1)
-        return d.detach(), d_ll
-    x_min = x, x.new_zeros([x.shape[0]])
-    t = x.new_tensor([sigma_min, sigma_max])
-    sol = odeint(ode_fn, x_min, t, atol=atol, rtol=rtol, method='dopri5')
-    latent, delta_ll = sol[0][-1], sol[1][-1]
-    ll_prior = torch.distributions.Normal(0, sigma_max).log_prob(latent).flatten(1).sum(1)
-    return ll_prior + delta_ll, {'fevals': fevals}
-
-
 class PIDStepSizeController:
     """A PID controller for ODE adaptive step size control."""
     def __init__(self, h, pcoeff, icoeff, dcoeff, order=1, accept_safety=0.81, eps=1e-8):
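For context (not part of the commit itself): the removed log_likelihood helper was the only call site of torchdiffeq.odeint in this file. It estimated a per-sample log-likelihood by integrating the probability-flow ODE from sigma_min to sigma_max with the adaptive dopri5 solver, using a Hutchinson-style estimator (the random ±1 tensor v) for the divergence term. A minimal usage sketch follows; denoiser, images, and the sigma range are assumed placeholders, not values taken from the repository.

# Hypothetical usage sketch of the removed helper; `denoiser` and `images` are
# assumed stand-ins for a k-diffusion denoiser wrapper and a batch of latents,
# and the sigma range is an arbitrary example.
ll, info = log_likelihood(denoiser, images, sigma_min=1e-2, sigma_max=80.0)
# `ll` has shape (batch_size,): the prior log-prob at sigma_max plus the
# integrated change in log-likelihood along the ODE trajectory.
# `info['fevals']` counts how many model evaluations the adaptive solver used.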
requirements.txt
 torch
-torchdiffeq
 torchsde
 einops
 transformers>=4.25.1
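Because requirements.txt drops torchdiffeq in the same commit, a quick way to confirm the dependency is truly unused is to scan the checkout for any remaining references. A minimal sketch, assuming it is run from the ComfyUI repository root:

# Minimal sketch: list any remaining references to torchdiffeq in the tree.
# An empty result means the import removed above was the only one, so the
# requirements.txt entry can be dropped safely.
import pathlib

for path in pathlib.Path(".").rglob("*.py"):
    for lineno, line in enumerate(path.read_text(errors="ignore").splitlines(), 1):
        if "torchdiffeq" in line:
            print(f"{path}:{lineno}: {line.strip()}")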