chenpangpang / ComfyUI · Commits

Commit 77a176f9, authored Sep 02, 2023 by comfyanonymous

    Use common function to reshape batch to.

Parent: 36ea8784
Showing 2 changed files with 10 additions and 5 deletions (+10, -5):

    comfy/sample.py    +3  -5
    comfy/utils.py     +7  -0
comfy/sample.py

 import torch
 import comfy.model_management
 import comfy.samplers
+import comfy.utils
 import math
 import numpy as np
...
@@ -28,8 +29,7 @@ def prepare_mask(noise_mask, shape, device):
     noise_mask = torch.nn.functional.interpolate(noise_mask.reshape((-1, 1, noise_mask.shape[-2], noise_mask.shape[-1])), size=(shape[2], shape[3]), mode="bilinear")
     noise_mask = noise_mask.round()
     noise_mask = torch.cat([noise_mask] * shape[1], dim=1)
-    if noise_mask.shape[0] < shape[0]:
-        noise_mask = noise_mask.repeat(math.ceil(shape[0] / noise_mask.shape[0]), 1, 1, 1)[:shape[0]]
+    noise_mask = comfy.utils.repeat_to_batch_size(noise_mask, shape[0])
     noise_mask = noise_mask.to(device)
     return noise_mask
...
@@ -37,9 +37,7 @@ def broadcast_cond(cond, batch, device):
     """broadcasts conditioning to the batch size"""
     copy = []
     for p in cond:
-        t = p[0]
-        if t.shape[0] < batch:
-            t = torch.cat([t] * batch)
+        t = comfy.utils.repeat_to_batch_size(p[0], batch)
        t = t.to(device)
         copy += [[t] + p[1:]]
     return copy
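The shared helper subsumes both inlined branches above, with one refinement over the old broadcast_cond path: it always returns exactly `batch` entries, whereas the removed `torch.cat([t] * batch)` produced `t.shape[0] * batch` entries whenever the incoming conditioning batch was larger than 1. A minimal sketch of that difference, using an illustrative conditioning shape (the tensor below is a stand-in, not data from the commit):

import math
import torch

def old_broadcast(t, batch):
    # Inline logic removed by this commit: only pads, and pads by whole-batch concatenation.
    if t.shape[0] < batch:
        t = torch.cat([t] * batch)
    return t

def repeat_to_batch_size(tensor, batch_size):
    # Same body as the new comfy.utils.repeat_to_batch_size, copied here so the sketch is self-contained.
    if tensor.shape[0] > batch_size:
        return tensor[:batch_size]
    elif tensor.shape[0] < batch_size:
        return tensor.repeat([math.ceil(batch_size / tensor.shape[0])] + [1] * (len(tensor.shape) - 1))[:batch_size]
    return tensor

cond = torch.randn(2, 77, 768)               # illustrative conditioning tensor with batch 2
print(old_broadcast(cond, 3).shape)          # torch.Size([6, 77, 768]) -- overshoots the target batch
print(repeat_to_batch_size(cond, 3).shape)   # torch.Size([3, 77, 768]) -- exactly the requested batch

For prepare_mask the padding path is unchanged; the helper additionally truncates a mask whose batch dimension is larger than the target, a case the removed `if` did not handle.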
comfy/utils.py

...
@@ -223,6 +223,13 @@ def unet_to_diffusers(unet_config):
     return diffusers_unet_map

+def repeat_to_batch_size(tensor, batch_size):
+    if tensor.shape[0] > batch_size:
+        return tensor[:batch_size]
+    elif tensor.shape[0] < batch_size:
+        return tensor.repeat([math.ceil(batch_size / tensor.shape[0])] + [1] * (len(tensor.shape) - 1))[:batch_size]
+    return tensor
+
 def convert_sd_to(state_dict, dtype):
     keys = list(state_dict.keys())
     for k in keys:
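A short usage sketch of the new helper, assuming ComfyUI is importable as `comfy` (otherwise the function body above can be copied verbatim); the shapes are illustrative:

import torch
import comfy.utils

x = torch.arange(3).reshape(3, 1).float()                   # batch of 3 rows: 0., 1., 2.

# Target larger than the batch: tile along dim 0, then trim to the exact size.
print(comfy.utils.repeat_to_batch_size(x, 5).flatten())     # tensor([0., 1., 2., 0., 1.])

# Target smaller than the batch: simple truncation.
print(comfy.utils.repeat_to_batch_size(x, 2).flatten())     # tensor([0., 1.])

# Target equal to the batch: returned unchanged.
print(comfy.utils.repeat_to_batch_size(x, 3).shape)         # torch.Size([3, 1])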