ComfyUI · commit 87755384
authored Oct 10, 2023 by comfyanonymous
Add a CLIPSave node to save CLIP model weights.
parent d1a0abd4
Showing 1 changed file with 57 additions and 0 deletions.

comfy_extras/nodes_model_merging.py (+57, -0)
@@ -179,6 +179,62 @@ class CheckpointSave:
         comfy.sd.save_checkpoint(output_checkpoint, model, clip, vae, metadata=metadata)
         return {}
 
+class CLIPSave:
+    def __init__(self):
+        self.output_dir = folder_paths.get_output_directory()
+
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": { "clip": ("CLIP",),
+                              "filename_prefix": ("STRING", {"default": "clip/ComfyUI"}),},
+                "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"},}
+    RETURN_TYPES = ()
+    FUNCTION = "save"
+    OUTPUT_NODE = True
+
+    CATEGORY = "advanced/model_merging"
+
+    def save(self, clip, filename_prefix, prompt=None, extra_pnginfo=None):
+        prompt_info = ""
+        if prompt is not None:
+            prompt_info = json.dumps(prompt)
+
+        metadata = {}
+        if not args.disable_metadata:
+            metadata["prompt"] = prompt_info
+            if extra_pnginfo is not None:
+                for x in extra_pnginfo:
+                    metadata[x] = json.dumps(extra_pnginfo[x])
+
+        comfy.model_management.load_models_gpu([clip.load_model()])
+        clip_sd = clip.get_sd()
+
+        for prefix in ["clip_l.", "clip_g.", ""]:
+            k = list(filter(lambda a: a.startswith(prefix), clip_sd.keys()))
+            current_clip_sd = {}
+            for x in k:
+                current_clip_sd[x] = clip_sd.pop(x)
+            if len(current_clip_sd) == 0:
+                continue
+
+            p = prefix[:-1]
+            replace_prefix = {}
+            filename_prefix_ = filename_prefix
+            if len(p) > 0:
+                filename_prefix_ = "{}_{}".format(filename_prefix_, p)
+                replace_prefix[prefix] = ""
+            replace_prefix["transformer."] = ""
+
+            full_output_folder, filename, counter, subfolder, filename_prefix_ = folder_paths.get_save_image_path(filename_prefix_, self.output_dir)
+
+            output_checkpoint = f"{filename}_{counter:05}_.safetensors"
+            output_checkpoint = os.path.join(full_output_folder, output_checkpoint)
+
+            current_clip_sd = comfy.utils.state_dict_prefix_replace(current_clip_sd, replace_prefix)
+
+            comfy.utils.save_torch_file(current_clip_sd, output_checkpoint, metadata=metadata)
+        return {}
+
 class VAESave:
     def __init__(self):
         self.output_dir = folder_paths.get_output_directory()
@@ -220,5 +276,6 @@ NODE_CLASS_MAPPINGS = {
     "ModelMergeAdd": ModelAdd,
     "CheckpointSave": CheckpointSave,
     "CLIPMergeSimple": CLIPMergeSimple,
+    "CLIPSave": CLIPSave,
     "VAESave": VAESave,
 }
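For reference, a rough sketch of how the new node could be driven outside the graph UI follows. It is not part of this commit: the checkpoint path is hypothetical, and the tuple layout assumed for comfy.sd.load_checkpoint_guess_config (model, clip, vae, ...) is an assumption; in normal use the node is invoked by the ComfyUI graph executor.

# Hypothetical usage sketch (not part of this commit): saving CLIP weights
# from an already-loaded checkpoint with the new CLIPSave node.
import comfy.sd
from comfy_extras.nodes_model_merging import CLIPSave

ckpt_path = "models/checkpoints/example.safetensors"  # assumed path
# Assumption: load_checkpoint_guess_config returns (model, clip, vae, ...).
model, clip, vae = comfy.sd.load_checkpoint_guess_config(
    ckpt_path, output_vae=True, output_clip=True)[:3]

# CLIPSave splits the CLIP state dict by the "clip_l." / "clip_g." prefixes,
# strips those prefixes (plus "transformer."), and writes one .safetensors
# file per text encoder under the ComfyUI output directory.
CLIPSave().save(clip, filename_prefix="clip/ComfyUI")

Because of the prefix loop, a dual-encoder (SDXL-style) CLIP yields separate files, something like clip/ComfyUI_clip_l_00001_.safetensors and clip/ComfyUI_clip_g_00001_.safetensors, while a single-encoder CLIP falls through to the empty prefix and is saved as one file.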