"ppocr/git@developer.sourcefind.cn:wangsen/paddle_dbnet.git" did not exist on "f7b5edcc723f3fbd4e2d0584cc770d436237437c"
Commit ffe0bb0a authored by comfyanonymous's avatar comfyanonymous
Browse files

Remove useless code.

parent 391c1046
import comfy.t2i_adapter.adapter
import comfy.supported_models_base
import comfy.taesd.taesd
def load_model_weights(model, sd):
    """Load *sd* into *model* non-strictly and release consumed tensors.

    Every key that the model accepted is deleted from *sd* in place, so
    after the call *sd* holds only the unexpected leftovers (this frees
    the memory of the consumed weight tensors for the caller).  Keys the
    model expected but *sd* lacked are logged as a warning.

    Returns the (mutated) model.
    """
    missing, unexpected = model.load_state_dict(sd, strict=False)
    missing = set(missing)
    leftovers = set(unexpected)
    # Drop (and thereby free) every tensor the model actually consumed;
    # only keys the model did not recognize stay behind in sd.
    for key in list(sd.keys()):
        if key not in leftovers:
            del sd[key]
    if missing:
        logging.warning("missing {}".format(missing))
    return model
def load_clip_weights(model, sd):
    """Normalize legacy CLIP state-dict key layouts, then load into *model*.

    Three fix-ups are applied to *sd* before delegating to
    ``load_model_weights``:

    1. Keys under ``cond_stage_model.transformer.`` that are missing the
       ``text_model.`` segment are renamed to include it.
    2. A float32 ``position_ids`` tensor is rounded (presumably it should
       hold integer indices — kept exactly as the original behavior).
    3. Old-style ``cond_stage_model.model.`` keys are converted via
       ``comfy.utils.clip_text_transformers_convert``.
    """
    for key in list(sd.keys()):
        needs_rename = (
            key.startswith("cond_stage_model.transformer.")
            and not key.startswith("cond_stage_model.transformer.text_model.")
        )
        if needs_rename:
            renamed = key.replace("cond_stage_model.transformer.", "cond_stage_model.transformer.text_model.")
            sd[renamed] = sd.pop(key)

    pos_key = 'cond_stage_model.transformer.text_model.embeddings.position_ids'
    if pos_key in sd:
        pos_ids = sd[pos_key]
        if pos_ids.dtype == torch.float32:
            sd[pos_key] = pos_ids.round()

    sd = comfy.utils.clip_text_transformers_convert(sd, "cond_stage_model.model.", "cond_stage_model.transformer.")
    return load_model_weights(model, sd)
def load_lora_for_models(model, clip, lora, strength_model, strength_clip): def load_lora_for_models(model, clip, lora, strength_model, strength_clip):
key_map = {} key_map = {}
if model is not None: if model is not None:
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment