Commit 334aab05 authored by comfyanonymous's avatar comfyanonymous
Browse files

Don't stop workflow if loading embedding fails.

parent 61e7767c
......@@ -2,6 +2,7 @@ import os
from transformers import CLIPTokenizer, CLIPTextModel, CLIPTextConfig
import torch
import traceback
class ClipTokenWeightEncoder:
def encode_token_weights(self, token_weight_pairs):
......@@ -194,6 +195,7 @@ def load_embed(embedding_name, embedding_directory):
embed_path = valid_file
try:
if embed_path.lower().endswith(".safetensors"):
import safetensors.torch
embed = safetensors.torch.load_file(embed_path, device="cpu")
......@@ -202,6 +204,12 @@ def load_embed(embedding_name, embedding_directory):
embed = torch.load(embed_path, weights_only=True, map_location="cpu")
else:
embed = torch.load(embed_path, map_location="cpu")
except Exception as e:
print(traceback.format_exc())
print()
print("error loading embedding, skipping loading:", embedding_name)
return None
if 'string_to_param' in embed:
values = embed['string_to_param'].values()
else:
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment