import os
import sys
import copy
import json
import threading
import heapq
import traceback
import gc

import torch
import nodes

import comfy.model_management

def get_input_data(inputs, class_def, unique_id, outputs={}, prompt={}, extra_data={}):
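    #build the kwargs dict for a node's FUNCTION call:
    #- a list valued input is a link [source_node_id, output_slot_index], resolved from the outputs cache
    #- plain values are passed through if the node declares them as required/optional
    #- "hidden" inputs (PROMPT, EXTRA_PNGINFO, UNIQUE_ID) are injected from the execution context
    #returns None when a linked input has not been computed yet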
    valid_inputs = class_def.INPUT_TYPES()
    input_data_all = {}
    for x in inputs:
        input_data = inputs[x]
        if isinstance(input_data, list):
            input_unique_id = input_data[0]
            output_index = input_data[1]
            if input_unique_id not in outputs:
                return None
            obj = outputs[input_unique_id][output_index]
            input_data_all[x] = obj
        else:
            if ("required" in valid_inputs and x in valid_inputs["required"]) or ("optional" in valid_inputs and x in valid_inputs["optional"]):
                input_data_all[x] = input_data

    if "hidden" in valid_inputs:
        h = valid_inputs["hidden"]
        for x in h:
            if h[x] == "PROMPT":
                input_data_all[x] = prompt
            if h[x] == "EXTRA_PNGINFO":
                if "extra_pnginfo" in extra_data:
                    input_data_all[x] = extra_data['extra_pnginfo']
            if h[x] == "UNIQUE_ID":
                input_data_all[x] = unique_id
    return input_data_all

def recursive_execute(server, prompt, outputs, current_item, extra_data, executed):
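    #depth-first execution: recurse into every uncached input link first,
    #then run this node and cache its result in outputs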
    unique_id = current_item
    inputs = prompt[unique_id]['inputs']
    class_type = prompt[unique_id]['class_type']
    class_def = nodes.NODE_CLASS_MAPPINGS[class_type]
    if unique_id in outputs:
        return

    for x in inputs:
        input_data = inputs[x]

        if isinstance(input_data, list):
            input_unique_id = input_data[0]
            output_index = input_data[1]
            if input_unique_id not in outputs:
                recursive_execute(server, prompt, outputs, input_unique_id, extra_data, executed)

    input_data_all = get_input_data(inputs, class_def, unique_id, outputs, prompt, extra_data)
    if server.client_id is not None:
        server.last_node_id = unique_id
        server.send_sync("executing", { "node": unique_id }, server.client_id)
    obj = class_def()

    nodes.before_node_execution()
    outputs[unique_id] = getattr(obj, obj.FUNCTION)(**input_data_all)
    if "ui" in outputs[unique_id]:
        if server.client_id is not None:
            server.send_sync("executed", { "node": unique_id, "output": outputs[unique_id]["ui"] }, server.client_id)
        if "result" in outputs[unique_id]:
            outputs[unique_id] = outputs[unique_id]["result"]
    executed.add(unique_id)

def recursive_will_execute(prompt, outputs, current_item):
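    #returns the ids of every uncached node (dependencies first) that running current_item would touch;
    #used only to order the outputs so the cheapest one executes first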
    unique_id = current_item
    inputs = prompt[unique_id]['inputs']
    will_execute = []
    if unique_id in outputs:
        return []

    for x in inputs:
        input_data = inputs[x]
        if isinstance(input_data, list):
            input_unique_id = input_data[0]
            output_index = input_data[1]
            if input_unique_id not in outputs:
                will_execute += recursive_will_execute(prompt, outputs, input_unique_id)

    return will_execute + [unique_id]

def recursive_output_delete_if_changed(prompt, old_prompt, outputs, current_item):
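    #cache invalidation: drop a node's cached output when its IS_CHANGED value differs,
    #when its inputs differ from the previous prompt, or when any upstream node was invalidated;
    #returns True if the cached output was deleted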
    unique_id = current_item
    inputs = prompt[unique_id]['inputs']
    class_type = prompt[unique_id]['class_type']
    class_def = nodes.NODE_CLASS_MAPPINGS[class_type]

    is_changed_old = ''
    is_changed = ''
    to_delete = False
    if hasattr(class_def, 'IS_CHANGED'):
        if unique_id in old_prompt and 'is_changed' in old_prompt[unique_id]:
            is_changed_old = old_prompt[unique_id]['is_changed']
        if 'is_changed' not in prompt[unique_id]:
            input_data_all = get_input_data(inputs, class_def, unique_id, outputs)
            if input_data_all is not None:
                try:
                    is_changed = class_def.IS_CHANGED(**input_data_all)
                    prompt[unique_id]['is_changed'] = is_changed
                except Exception:
                    to_delete = True
        else:
            is_changed = prompt[unique_id]['is_changed']

    if unique_id not in outputs:
        return True

    if not to_delete:
        if is_changed != is_changed_old:
            to_delete = True
        elif unique_id not in old_prompt:
            to_delete = True
        elif inputs == old_prompt[unique_id]['inputs']:
            for x in inputs:
                input_data = inputs[x]

                if isinstance(input_data, list):
                    input_unique_id = input_data[0]
                    output_index = input_data[1]
                    if input_unique_id in outputs:
                        to_delete = recursive_output_delete_if_changed(prompt, old_prompt, outputs, input_unique_id)
                    else:
                        to_delete = True
                    if to_delete:
                        break
        else:
            to_delete = True

    if to_delete:
        d = outputs.pop(unique_id)
        del d
    return to_delete

class PromptExecutor:
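    #runs validated prompts against a persistent output cache (self.outputs),
    #comparing against self.old_prompt to decide what can be reused between runs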
    def __init__(self, server):
        self.outputs = {}
        self.old_prompt = {}
        self.server = server

    def execute(self, prompt, extra_data={}, execute_outputs=[]):
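        #executes one prompt: prunes stale cache entries, runs every requested output
        #under torch.inference_mode(), rolls back partial results on error, and finally
        #releases Python and GPU memory via gc.collect() and soft_empty_cache()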
        nodes.interrupt_processing(False)

        if "client_id" in extra_data:
            self.server.client_id = extra_data["client_id"]
        else:
            self.server.client_id = None

        with torch.inference_mode():
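            #inference_mode disables autograd tracking so no gradient state accumulates during the run
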
            #delete cached outputs if nodes don't exist for them
            to_delete = []
            for o in self.outputs:
                if o not in prompt:
                    to_delete += [o]
            for o in to_delete:
                d = self.outputs.pop(o)
                del d

            for x in prompt:
                recursive_output_delete_if_changed(prompt, self.old_prompt, self.outputs, x)

            current_outputs = set(self.outputs.keys())
            executed = set()
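            #current_outputs snapshots what was cached before this run; executed collects node ids
            #that finish during it, so a failure can roll back anything in between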
            try:
                to_execute = []
                for x in list(execute_outputs):
                    to_execute += [(0, x)]

                while len(to_execute) > 0:
                    #always execute the output that depends on the fewest unexecuted nodes first
                    to_execute = sorted(list(map(lambda a: (len(recursive_will_execute(prompt, self.outputs, a[-1])), a[-1]), to_execute)))
                    x = to_execute.pop(0)[-1]

                    recursive_execute(self.server, prompt, self.outputs, x, extra_data, executed)
            except Exception as e:
                if isinstance(e, comfy.model_management.InterruptProcessingException):
                    print("Processing interrupted")
                else:
                    print(traceback.format_exc())
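
                #roll back: discard outputs that appeared during this failed run but whose nodes never finished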
                to_delete = []
                for o in self.outputs:
                    if (o not in current_outputs) and (o not in executed):
                        to_delete += [o]
                        if o in self.old_prompt:
                            d = self.old_prompt.pop(o)
                            del d
                for o in to_delete:
                    d = self.outputs.pop(o)
                    del d
            finally:
                for x in executed:
                    self.old_prompt[x] = copy.deepcopy(prompt[x])
                self.server.last_node_id = None
                if self.server.client_id is not None:
                    self.server.send_sync("executing", { "node": None }, self.server.client_id)

        gc.collect()
        comfy.model_management.soft_empty_cache()


def validate_inputs(prompt, item, validated):
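    #recursively validates one node and everything upstream of it; results are memoized
    #in validated as (ok, error_message) tuples keyed by node id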
    unique_id = item
    if unique_id in validated:
        return validated[unique_id]

    inputs = prompt[unique_id]['inputs']
    class_type = prompt[unique_id]['class_type']
    obj_class = nodes.NODE_CLASS_MAPPINGS[class_type]

    class_inputs = obj_class.INPUT_TYPES()
    required_inputs = class_inputs['required']
    for x in required_inputs:
        if x not in inputs:
            return (False, "Required input is missing. {}, {}".format(class_type, x))
        val = inputs[x]
        info = required_inputs[x]
        type_input = info[0]
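        #a list value is a link: [source_node_id, output_slot_index]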
        if isinstance(val, list):
            if len(val) != 2:
                return (False, "Bad Input. {}, {}".format(class_type, x))
            o_id = val[0]
            o_class_type = prompt[o_id]['class_type']
            r = nodes.NODE_CLASS_MAPPINGS[o_class_type].RETURN_TYPES
            if r[val[1]] != type_input:
                return (False, "Return type mismatch. {}, {}, {} != {}".format(class_type, x, r[val[1]], type_input))
            r = validate_inputs(prompt, o_id, validated)
            if r[0] == False:
                validated[o_id] = r
                return r
        else:
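            #plain widget value: coerce primitives to the declared type, then range-check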
            if type_input == "INT":
                val = int(val)
                inputs[x] = val
            if type_input == "FLOAT":
                val = float(val)
                inputs[x] = val
            if type_input == "STRING":
                val = str(val)
                inputs[x] = val

            if len(info) > 1:
                if "min" in info[1] and val < info[1]["min"]:
                    return (False, "Value smaller than min. {}, {}".format(class_type, x))
                if "max" in info[1] and val > info[1]["max"]:
                    return (False, "Value bigger than max. {}, {}".format(class_type, x))

            if hasattr(obj_class, "VALIDATE_INPUTS"):
                input_data_all = get_input_data(inputs, obj_class, unique_id)
                ret = obj_class.VALIDATE_INPUTS(**input_data_all)
                if ret != True:
                    return (False, "{}, {}".format(class_type, ret))
            else:
                if isinstance(type_input, list):
                    if val not in type_input:
                        return (False, "Value not in list. {}, {}: {} not in {}".format(class_type, x, val, type_input))

    ret = (True, "")
    validated[unique_id] = ret
    return ret

def validate_prompt(prompt):
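    #validates every OUTPUT_NODE in the prompt; returns (True, "", good_output_ids)
    #if at least one output validates, otherwise (False, error_message)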
    outputs = set()
    for x in prompt:
        class_ = nodes.NODE_CLASS_MAPPINGS[prompt[x]['class_type']]
        if hasattr(class_, 'OUTPUT_NODE') and class_.OUTPUT_NODE == True:
            outputs.add(x)

    if len(outputs) == 0:
        return (False, "Prompt has no outputs")

    good_outputs = set()
    errors = []
    validated = {}
    for o in outputs:
        valid = False
        reason = ""
        try:
            m = validate_inputs(prompt, o, validated)
            valid = m[0]
            reason = m[1]
        except Exception as e:
            print(traceback.format_exc())
            valid = False
            reason = "Parsing error"

        if valid == True:
            good_outputs.add(o)
        else:
            print("Failed to validate prompt for output {} {}".format(o, reason))
            print("output will be ignored")
            errors += [(o, reason)]

    if len(good_outputs) == 0:
        errors_list = "\n".join(set(map(lambda a: "{}".format(a[1]), errors)))
        return (False, "Prompt has no properly connected outputs\n {}".format(errors_list))

    return (True, "", list(good_outputs))


class PromptQueue:
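    #thread-safe priority queue of prompt tasks plus a history of completed runs;
    #self.queue is managed with heapq, so queued items must be orderable tuples
    #(the indexing in task_done suggests item[1] is the prompt id used as the history key)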
    def __init__(self, server):
        self.server = server
        self.mutex = threading.RLock()
        self.not_empty = threading.Condition(self.mutex)
        self.task_counter = 0
        self.queue = []
        self.currently_running = {}
        self.history = {}
        server.prompt_queue = self

    def put(self, item):
        with self.mutex:
            heapq.heappush(self.queue, item)
            self.server.queue_updated()
            self.not_empty.notify()

    def get(self):
        with self.not_empty:
            while len(self.queue) == 0:
                self.not_empty.wait()
            item = heapq.heappop(self.queue)
            i = self.task_counter
            self.currently_running[i] = copy.deepcopy(item)
            self.task_counter += 1
            self.server.queue_updated()
            return (item, i)

    def task_done(self, item_id, outputs):
        with self.mutex:
            prompt = self.currently_running.pop(item_id)
            self.history[prompt[1]] = { "prompt": prompt, "outputs": {} }
            for o in outputs:
                if "ui" in outputs[o]:
                    self.history[prompt[1]]["outputs"][o] = outputs[o]["ui"]
            self.server.queue_updated()

    def get_current_queue(self):
        with self.mutex:
            out = []
            for x in self.currently_running.values():
                out += [x]
            return (out, copy.deepcopy(self.queue))

    def get_tasks_remaining(self):
        with self.mutex:
            return len(self.queue) + len(self.currently_running)

    def wipe_queue(self):
        with self.mutex:
            self.queue = []
            self.server.queue_updated()

    def delete_queue_item(self, function):
        with self.mutex:
            for x in range(len(self.queue)):
                if function(self.queue[x]):
                    if len(self.queue) == 1:
                        self.wipe_queue()
                    else:
                        self.queue.pop(x)
                        heapq.heapify(self.queue)
                    self.server.queue_updated()
                    return True
        return False

    def get_history(self):
        with self.mutex:
            return copy.deepcopy(self.history)

    def wipe_history(self):
        with self.mutex:
            self.history = {}

    def delete_history_item(self, id_to_delete):
        with self.mutex:
            self.history.pop(id_to_delete, None)