ComfyUI · commit 6b2a8a38
Authored May 25, 2023 by space-nuko
Parent: ffec8152

Show message in the frontend if prompt execution raises an exception
Showing 3 changed files with 45 additions and 10 deletions (+45 -10)
execution.py          +9  -5
web/scripts/api.js    +6  -0
web/scripts/app.js    +30 -5
execution.py

@@ -258,27 +258,31 @@ class PromptExecutor:
         self.old_prompt = {}
         self.server = server
 
-    def handle_execution_error(self, prompt_id, current_outputs, executed, error, ex):
+    def handle_execution_error(self, prompt_id, prompt, current_outputs, executed, error, ex):
+        node_id = error["node_id"]
+        class_type = prompt[node_id]["class_type"]
+
         # First, send back the status to the frontend depending
         # on the exception type
         if isinstance(ex, comfy.model_management.InterruptProcessingException):
             mes = {
                 "prompt_id": prompt_id,
+                "node_id": node_id,
+                "node_type": class_type,
                 "executed": list(executed),
-                "node_id": error["node_id"],
             }
             self.server.send_sync("execution_interrupted", mes, self.server.client_id)
         else:
             if self.server.client_id is not None:
                 mes = {
                     "prompt_id": prompt_id,
+                    "node_id": node_id,
+                    "node_type": class_type,
                     "executed": list(executed),
                     "message": error["message"],
                     "exception_type": error["exception_type"],
                     "traceback": error["traceback"],
-                    "node_id": error["node_id"],
                     "current_inputs": error["current_inputs"],
                     "current_outputs": error["current_outputs"],
                 }

@@ -346,7 +350,7 @@ class PromptExecutor:
                 # error was raised
                 success, error, ex = recursive_execute(self.server, prompt, self.outputs, output_node_id, extra_data, executed, prompt_id, self.outputs_ui)
                 if success is not True:
-                    self.handle_execution_error(prompt_id, current_outputs, executed, error, ex)
+                    self.handle_execution_error(prompt_id, prompt, current_outputs, executed, error, ex)
 
             for x in executed:
                 self.old_prompt[x] = copy.deepcopy(prompt[x])
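The mes dict assembled in the else branch above is what reaches the browser as the detail of the new execution_error event. For reference when reading the JavaScript changes below, here is a minimal sketch of such a payload; only the field names come from the commit, every value is invented for illustration.

// Illustrative execution_error payload as seen from the frontend.
// Field names match the mes dict built in handle_execution_error;
// all values below are made up.
const exampleExecutionError = {
	prompt_id: "00000000-0000-0000-0000-000000000000",   // hypothetical prompt id
	node_id: "7",
	node_type: "KSampler",                                // hypothetical node class
	executed: ["3", "4", "6"],
	message: "CUDA out of memory",
	exception_type: "OutOfMemoryError",
	traceback: ["Traceback (most recent call last):\n", "  ...\n"],
	current_inputs: {},
	current_outputs: [],
};

console.log(`${exampleExecutionError.node_type} failed: ${exampleExecutionError.message}`);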
web/scripts/api.js

@@ -88,6 +88,12 @@ class ComfyApi extends EventTarget {
 				case "executed":
 					this.dispatchEvent(new CustomEvent("executed", { detail: msg.data }));
 					break;
+				case "execution_start":
+					this.dispatchEvent(new CustomEvent("execution_start", { detail: msg.data }));
+					break;
+				case "execution_error":
+					this.dispatchEvent(new CustomEvent("execution_error", { detail: msg.data }));
+					break;
 				default:
 					if (this.#registered.has(msg.type)) {
 						this.dispatchEvent(new CustomEvent(msg.type, { detail: msg.data }));
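With these two cases in place, any frontend script that holds the shared api object can listen for the new events the same way app.js does below. A minimal sketch, assuming the api instance exported by web/scripts/api.js; the handler bodies are illustrative only.

// Hypothetical extension code subscribing to the new events.
import { api } from "./api.js";

api.addEventListener("execution_start", ({ detail }) => {
	console.log("prompt execution started", detail);
});

api.addEventListener("execution_error", ({ detail }) => {
	// detail carries the fields built in execution.py:
	// prompt_id, node_id, node_type, message, exception_type, traceback, ...
	console.warn(`node ${detail.node_id} (${detail.node_type}) failed: ${detail.message}`);
});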
web/scripts/app.js

@@ -784,8 +784,10 @@ export class ComfyApp {
 					color = "red";
 					lineWidth = 2;
 				}
+				else if (self.lastExecutionError && +self.lastExecutionError.node_id === node.id) {
+					self.graphTime = Date.now()
+					color = "#f0f";
+					lineWidth = 2;
+				}
 
 				if (color) {
 					const shape = node._shape || node.constructor.shape || LiteGraph.ROUND_SHAPE;

@@ -895,6 +897,17 @@ export class ComfyApp {
 			}
 		});
 
+		api.addEventListener("execution_start", ({ detail }) => {
+			this.lastExecutionError = null
+		});
+
+		api.addEventListener("execution_error", ({ detail }) => {
+			this.lastExecutionError = detail;
+			const formattedError = this.#formatExecutionError(detail);
+			this.ui.dialog.show(formattedError);
+			this.canvas.draw(true, true);
+		});
+
 		api.init();
 	}

@@ -1269,7 +1282,7 @@ export class ComfyApp {
 		return { workflow, output };
 	}
 
-	#formatError(error) {
+	#formatPromptError(error) {
 		if (error == null) {
 			return "(unknown error)"
 		}

@@ -1294,6 +1307,18 @@ export class ComfyApp {
 		return "(unknown error)"
 	}
 
+	#formatExecutionError(error) {
+		if (error == null) {
+			return "(unknown error)"
+		}
+
+		const traceback = error.traceback.join("")
+		const nodeId = error.node_id
+		const nodeType = error.node_type
+
+		return `Error occurred when executing ${nodeType}:\n\n${error.message}\n\n${traceback}`
+	}
+
 	async queuePrompt(number, batchCount = 1) {
 		this.#queueItems.push({ number, batchCount });

@@ -1315,7 +1340,7 @@ export class ComfyApp {
 		try {
 			await api.queuePrompt(number, p);
 		} catch (error) {
-			const formattedError = this.#formatError(error)
+			const formattedError = this.#formatPromptError(error)
 			this.ui.dialog.show(formattedError);
 			if (error.response) {
 				this.lastPromptError = error.response;

@@ -1419,7 +1444,7 @@ export class ComfyApp {
 	clean() {
 		this.nodeOutputs = {};
 		this.lastPromptError = null;
+		this.graphTime = null
+		this.lastExecutionError = null;
 	}
 }
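Two details worth noting in the app.js changes: the unary plus in +self.lastExecutionError.node_id coerces the reported node id to a number before comparing it with node.id, so the failing node is outlined in magenta (#f0f) on the canvas, and the dialog text shown to the user comes from the new #formatExecutionError helper. Since that helper is a private method, the sketch below mirrors it as a free function with an invented payload, purely to show the string it produces.

// Standalone mirror of #formatExecutionError, for illustration only.
function formatExecutionError(error) {
	if (error == null) {
		return "(unknown error)";
	}
	const traceback = error.traceback.join("");
	const nodeType = error.node_type;
	return `Error occurred when executing ${nodeType}:\n\n${error.message}\n\n${traceback}`;
}

// Invented payload; prints roughly what the error dialog would display.
console.log(formatExecutionError({
	node_id: "7",
	node_type: "KSampler",
	message: "CUDA out of memory",
	traceback: ["Traceback (most recent call last):\n", "  ...\n"],
}));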