chenpangpang/ComfyUI · Commit 56d9496b
Authored Jan 12, 2024 by comfyanonymous

Rename status notes to status messages.

I think message describes them better.

Parent: bcc0bde2
Changes: 2 changed files with 10 additions and 10 deletions (+10 −10)

execution.py  +9 −9
main.py       +1 −1
execution.py

@@ -276,12 +276,12 @@ class PromptExecutor:
         self.outputs = {}
         self.object_storage = {}
         self.outputs_ui = {}
-        self.status_notes = []
+        self.status_messages = []
         self.success = True
         self.old_prompt = {}

-    def add_note(self, event, data, broadcast: bool):
-        self.status_notes.append((event, data))
+    def add_message(self, event, data, broadcast: bool):
+        self.status_messages.append((event, data))
         if self.server.client_id is not None or broadcast:
             self.server.send_sync(event, data, self.server.client_id)
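The rename does not change behavior: the method records the (event, data) pair in the executor's message list and forwards it to the attached client, or to all listeners when broadcast is set. A minimal self-contained sketch of that behavior follows; the stub server is a stand-in for ComfyUI's real PromptServer and is not part of this commit.

# Sketch only: _StubServer mimics the two attributes add_message relies on.
class _StubServer:
    def __init__(self):
        self.client_id = None

    def send_sync(self, event, data, sid=None):
        print(f"send_sync({event!r}, {data!r}, sid={sid!r})")

class PromptExecutorSketch:
    def __init__(self, server):
        self.server = server
        self.status_messages = []  # formerly self.status_notes

    def add_message(self, event, data, broadcast: bool):
        # Record the message, then push it to the client if one is
        # attached or the event is meant for every listener.
        self.status_messages.append((event, data))
        if self.server.client_id is not None or broadcast:
            self.server.send_sync(event, data, self.server.client_id)

executor = PromptExecutorSketch(_StubServer())
executor.add_message("execution_start", {"prompt_id": "abc"}, broadcast=False)       # stored only
executor.add_message("execution_interrupted", {"prompt_id": "abc"}, broadcast=True)  # stored and sent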
@@ -298,7 +298,7 @@ class PromptExecutor:
                 "node_type": class_type,
                 "executed": list(executed),
             }
-            self.add_note("execution_interrupted", mes, broadcast=True)
+            self.add_message("execution_interrupted", mes, broadcast=True)
         else:
             mes = {
                 "prompt_id": prompt_id,
@@ -312,7 +312,7 @@ class PromptExecutor:
                 "current_inputs": error["current_inputs"],
                 "current_outputs": error["current_outputs"],
             }
-            self.add_note("execution_error", mes, broadcast=False)
+            self.add_message("execution_error", mes, broadcast=False)

         # Next, remove the subsequent outputs since they will not be executed
         to_delete = []
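These two hunks only rename the call site; the "execution_interrupted" and "execution_error" payloads are unchanged. For context, here is a hedged sketch of how a client could watch for those events over ComfyUI's websocket endpoint; the default port, the /ws?clientId= path, and the {"type", "data"} envelope are assumptions drawn from the usual setup, and websocket-client is a third-party package.

import json
import uuid
import websocket  # pip install websocket-client

client_id = str(uuid.uuid4())
ws = websocket.WebSocket()
ws.connect(f"ws://127.0.0.1:8188/ws?clientId={client_id}")

while True:
    frame = ws.recv()
    if not isinstance(frame, str):
        continue  # binary frames carry image previews, not status messages
    msg = json.loads(frame)
    if msg.get("type") in ("execution_interrupted", "execution_error"):
        print(msg["data"])  # the `mes` dict assembled in the hunks above
        break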
@@ -334,8 +334,8 @@ class PromptExecutor:
         else:
             self.server.client_id = None

-        self.status_notes = []
-        self.add_note("execution_start", {"prompt_id": prompt_id}, broadcast=False)
+        self.status_messages = []
+        self.add_message("execution_start", {"prompt_id": prompt_id}, broadcast=False)

         with torch.inference_mode():
             #delete cached outputs if nodes don't exist for them
@@ -368,7 +368,7 @@ class PromptExecutor:
                 del d
             comfy.model_management.cleanup_models()
-            self.add_note("execution_cached",
+            self.add_message("execution_cached",
                           {"nodes": list(current_outputs), "prompt_id": prompt_id}, broadcast=False)

             executed = set()
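Together, these two hunks cover the per-run bookkeeping: the message list is cleared for each prompt, then execution_start and, when cached outputs are reused, execution_cached are recorded through the renamed method. Reusing the sketch classes from above, the ordering looks roughly like this (ids and node names are placeholders):

executor.status_messages = []  # reset at the start of each prompt
executor.add_message("execution_start", {"prompt_id": "abc"}, broadcast=False)
executor.add_message("execution_cached",
                     {"nodes": ["3", "7"], "prompt_id": "abc"}, broadcast=False)
assert [event for event, _ in executor.status_messages] == \
    ["execution_start", "execution_cached"]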
@@ -742,7 +742,7 @@ class PromptQueue:
     class ExecutionStatus(NamedTuple):
         status_str: Literal['success', 'error']
         completed: bool
-        notes: List[str]
+        messages: List[str]

     def task_done(self, item_id, outputs,
                   status: Optional['PromptQueue.ExecutionStatus']):
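The NamedTuple field is renamed to match. Worth noting: the annotation remains List[str] even though add_message appends (event, data) tuples, so the type hint is looser than the runtime values. A self-contained sketch of the renamed type, with placeholder values:

from typing import List, Literal, NamedTuple

class ExecutionStatus(NamedTuple):
    status_str: Literal['success', 'error']
    completed: bool
    messages: List[str]  # renamed from `notes`; holds (event, data) pairs in practice

status = ExecutionStatus(status_str='error', completed=False,
                         messages=[("execution_error", {"prompt_id": "abc"})])
print(status.messages)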
main.py

@@ -115,7 +115,7 @@ def prompt_worker(q, server):
             status=execution.PromptQueue.ExecutionStatus(
                 status_str='success' if e.success else 'error',
                 completed=e.success,
-                notes=e.status_notes))
+                messages=e.status_messages))

         if server.client_id is not None:
             server.send_sync("executing", { "node": None, "prompt_id": prompt_id }, server.client_id)