Commit a34cde08 authored by comfyanonymous

Nodes now support single-line string properties.

Resize nodes automatically if they are too small because of added properties.
parent a37261bb
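
The feature spans both sides of the stack: a node's Python INPUT_TYPES entry can pass an options dict alongside the "STRING" type name, and the frontend now reads a multiline flag from that dict, defaulting to false. The sketch below condenses the widget choice made in the JavaScript hunks further down; it is illustrative only, and the helper name makeTextWidget and its node parameter are hypothetical, not part of this commit.

    // Hedged sketch of the new widget selection (makeTextWidget is hypothetical).
    function makeTextWidget(node, name, options) {
        options = options || {};   // options mirrors ("STRING", {...}) from INPUT_TYPES
        let default_val = options["default"] === undefined ? "" : options["default"];
        let multiline = options["multiline"] === undefined ? false : options["multiline"];
        if (multiline) {
            // multi-line strings keep the contentEditable "customtext" widget
            return { type: "customtext", name: name, value: default_val };
        }
        // single-line strings fall back to LiteGraph's built-in one-line text widget
        return node.addWidget("text", name, default_val, function(v){}, { multiline: false });
    }

CLIPTextEncode opts into the multiline editor in the first hunk because prompts routinely span several lines; a STRING input that omits the flag now gets the one-line widget instead.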
@@ -28,7 +28,7 @@ def filter_files_extensions(files, extensions):
class CLIPTextEncode:
    @classmethod
    def INPUT_TYPES(s):
-        return {"required": {"text": ("STRING", ), "clip": ("CLIP", )}}
+        return {"required": {"text": ("STRING", {"multiline": True}), "clip": ("CLIP", )}}
    RETURN_TYPES = ("CONDITIONING",)
    FUNCTION = "encode"
@@ -65,17 +65,29 @@ window.addEventListener("resize", resizeCanvas);
var default_graph = {"last_node_id":9,"last_link_id":9,"nodes":[{"id":7,"type":"CLIPTextEncode","pos":[413,389],"size":{"0":425.27801513671875,"1":180.6060791015625},"flags":{},"order":3,"mode":0,"inputs":[{"name":"clip","type":"CLIP","link":5}],"outputs":[{"name":"CONDITIONING","type":"CONDITIONING","links":[6],"slot_index":0}],"properties":{},"widgets_values":["bad hands"]},{"id":6,"type":"CLIPTextEncode","pos":[415,186],"size":{"0":422.84503173828125,"1":164.31304931640625},"flags":{},"order":2,"mode":0,"inputs":[{"name":"clip","type":"CLIP","link":3}],"outputs":[{"name":"CONDITIONING","type":"CONDITIONING","links":[4],"slot_index":0}],"properties":{},"widgets_values":["masterpiece best quality girl"]},{"id":5,"type":"EmptyLatentImage","pos":[473,609],"size":{"0":315,"1":106},"flags":{},"order":1,"mode":0,"outputs":[{"name":"LATENT","type":"LATENT","links":[2],"slot_index":0}],"properties":{},"widgets_values":[512,512,1]},{"id":3,"type":"KSampler","pos":[863,186],"size":{"0":315,"1":262},"flags":{},"order":4,"mode":0,"inputs":[{"name":"model","type":"MODEL","link":1},{"name":"positive","type":"CONDITIONING","link":4},{"name":"negative","type":"CONDITIONING","link":6},{"name":"latent_image","type":"LATENT","link":2}],"outputs":[{"name":"LATENT","type":"LATENT","links":[7],"slot_index":0}],"properties":{},"widgets_values":[8566257,true,20,8,"sample_euler","normal",1]},{"id":8,"type":"VAEDecode","pos":[1209,188],"size":{"0":210,"1":46},"flags":{},"order":5,"mode":0,"inputs":[{"name":"samples","type":"LATENT","link":7},{"name":"vae","type":"VAE","link":8}],"outputs":[{"name":"IMAGE","type":"IMAGE","links":[9],"slot_index":0}],"properties":{}},{"id":9,"type":"SaveImage","pos":[1451,189],"size":{"0":210,"1":26},"flags":{},"order":6,"mode":0,"inputs":[{"name":"images","type":"IMAGE","link":9}],"properties":{}},{"id":4,"type":"CheckpointLoader","pos":[26,474],"size":{"0":315,"1":122},"flags":{},"order":0,"mode":0,"outputs":[{"name":"MODEL","type":"MODEL","links":[1],"slot_index":0},{"name":"CLIP","type":"CLIP","links":[3,5],"slot_index":1},{"name":"VAE","type":"VAE","links":[8],"slot_index":2}],"properties":{},"widgets_values":["v1-inference.yaml","v1-5-pruned-emaonly.ckpt"]}],"links":[[1,4,0,3,0,"MODEL"],[2,5,0,3,3,"LATENT"],[3,4,1,6,0,"CLIP"],[4,6,0,3,1,"CONDITIONING"],[5,4,1,7,0,"CLIP"],[6,7,0,3,2,"CONDITIONING"],[7,3,0,8,0,"LATENT"],[8,4,2,8,1,"VAE"],[9,8,0,9,0,"IMAGE"]],"groups":[],"config":{},"extra":{},"version":0.4}
+function loadGraphData(graph, graph_data)
+{
+    graph.configure( graph_data);
+    for (let n in graph._nodes) {
+        n = graph._nodes[n];
+        s = n.computeSize();
+        s[0] = Math.max(n.size[0], s[0]);
+        s[1] = Math.max(n.size[1], s[1]);
+        n.size = s;
+    }
+}
+
function afterLoadGraph()
{
    let workflow = null;
    try {
        workflow = JSON.parse(localStorage.getItem("workflow"));
-        graph.configure(workflow);
+        loadGraphData(graph, workflow);
    } catch(err) {
    }
    if (!workflow) {
-        graph.configure(default_graph);
+        loadGraphData(graph, default_graph);
    }
}
function saveGraph() {
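
The new loadGraphData helper wraps graph.configure and then grows any node whose serialized size falls below the minimum LiteGraph computes for its current widgets, which is exactly the situation when a workflow was saved before a node gained a string property. Every load path in this commit (the localStorage restore above, plus the PNG-metadata, JSON-file, and default-graph paths further down) is rerouted through it. The resize step, restated with comments for clarity:

    // Annotated restatement of the loop inside loadGraphData (illustrative only).
    for (let n in graph._nodes) {
        n = graph._nodes[n];                // reuse n as the node object itself
        s = n.computeSize();                // minimum size for the node's current widgets
        s[0] = Math.max(n.size[0], s[0]);   // keep the saved width if it already fits
        s[1] = Math.max(n.size[1], s[1]);   // likewise for the height
        n.size = s;                         // otherwise grow to the computed minimum
    }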
@@ -97,12 +109,13 @@ function onObjectInfo(json) {
        min_height = 1;
        min_width = 1;
        for (let x in inp) {
-            let default_val = min_val = max_val = step_val = undefined;
+            let default_val = min_val = max_val = step_val = multiline = undefined;
            if (inp[x].length > 1) {
                default_val = inp[x][1]['default'];
                min_val = inp[x][1]['min'];
                max_val = inp[x][1]['max'];
                step_val = inp[x][1]['step'];
+                multiline = inp[x][1]['multiline'];
            }
            let type = inp[x][0];
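
For orientation: the inp object iterated above comes from the object_info response, and each entry is an array whose first element is the type name and whose optional second element is the options dict the node returned from INPUT_TYPES. A hedged sketch of the CLIPTextEncode entries after this commit, with the shape inferred from the parsing code rather than captured from a live server:

    // Shape of inp as the loop above consumes it (values illustrative).
    const inp = {
        "text": ["STRING", { "multiline": true }],  // length > 1: options are read
        "clip": ["CLIP"]                            // not a widget type: handled by this.addInput below
    };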
@@ -134,6 +147,10 @@ function onObjectInfo(json) {
            // }
            this._widgets += [w]
        } else if (type == "STRING") {
+            if (default_val == undefined) default_val = "";
+            if (multiline == undefined) multiline = false;
+            if (multiline) {
            var w = {
                type: "customtext",
                name: x,
@@ -173,6 +190,7 @@ function onObjectInfo(json) {
            w.input_div.contentEditable = true;
            w.input_div.style.backgroundColor = "#FFFFFF";
            w.input_div.style.overflow = 'hidden';
+            w.input_div.innerText = default_val;
            document.addEventListener('click', function(event) {
                if (!w.input_div.contains(event.target)) {
                    w.input_div.blur();
@@ -194,6 +212,10 @@ function onObjectInfo(json) {
                    }
                }
            }
+            } else {
+                w = this.addWidget("text", x, default_val, function(v){}, { multiline:false } );
+                this._widgets += [w];
+            }
        } else {
            this.addInput(x, type);
        }
@@ -219,7 +241,7 @@ function onObjectInfo(json) {
    };
    afterLoadGraph();
-    // graph.configure(JSON.parse(base_txt2img_graph));
+    // loadGraphData(graph, JSON.parse(base_txt2img_graph));
}
fetch("object_info", {cache: "no-store"})
@@ -376,7 +398,7 @@ function prompt_file_load(file)
            }
            console.log(txt_chunks);
            console.log(JSON.parse(txt_chunks["prompt"]));
-            graph.configure(JSON.parse(txt_chunks["workflow"]));
+            loadGraphData(graph, JSON.parse(txt_chunks["workflow"]));
        };
        reader.readAsArrayBuffer(file);
    } else if (file.type === "application/json" || file.name.endsWith(".json")) {
@@ -384,7 +406,7 @@ function prompt_file_load(file)
        reader.onload = function() {
            console.log(reader.result);
            var jsonData = JSON.parse(reader.result);
-            graph.configure(jsonData);
+            loadGraphData(graph, jsonData);
        };
        reader.readAsText(file);
    }
@@ -413,7 +435,7 @@ function clearGraph() {
}
function loadTxt2Img() {
-    graph.configure(default_graph);
+    loadGraphData(graph, default_graph);
}
function saveGraph() {