import os
from comfy_extras.chainner_models import model_loading
from comfy.sd import load_torch_file
import model_management
from nodes import filter_files_extensions, recursive_search, supported_ckpt_extensions
import torch
import comfy.utils
import folder_paths

class UpscaleModelLoader:
    """Node that loads an image-upscaling model from the 'upscale_models'
    folder and returns it ready for inference."""

    @classmethod
    def INPUT_TYPES(s):
        # Single required combo input: every model file discovered in the
        # configured 'upscale_models' directories.
        return {
            "required": {
                "model_name": (folder_paths.get_filename_list("upscale_models"),),
            }
        }

    RETURN_TYPES = ("UPSCALE_MODEL",)
    FUNCTION = "load_model"

    CATEGORY = "loaders"

    def load_model(self, model_name):
        """Resolve *model_name* to a full path, load its state dict, and
        build the network in eval mode.

        Returns a one-tuple containing the loaded model, per node convention.
        """
        model_path = folder_paths.get_full_path("upscale_models", model_name)
        state_dict = load_torch_file(model_path)
        # model_loading infers the architecture from the state dict keys.
        model = model_loading.load_state_dict(state_dict).eval()
        return (model,)


class ImageUpscaleWithModel:
    """Node that applies a loaded upscale model to an image, processing it
    tile-by-tile to bound GPU memory usage."""

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "upscale_model": ("UPSCALE_MODEL",),
                "image": ("IMAGE",),
            }
        }

    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "upscale"

    CATEGORY = "image/upscaling"

    def upscale(self, upscale_model, image):
        """Upscale *image* with *upscale_model* and return the result
        clamped to [0, 1] as a one-tuple."""
        device = model_management.get_torch_device()
        upscale_model.to(device)
        # Move the channel axis from last to -3 (channels-first layout
        # expected by the model) and push the tensor onto the device.
        tensor_in = image.movedim(-1, -3).to(device)
        # 192x192 tiles with an 8-pixel overlap; output size is scaled by
        # the model's own upscale factor.
        upscaled = comfy.utils.tiled_scale(
            tensor_in,
            lambda tile: upscale_model(tile),
            tile_x=192,
            tile_y=192,
            overlap=8,
            upscale_amount=upscale_model.scale,
        )
        # Return the model to the CPU so it doesn't hold GPU memory.
        upscale_model.cpu()
        result = torch.clamp(upscaled.movedim(-3, -1), min=0, max=1.0)
        return (result,)

# Registry mapping node identifiers to their implementing classes; the host
# application reads this at import time to discover the nodes in this module.
NODE_CLASS_MAPPINGS = {
    "UpscaleModelLoader": UpscaleModelLoader,
    "ImageUpscaleWithModel": ImageUpscaleWithModel,
}