import os

import torch

import comfy.utils
import model_management
from comfy.sd import load_torch_file
from comfy_extras.chainner_models import model_loading
from nodes import filter_files_extensions, recursive_search, supported_ckpt_extensions


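# Node that lists the checkpoints found in models/upscale_models and loads the
# selected one as an UPSCALE_MODEL value for downstream nodes.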
class UpscaleModelLoader:
    models_dir = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), "models")
    upscale_model_dir = os.path.join(models_dir, "upscale_models")

    @classmethod
    def INPUT_TYPES(s):
        return {"required": { "model_name": (filter_files_extensions(recursive_search(s.upscale_model_dir), supported_ckpt_extensions), ),
                             }}
    RETURN_TYPES = ("UPSCALE_MODEL",)
    FUNCTION = "load_model"

    CATEGORY = "loaders"

    def load_model(self, model_name):
        model_path = os.path.join(self.upscale_model_dir, model_name)
        sd = load_torch_file(model_path)
        out = model_loading.load_state_dict(sd).eval()
        return (out, )


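# Node that runs an UPSCALE_MODEL over an IMAGE, processing the input in
# overlapping tiles so large images do not exhaust GPU memory.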
class ImageUpscaleWithModel:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": { "upscale_model": ("UPSCALE_MODEL",),
                              "image": ("IMAGE",),
                              }}
    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "upscale"

    CATEGORY = "image/upscaling"

    def upscale(self, upscale_model, image):
        device = model_management.get_torch_device()
        upscale_model.to(device)
        # ComfyUI images are NHWC; the upscale model expects NCHW.
        in_img = image.movedim(-1, -3).to(device)
        # Upscale in overlapping tiles to bound peak VRAM usage.
        s = comfy.utils.tiled_scale(in_img, lambda a: upscale_model(a), tile_x=128 + 64, tile_y=128 + 64, overlap=8, upscale_amount=upscale_model.scale)
        # Offload the model back to CPU to free VRAM once the pass is done.
        upscale_model.cpu()
        # Back to NHWC and clamp to the valid [0, 1] image range.
        s = torch.clamp(s.movedim(-3, -1), min=0, max=1.0)
        return (s,)

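# Registration table ComfyUI reads to expose these classes as graph nodes.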
NODE_CLASS_MAPPINGS = {
    "UpscaleModelLoader": UpscaleModelLoader,
    "ImageUpscaleWithModel": ImageUpscaleWithModel
}
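

if __name__ == "__main__":
    # Minimal usage sketch, not part of the node API: it assumes this file is
    # run from a ComfyUI checkout (so the imports above resolve) and that at
    # least one checkpoint is present in models/upscale_models.
    loader = UpscaleModelLoader()
    model_names = UpscaleModelLoader.INPUT_TYPES()["required"]["model_name"][0]
    if model_names:
        (upscale_model,) = loader.load_model(model_names[0])
        # ComfyUI images are NHWC float tensors in [0, 1].
        image = torch.rand(1, 256, 256, 3)
        (result,) = ImageUpscaleWithModel().upscale(upscale_model, image)
        print(result.shape)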