upsampler.example0.json
{
    "decoder": {
        "unet_sources": [
            {
                "unet_numbers": [1],
                "default_cond_scale": [1.7],
                "load_model_from": {
                    "load_type": "local",
                    "path": "/public/home/dongchy920/dalle2-laion-main/models/new_decoder.pth",
                    "cache_dir": "./models",
                    "filename_override": "new_decoder.pth"
                }
            },
            {
                "unet_numbers": [2],
                "load_model_from": {
                    "load_type": "local",
                    "path": "/public/home/dongchy920/dalle2-laion-main/models/second_decoder.pth",
                    "cache_dir": "./models",
                    "filename_override": "second_decoder.pth"
                },
                "load_config_from": {
                    "load_type": "local",
                    "path": "/public/home/dongchy920/dalle2-laion-main/models/second_decoder_config.json",
                    "checksum_file_path":  "https://huggingface.co/Veldrovive/upsamplers/raw/main/working/decoder_config.json",
                    "cache_dir": "./models",
                    "filename_override": "second_decoder_config.json"
                }
            }
        ]
    },
    "prior": {
        "load_model_from": {
            "load_type": "local",
            "path": "/public/home/dongchy920/dalle2-laion-main/models/prior.pth",
            "cache_dir": "./models",
            "filename_override": "prior.pth"
        },
        "load_config_from": {
            "load_type": "local",
            "path": "/public/home/dongchy920/dalle2-laion-main/models/prior_config.json",
            "checksum_file_path": "https://huggingface.co/laion/DALLE2-PyTorch/raw/main/prior/prior_config.json",
            "cache_dir": "./models"
        }
    },
    "clip": {
        "make": "openai",
        "model": "ViT-L/14"
    },

    "devices": "cuda:0",
    "strict_loading": false
}
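
This example configures dalle2-laion inference with two decoder U-Nets loaded from local checkpoints (the second acting as the upsampler, with its own config checked against a remote checksum file), a diffusion prior, and OpenAI's ViT-L/14 CLIP, all placed on cuda:0 with non-strict state-dict loading. With absolute paths like these, a typo tends to surface only after a slow model-load attempt fails. Below is a minimal pre-flight sketch, using only the Python standard library, that parses the JSON and reports whether each `"load_type": "local"` entry points at an existing file; the helper name `check_local_paths` and the hard-coded config filename are illustrative, not part of dalle2-laion.

```python
import json
from pathlib import Path


def check_local_paths(config_path: str) -> None:
    """Report missing files for every '"load_type": "local"' entry.

    Hypothetical helper for pre-flight validation; not part of dalle2-laion.
    """
    with open(config_path) as f:
        cfg = json.load(f)

    def walk(node) -> None:
        # Recurse through nested dicts/lists looking for local load entries.
        if isinstance(node, dict):
            if node.get("load_type") == "local":
                path = Path(node["path"])
                status = "ok" if path.is_file() else "MISSING"
                print(f"{status}: {path}")
            for value in node.values():
                walk(value)
        elif isinstance(node, list):
            for item in node:
                walk(item)

    walk(cfg)


if __name__ == "__main__":
    # Assumes the config sits next to this script; adjust the path as needed.
    check_local_paths("upsampler.example0.json")
```

Note that the `checksum_file_path` entries reference remote Hugging Face URLs rather than local files, so the sketch deliberately checks only the `path` field of each local source.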