"i18n/README_ja.md" did not exist on "e842e181df57c06039ec1724fc0dd8b875b8f4d4"
set_config.py 3.63 KB
Newer Older
import json
import os
from easydict import EasyDict
from loguru import logger

import torch.distributed as dist
from torch.distributed.device_mesh import init_device_mesh


def get_default_config():
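    """Return the built-in default configuration as a plain dict.

    These values are the lowest-precedence layer; set_config() overlays the
    CLI arguments and JSON config files on top of them.
    """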
    default_config = {
        "do_mm_calib": False,
        "cpu_offload": False,
        "max_area": False,
        "vae_stride": (4, 8, 8),
        "patch_size": (1, 2, 2),
        "feature_caching": "NoCaching",  # ["NoCaching", "TaylorSeer", "Tea"]
        "teacache_thresh": 0.26,
        "use_ret_steps": False,
        "use_bfloat16": True,
        "lora_configs": None,  # List of dicts with 'path' and 'strength' keys
        "mm_config": {},
        "use_prompt_enhancer": False,
        "parallel": False,
        "enable_cfg": False,
    }
    return default_config


def set_config(args):
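    """Build the final runtime config by merging several sources.

    Precedence, lowest to highest: built-in defaults, the parsed CLI
    arguments, the user-supplied `config_json` file, the model's own
    `config.json` (or its `low_noise_model` / `original` variant), and the
    quantized-checkpoint config. Parallel settings are derived at the end.
    """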
    config = get_default_config()
    config.update({k: v for k, v in vars(args).items()})
    config = EasyDict(config)

    with open(config.config_json, "r") as f:
        config_json = json.load(f)
    config.update(config_json)

    if os.path.exists(os.path.join(config.model_path, "config.json")):
        with open(os.path.join(config.model_path, "config.json"), "r") as f:
            model_config = json.load(f)
        config.update(model_config)
    elif os.path.exists(os.path.join(config.model_path, "low_noise_model", "config.json")):  # 需要一个更优雅的update方法
        with open(os.path.join(config.model_path, "low_noise_model", "config.json"), "r") as f:
            model_config = json.load(f)
        config.update(model_config)
    elif os.path.exists(os.path.join(config.model_path, "original", "config.json")):
        with open(os.path.join(config.model_path, "original", "config.json"), "r") as f:
            model_config = json.load(f)
        config.update(model_config)
    # load quantized config
    if config.get("dit_quantized_ckpt", None) is not None:
        config_path = os.path.join(config.dit_quantized_ckpt, "config.json")
        if os.path.exists(config_path):
            with open(config_path, "r") as f:
                model_config = json.load(f)
            config.update(model_config)

    if config.task == "i2v":
        if config.target_video_length % config.vae_stride[0] != 1:
            logger.warning(f"`num_frames - 1` has to be divisible by {config.vae_stride[0]}. Rounding to the nearest number.")
            config.target_video_length = config.target_video_length // config.vae_stride[0] * config.vae_stride[0] + 1

    set_parallel_config(config)  # parallel config

    return config


def set_parallel_config(config):
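    """Derive sequence/CFG parallel flags and set up the device mesh.

    When `config.parallel` is set, the NCCL process group is initialized (if
    not already) and a 2-D device mesh with dims ("cfg_p", "seq_p") is built;
    `seq_parallel` and `cfg_parallel` are enabled only when the corresponding
    mesh dimension is larger than 1 (CFG parallel also requires `enable_cfg`).
    """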
    config["seq_parallel"] = False
    config["cfg_parallel"] = False
    if config.parallel:
        if not dist.is_initialized():
            dist.init_process_group(backend="nccl")

        cfg_p_size = config.parallel.get("cfg_p_size", 1)
        seq_p_size = config.parallel.get("seq_p_size", 1)
        assert cfg_p_size * seq_p_size == dist.get_world_size(), f"cfg_p_size ({cfg_p_size}) * seq_p_size ({seq_p_size}) must equal world_size ({dist.get_world_size()})"
        config["device_mesh"] = init_device_mesh("cuda", (cfg_p_size, seq_p_size), mesh_dim_names=("cfg_p", "seq_p"))

        if config.parallel and config.parallel.get("seq_p_size", False) and config.parallel.seq_p_size > 1:
            config["seq_parallel"] = True

        if config.get("enable_cfg", False) and config.parallel and config.parallel.get("cfg_p_size", False) and config.parallel.cfg_p_size > 1:
            config["cfg_parallel"] = True


def print_config(config):
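    """Log the config as pretty-printed JSON, dropping the non-serializable device mesh."""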
    config_to_print = config.copy()
    config_to_print.pop("device_mesh", None)  # Remove device_mesh if it exists
    logger.info(f"config:\n{json.dumps(config_to_print, ensure_ascii=False, indent=4)}")