{
    "infer_steps": 4,
    "target_video_length": 81,
    "text_len": 512,
    "target_height": 480,
    "target_width": 832,
    "self_attn_1_type": "flash_attn3",
    "cross_attn_1_type": "flash_attn3",
    "cross_attn_2_type": "flash_attn3",
    "sample_guide_scale": [
        4.0,
        3.0
    ],
    "sample_shift": 5.0,
    "enable_cfg": false,
    "cpu_offload": true,
    "offload_granularity": "model",
    "t5_cpu_offload": false,
    "vae_cpu_offload": false,
    "boundary_step_index": 2,
    "denoising_step_list": [
        1000,
        750,
        500,
        250
    ],
    "high_noise_original_ckpt": "Wan2.2-T2V-A14B/distill_models/high_noise_model/distill_model.safetensors",
    "low_noise_original_ckpt": "Wan2.2-T2V-A14B/distill_models/low_noise_model/distill_model.safetensors"
}