"requirements-rocm.txt" did not exist on "bb1ba58f064731b179d586ae32fdaaaea439098d"
distill_model.py 1.43 KB
Newer Older
1
2
3
4
5
import os
import sys
import torch
import glob
import json

from safetensors import safe_open

from lightx2v.models.networks.wan.model import WanModel
from lightx2v.models.networks.wan.weights.pre_weights import WanPreWeights
from lightx2v.models.networks.wan.weights.post_weights import WanPostWeights
from lightx2v.models.networks.wan.weights.transformer_weights import (
    WanTransformerWeights,
)
from lightx2v.utils.envs import *
from loguru import logger


class WanDistillModel(WanModel):
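    """Wan distill model: loads a single-file distill_model.pt checkpoint when present, otherwise defers to WanModel."""
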
    pre_weight_class = WanPreWeights
    post_weight_class = WanPostWeights
    transformer_weight_class = WanTransformerWeights

    def __init__(self, model_path, config, device):
        super().__init__(model_path, config, device)

    def _load_ckpt(self, use_bf16, skip_bf16):
        # For the old t2v distill model: https://huggingface.co/lightx2v/Wan2.1-T2V-14B-StepDistill-CfgDistill
        ckpt_path = os.path.join(self.model_path, "distill_model.pt")
        if os.path.exists(ckpt_path):
            logger.info(f"Loading weights from {ckpt_path}")
            weight_dict = torch.load(ckpt_path, map_location="cpu", weights_only=True)
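            # Cast each tensor to bf16 when use_bf16 is set or the key matches none of the
            # skip_bf16 substrings, then pin host memory and move it to the target device.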
            weight_dict = {
                key: (
                    weight.to(torch.bfloat16)
                    if use_bf16 or all(s not in key for s in skip_bf16)
                    else weight
                )
                .pin_memory()
                .to(self.device)
                for key, weight in weight_dict.items()
            }
            return weight_dict

        # Fall back to the base WanModel checkpoint loader when no distill_model.pt is present.
        return super()._load_ckpt(use_bf16, skip_bf16)
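

# For reference, a minimal usage sketch. The import path, checkpoint directory, and
# config contents below are assumptions for illustration, not values taken from the
# repository:
#
#     import torch
#     from lightx2v.models.networks.wan.distill_model import WanDistillModel  # assumed module path
#
#     model_path = "/models/Wan2.1-T2V-14B-StepDistill-CfgDistill"  # assumed local checkout
#     config = {"cpu_offload": False}  # placeholder; real configs come from lightx2v's config files
#     device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
#
#     # If model_path contains distill_model.pt, _load_ckpt loads it directly
#     # (casting to bf16 as configured); otherwise the base WanModel loader runs.
#     model = WanDistillModel(model_path, config, device)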