Project: xuwx1 / LightX2V

Commit 9121bad1
Authored Jun 09, 2025 by gushiqiao
Parent: 5c241f86

    fix.

Showing 3 changed files with 13 additions and 12 deletions (+13 -12)
lightx2v/common/ops/mm/mm_weight.py    +9 -8
tools/convert/readme.md                +2 -2
tools/convert/readme_zh.md             +2 -2
lightx2v/common/ops/mm/mm_weight.py
@@ -2,12 +2,11 @@ import torch
 from abc import ABCMeta, abstractmethod
 from vllm import _custom_ops as ops
-# import sgl_kernel
-import sgl_kernel
 from lightx2v.utils.registry_factory import MM_WEIGHT_REGISTER
 from lightx2v.utils.quant_utils import IntegerQuantizer, FloatQuantizer
 from lightx2v.utils.envs import *
 from loguru import logger
+from safetensors import safe_open

 try:
     import q8_kernels.functional as Q8F
@@ -21,9 +20,11 @@ except ImportError:
 class MMWeightTemplate(metaclass=ABCMeta):
-    def __init__(self, weight_name, bias_name):
+    def __init__(self, weight_name, bias_name, lazy_load=False, lazy_load_file=None):
         self.weight_name = weight_name
         self.bias_name = bias_name
+        self.lazy_load = lazy_load
+        self.lazy_load_file = lazy_load_file
         self.config = {}

     @abstractmethod
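The new `lazy_load` / `lazy_load_file` constructor arguments, together with the `safetensors.safe_open` import added above, suggest that weights can be fetched from a checkpoint file on demand instead of being taken from a fully materialized state dict. A minimal sketch of that pattern (not part of this commit; the helper name, and the assumption that `lazy_load_file` is a file path rather than an already-open handle, are illustrative):

```python
from safetensors import safe_open


def fetch_tensor_lazily(lazy_load_file, tensor_name):
    # Hypothetical helper: open the .safetensors checkpoint and read a single
    # tensor by name, so the full weight dict never has to live in memory.
    with safe_open(lazy_load_file, framework="pt", device="cpu") as f:
        return f.get_tensor(tensor_name)
```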
@@ -61,8 +62,8 @@ class MMWeightTemplate(metaclass=ABCMeta):
 @MM_WEIGHT_REGISTER("Default")
 class MMWeight(MMWeightTemplate):
-    def __init__(self, weight_name, bias_name):
-        super().__init__(weight_name, bias_name)
+    def __init__(self, weight_name, bias_name, lazy_load=False, lazy_load_file=None):
+        super().__init__(weight_name, bias_name, lazy_load, lazy_load_file)

     def load(self, weight_dict):
         self.weight = weight_dict[self.weight_name].t()
@@ -90,8 +91,8 @@ class MMWeight(MMWeightTemplate):
 @MM_WEIGHT_REGISTER("Default-Force-FP32")
 class MMWeightForceFP32(MMWeight):
-    def __init__(self, weight_name, bias_name):
-        super().__init__(weight_name, bias_name)
+    def __init__(self, weight_name, bias_name, lazy_load=False, lazy_load_file=None):
+        super().__init__(weight_name, bias_name, lazy_load, lazy_load_file)

     def load(self, weight_dict):
         super().load(weight_dict)
@@ -102,7 +103,7 @@ class MMWeightForceFP32(MMWeight):
 class MMWeightQuantTemplate(MMWeightTemplate):
     def __init__(self, weight_name, bias_name, lazy_load=False, lazy_load_file=None):
-        super().__init__(weight_name, bias_name)
+        super().__init__(weight_name, bias_name, lazy_load, lazy_load_file)
         self.weight_scale_name = self.weight_name.removesuffix(".weight") + ".weight_scale"
         self.load_func = None
         self.weight_need_transpose = True
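After this change every `MMWeight*` constructor accepts the same four arguments, so call sites can opt into lazy loading uniformly while the eager `load(weight_dict)` path keeps working. An illustrative construction (the tensor names and checkpoint path below are hypothetical, not from this commit):

```python
from lightx2v.common.ops.mm.mm_weight import MMWeight

# Eager path: weights are pulled from an in-memory state dict via load().
mm = MMWeight("blocks.0.attn.qkv.weight", "blocks.0.attn.qkv.bias")
# mm.load(weight_dict)

# Lazy path enabled by the new arguments: the wrapper records where the
# weights live instead of requiring the whole state dict up front.
mm_lazy = MMWeight(
    "blocks.0.attn.qkv.weight",
    "blocks.0.attn.qkv.bias",
    lazy_load=True,
    lazy_load_file="/Path/To/model.safetensors",  # hypothetical checkpoint path
)
```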
tools/convert/readme.md
@@ -46,7 +46,7 @@ python converter.py \
     --output /Path/To/output \
     --output_ext .pth \
     --output_name wan_fp8 \
-    --dtype torch.float8_e4m3_fn \
+    --dtype torch.float8_e4m3fn \
     --model_type wan_dit
 ```
@@ -70,7 +70,7 @@ python converter.py \
     --output /Path/To/output \
     --output_ext .pth \
     --output_name hunyuan_fp8 \
-    --dtype torch.float8_e4m3_fn \
+    --dtype torch.float8_e4m3fn \
     --model_type hunyuan_dit
 ```
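The README fix (mirrored in the Chinese README below) is a one-character dtype correction: `torch.float8_e4m3fn` is the attribute actually exposed by recent PyTorch releases, while `torch.float8_e4m3_fn` does not exist, so a converter that resolves the `--dtype` string against the `torch` module would fail. A quick check (assuming a PyTorch build with FP8 dtypes):

```python
import torch

print(hasattr(torch, "float8_e4m3fn"))   # True on PyTorch builds with FP8 support
print(hasattr(torch, "float8_e4m3_fn"))  # False: the old flag value cannot resolve to a dtype
```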
tools/convert/readme_zh.md
@@ -46,7 +46,7 @@ python converter.py \
     --output /Path/To/output \
     --output_ext .pth \
     --output_name wan_fp8 \
-    --dtype torch.float8_e4m3_fn \
+    --dtype torch.float8_e4m3fn \
     --model_type wan_dit
 ```
@@ -70,7 +70,7 @@ python converter.py \
     --output /Path/To/output \
     --output_ext .pth \
     --output_name hunyuan_fp8 \
-    --dtype torch.float8_e4m3_fn \
+    --dtype torch.float8_e4m3fn \
     --model_type hunyuan_dit
 ```