Commit 397ce244 (xuwx1/LightX2V)
Authored Jul 21, 2025 by gushiqiao; committed by GitHub on Jul 21, 2025
Merge pull request #150 from ModelTC/dev_quant
Fix
Parents: cc2a283a, f62e3109
Showing 2 changed files with 1 addition and 3 deletions (+1, -3):

lightx2v/models/networks/wan/distill_model.py   +0 -1
lightx2v/models/runners/wan/wan_runner.py       +1 -2
lightx2v/models/networks/wan/distill_model.py

@@ -36,7 +36,6 @@ class WanDistillModel(WanModel):
         if os.path.exists(ckpt_path):
             logger.info(f"Loading weights from {ckpt_path}")
             weight_dict = torch.load(ckpt_path, map_location="cpu", weights_only=True)
-            print(weight_dict.keys())
             weight_dict = {
                 key: (weight_dict[key].to(torch.bfloat16) if use_bf16 or all(s not in key for s in skip_bf16) else weight_dict[key]).pin_memory().to(self.device) for key in weight_dict.keys()
             }
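For context, the logic this hunk keeps loads the checkpoint on CPU, casts each tensor to bfloat16 unless its key matches an entry in skip_bf16, then pins the tensor and copies it to the target device; the deleted line was only a debug print of the checkpoint keys. A minimal standalone sketch of that pattern follows; the function name, argument names, and defaults are illustrative, not the repo's actual API.

import torch

def load_and_cast_weights(ckpt_path, device, use_bf16=True, skip_bf16=()):
    # Load on CPU; weights_only=True avoids unpickling arbitrary Python objects.
    weight_dict = torch.load(ckpt_path, map_location="cpu", weights_only=True)
    # Cast to bfloat16 unless the key contains any skip pattern,
    # then pin the CPU tensor and copy it to the target device.
    return {
        key: (
            tensor.to(torch.bfloat16)
            if use_bf16 or all(s not in key for s in skip_bf16)
            else tensor
        ).pin_memory().to(device)
        for key, tensor in weight_dict.items()
    }

Calling pin_memory() before .to(device) stages each tensor in page-locked host memory, which speeds up the host-to-GPU copy; that appears to be the intent of the original comprehension.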
lightx2v/models/runners/wan/wan_runner.py

@@ -69,7 +69,6 @@ class WanRunner(DefaultRunner):
             clip_quantized_ckpt = None
             clip_quant_scheme = None
-            print(clip_quant_scheme)
         image_encoder = CLIPModel(
             dtype=torch.float16,
             device=self.init_device,

@@ -107,7 +106,7 @@ class WanRunner(DefaultRunner):
         else:
             t5_quant_scheme = None
             t5_quantized_ckpt = None
-            print(t5_quant_scheme)
         text_encoder = T5EncoderModel(
             text_len=self.config["text_len"],
             dtype=torch.bfloat16,
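The lines removed in this file are also bare print() calls left over from debugging the quantization fallback (where the quantized checkpoint and scheme default to None). If that information needs to be surfaced, the loading path in distill_model.py above already uses logger.info; a tiny illustrative snippet in that style, assuming the logger object is loguru's (its import is outside this diff):

from loguru import logger  # assumption: the `logger` seen in distill_model.py; the import is not shown in this diff

t5_quant_scheme = None  # illustrative value only
logger.info(f"t5_quant_scheme: {t5_quant_scheme}")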