Unverified Commit 08d2f46a authored by Yang Yong (雍洋), committed by GitHub

fix self-forcing model atten type (#347)

parent 04812de2
@@ -113,7 +113,7 @@ class WanSelfAttention(WeightModule):
         self.lazy_load = lazy_load
         self.lazy_load_file = lazy_load_file
-        if self.config.get("sf_config", "False"):
+        if self.config.get("sf_config", False):
             self.attn_rms_type = "self_forcing"
         else:
             self.attn_rms_type = "sgl-kernel"
@@ -228,7 +228,7 @@ class WanCrossAttention(WeightModule):
         self.lazy_load = lazy_load
         self.lazy_load_file = lazy_load_file
-        if self.config.get("sf_config", "False"):
+        if self.config.get("sf_config", False):
             self.attn_rms_type = "self_forcing"
         else:
             self.attn_rms_type = "sgl-kernel"
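For context, the fix matters because the old default value was the string "False", which is truthy in Python, so the condition was taken even when "sf_config" was absent. Below is a minimal standalone sketch (not code from the repository; the plain dict stands in for the real config object) illustrating the behavioral difference between the old and new defaults.

```python
# Hypothetical config with no "sf_config" entry, standing in for self.config.
config = {}

# Old behavior: the default is the string "False", which is truthy,
# so the self_forcing branch was always selected when the key was missing.
attn_rms_type = "self_forcing" if config.get("sf_config", "False") else "sgl-kernel"
print(attn_rms_type)  # -> self_forcing (unintended)

# Fixed behavior: the default is the boolean False, which is falsy,
# so the intended sgl-kernel fallback is used when the key is missing.
attn_rms_type = "self_forcing" if config.get("sf_config", False) else "sgl-kernel"
print(attn_rms_type)  # -> sgl-kernel (intended)
```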