Commit a5991a7a authored by comfyanonymous

Fix hunyuan dit text encoder weights always being in fp32.

parent 2c038cce
@@ -52,8 +52,8 @@ class HyditTokenizer:
 class HyditModel(torch.nn.Module):
     def __init__(self, device="cpu", dtype=None):
         super().__init__()
-        self.hydit_clip = HyditBertModel()
-        self.mt5xl = MT5XLModel()
+        self.hydit_clip = HyditBertModel(dtype=dtype)
+        self.mt5xl = MT5XLModel(dtype=dtype)
         self.dtypes = set()
         if dtype is not None:
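For context, a minimal sketch of why forwarding dtype matters: torch.nn layers create their parameters in float32 when no dtype is given, so a text encoder built without the argument keeps fp32 weights regardless of what precision the caller requested. The class below is hypothetical (it is not HyditBertModel or MT5XLModel), and only illustrates the pattern the commit applies.

import torch

class TinyTextEncoder(torch.nn.Module):
    # Hypothetical stand-in for a text encoder submodel.
    def __init__(self, dtype=None):
        super().__init__()
        # Forwarding dtype means parameters are created in the requested
        # precision instead of the torch default (float32).
        self.proj = torch.nn.Linear(16, 16, dtype=dtype)

fp32_enc = TinyTextEncoder()                     # dtype omitted -> float32 weights
fp16_enc = TinyTextEncoder(dtype=torch.float16)  # dtype forwarded -> float16 weights
print(fp32_enc.proj.weight.dtype)  # torch.float32
print(fp16_enc.proj.weight.dtype)  # torch.float16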