Commit 0604509a authored by dongcl

flux bug

parent e45e6793
@@ -188,6 +188,13 @@ class CoreAdaptation(MegatronAdaptationABC):
                                    apply_wrapper=True)
        # flux
        try:
            import flux
            HAS_FLUX = True
        except ImportError:
            HAS_FLUX = False
        if HAS_FLUX:
            MegatronAdaptation.register("megatron.core.tensor_parallel.layers.ColumnParallelLinear.__init__",
                                        parallel_linear_init_wrapper,
                                        apply_wrapper=True)
...
@@ -3,7 +3,12 @@ import warnings
from functools import wraps
from typing import Callable, List, Optional

try:
    import flux
except ImportError:
    from megatron.training import print_rank_0
    print_rank_0(f"flux is NOT installed")
import torch
import torch.nn.functional as F
from torch.nn.parameter import Parameter
...
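For context, a minimal, self-contained sketch of the guard pattern this commit introduces: the optional flux import sets a HAS_FLUX flag, and the flux-specific monkey patch is only registered when the package is importable, so environments without flux no longer fail with ImportError. The wrapper body and the print calls below are illustrative stand-ins, not the Megatron/MindSpeed APIs themselves.

    # Sketch only: "flux" is the real optional package; everything else is illustrative.
    try:
        import flux  # optional fused GEMM/communication library
        HAS_FLUX = True
    except ImportError:
        HAS_FLUX = False


    def parallel_linear_init_wrapper(init_fn):
        # Illustrative wrapper around ColumnParallelLinear.__init__;
        # flux-specific setup would go inside wrapped().
        def wrapped(self, *args, **kwargs):
            init_fn(self, *args, **kwargs)
        return wrapped


    if HAS_FLUX:
        # Register the patch only when flux is present.
        print("flux found: registering parallel_linear_init_wrapper")
    else:
        # Skip flux-specific patches instead of crashing at import time.
        print("flux is NOT installed: skipping flux-specific patches")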