Commit 0604509a authored by dongcl's avatar dongcl
Browse files

Fix flux import bug: make flux an optional dependency by guarding its import with try/except, and skip the ColumnParallelLinear/RowParallelLinear flux patches when flux is not installed

parent e45e6793
...@@ -188,16 +188,23 @@ class CoreAdaptation(MegatronAdaptationABC): ...@@ -188,16 +188,23 @@ class CoreAdaptation(MegatronAdaptationABC):
apply_wrapper=True) apply_wrapper=True)
# flux # flux
MegatronAdaptation.register("megatron.core.tensor_parallel.layers.ColumnParallelLinear.__init__", try:
parallel_linear_init_wrapper, import flux
apply_wrapper=True) HAS_FLUX = True
MegatronAdaptation.register("megatron.core.tensor_parallel.layers.ColumnParallelLinear.forward", except ImportError:
ColumnParallelLinearPatch.forward) HAS_FLUX = False
MegatronAdaptation.register("megatron.core.tensor_parallel.layers.RowParallelLinear.__init__",
parallel_linear_init_wrapper, if HAS_FLUX:
apply_wrapper=True) MegatronAdaptation.register("megatron.core.tensor_parallel.layers.ColumnParallelLinear.__init__",
MegatronAdaptation.register("megatron.core.tensor_parallel.layers.RowParallelLinear.forward", parallel_linear_init_wrapper,
RowParallelLinearPatch.forward) apply_wrapper=True)
MegatronAdaptation.register("megatron.core.tensor_parallel.layers.ColumnParallelLinear.forward",
ColumnParallelLinearPatch.forward)
MegatronAdaptation.register("megatron.core.tensor_parallel.layers.RowParallelLinear.__init__",
parallel_linear_init_wrapper,
apply_wrapper=True)
MegatronAdaptation.register("megatron.core.tensor_parallel.layers.RowParallelLinear.forward",
RowParallelLinearPatch.forward)
def patch_training(self): def patch_training(self):
......
...@@ -3,7 +3,12 @@ import warnings ...@@ -3,7 +3,12 @@ import warnings
from functools import wraps from functools import wraps
from typing import Callable, List, Optional from typing import Callable, List, Optional
import flux try:
import flux
except ImportError:
from megatron.training import print_rank_0
print_rank_0(f"flux is NOT installed")
import torch import torch
import torch.nn.functional as F import torch.nn.functional as F
from torch.nn.parameter import Parameter from torch.nn.parameter import Parameter
......
Markdown is supported
0% — Attach a file by dragging & dropping, selecting or pasting it.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment