Commit 9b700096 authored by fengzch-das

fix: import error

parent ab16ec95
@@ -4,8 +4,8 @@ import torch
 import torch.distributed as dist
 from torch import Tensor
-from colossalai.context.parallel_mode import ParallelMode
-from colossalai.core import global_context as gpc
+from colossalai.legacy.context.parallel_mode import ParallelMode
+from colossalai.legacy.core import global_context as gpc
 from .core import ensure_divisibility
...
@@ -5,8 +5,8 @@ import torch
 import torch.distributed as dist
 from torch import Tensor
-from colossalai.context.parallel_mode import ParallelMode
-from colossalai.core import global_context as gpc
+from colossalai.legacy.context.parallel_mode import ParallelMode
+from colossalai.legacy.core import global_context as gpc
 from .comm import _split, divide
...
@@ -4,8 +4,8 @@ from functools import partial
 import torch
 import torch.nn as nn
-from colossalai.context.parallel_mode import ParallelMode
-from colossalai.core import global_context as gpc
+from colossalai.legacy.context.parallel_mode import ParallelMode
+from colossalai.legacy.core import global_context as gpc
 from fastfold.model.fastnn import MSACore, OutProductMean, PairCore
 from fastfold.model.fastnn.ops import Linear
...
@@ -18,8 +18,8 @@ from typing import Optional, Tuple
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
-from colossalai.context.parallel_mode import ParallelMode
-from colossalai.core import global_context as gpc
+from colossalai.legacy.context.parallel_mode import ParallelMode
+from colossalai.legacy.core import global_context as gpc
 from fastfold.model.fastnn.kernel import LayerNorm, bias_dropout_add
 from fastfold.model.fastnn.ops import (ChunkMSARowAttentionWithPairBias, ChunkTransition,
                                        SelfAttention, GlobalAttention, Transition,
...
@@ -18,8 +18,8 @@ from typing import Optional, List
 import torch
 import torch.nn as nn
-from colossalai.context.parallel_mode import ParallelMode
-from colossalai.core import global_context as gpc
+from colossalai.legacy.context.parallel_mode import ParallelMode
+from colossalai.legacy.core import global_context as gpc
 from fastfold.model.nn.primitives import Attention
 from fastfold.utils.checkpointing import checkpoint_blocks
...
@@ -10,8 +10,8 @@ from fastfold.model.fastnn.ops import set_chunk_size
 from fastfold.model.hub import AlphaFold
 from fastfold.utils.inject_fastnn import inject_fastnn
 from fastfold.utils.import_weights import import_jax_weights_
-from colossalai.context.parallel_mode import ParallelMode
-from colossalai.core import global_context as gpc
+from colossalai.legacy.context.parallel_mode import ParallelMode
+from colossalai.legacy.core import global_context as gpc
 from fastfold.distributed.comm import gather, scatter, row_to_col
 from fastfold.utils.test_utils import get_param_path
...
@@ -10,8 +10,8 @@ from fastfold.model.fastnn.ops import set_chunk_size
 from fastfold.model.hub import AlphaFold
 from fastfold.utils.inject_fastnn import inject_fastnn
 from fastfold.utils.import_weights import import_jax_weights_
-from colossalai.context.parallel_mode import ParallelMode
-from colossalai.core import global_context as gpc
+from colossalai.legacy.context.parallel_mode import ParallelMode
+from colossalai.legacy.core import global_context as gpc
 from fastfold.utils.test_utils import get_param_path
 from fastfold.distributed.comm import gather, scatter
...
@@ -10,8 +10,8 @@ from fastfold.model.fastnn.ops import set_chunk_size
 from fastfold.model.hub import AlphaFold
 from fastfold.utils.inject_fastnn import inject_fastnn
 from fastfold.utils.import_weights import import_jax_weights_
-from colossalai.context.parallel_mode import ParallelMode
-from colossalai.core import global_context as gpc
+from colossalai.legacy.context.parallel_mode import ParallelMode
+from colossalai.legacy.core import global_context as gpc
 from fastfold.distributed.comm import gather, scatter, row_to_col
 from fastfold.utils.test_utils import get_param_path
...
@@ -11,8 +11,8 @@ from fastfold.model.hub import AlphaFold
 from fastfold.utils.inject_fastnn import inject_fastnn
 from fastfold.utils.import_weights import import_jax_weights_
 from fastfold.config import model_config
-from colossalai.context.parallel_mode import ParallelMode
-from colossalai.core import global_context as gpc
+from colossalai.legacy.context.parallel_mode import ParallelMode
+from colossalai.legacy.core import global_context as gpc
 from fastfold.utils.test_utils import get_param_path
 from fastfold.distributed import scatter, row_to_col
 from fastfold.distributed.comm import gather, scatter
...
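
Every hunk above makes the same change: ParallelMode and the global context gpc are now imported from the colossalai.legacy namespace instead of the old top-level paths, which no longer resolve on newer ColossalAI releases. A minimal sketch (not part of this commit), assuming only the module path moved between releases, of a version-tolerant import that works on either side of the change:

try:
    # Newer ColossalAI releases: modules live under the legacy namespace
    from colossalai.legacy.context.parallel_mode import ParallelMode
    from colossalai.legacy.core import global_context as gpc
except ImportError:
    # Older ColossalAI releases: original top-level module paths
    from colossalai.context.parallel_mode import ParallelMode
    from colossalai.core import global_context as gpc

This commit instead pins the code to the new paths directly, which keeps the imports simple but requires a ColossalAI version that ships the colossalai.legacy package.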