Unverified Commit 03b38a46 authored by Zaida Zhou, committed by GitHub

[Refactor] Move is_mlu_available to mmcv/utils/device_type.py (#1884)

* [Refactor] Move is_mlu_available to mmcv/utils/device_type.py

* remove comma

* fix isort
parent 362a90f8
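
For context: the diff below only shows the import changes, not the body of the new module. Assuming the flag keeps its previous semantics, mmcv/utils/device_type.py presumably looks roughly like the sketch below (the torch.is_mlu_available() hook is an assumption about how the torch_mlu extension exposes MLU support, not something shown in this commit):

# Hedged sketch of mmcv/utils/device_type.py; its actual body is not part of this diff.
import torch


def is_mlu_available() -> bool:
    """Return True if PyTorch reports a Cambricon MLU device (assumption)."""
    # torch_mlu is assumed to patch torch with an `is_mlu_available` hook.
    return hasattr(torch, 'is_mlu_available') and torch.is_mlu_available()


IS_MLU_AVAILABLE = is_mlu_available()
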
@@ -2,9 +2,8 @@
 from .data_parallel import MLUDataParallel
 from .distributed import MLUDistributedDataParallel
 from .scatter_gather import scatter, scatter_kwargs
-from .utils import IS_MLU_AVAILABLE

 __all__ = [
     'MLUDataParallel', 'MLUDistributedDataParallel', 'scatter',
-    'scatter_kwargs', 'IS_MLU_AVAILABLE'
+    'scatter_kwargs'
 ]
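
In practice this hunk means the flag is no longer re-exported from mmcv.device.mlu; downstream code should import it from mmcv.utils instead, e.g. (the device-selection line is only illustrative):

# old (removed by this commit): from mmcv.device.mlu import IS_MLU_AVAILABLE
from mmcv.utils import IS_MLU_AVAILABLE

device = 'mlu' if IS_MLU_AVAILABLE else 'cpu'  # illustrative use of the flag
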
@@ -12,7 +12,7 @@ from torch import distributed as dist
 from torch._utils import (_flatten_dense_tensors, _take_tensors,
                           _unflatten_dense_tensors)

-from mmcv.device.mlu import IS_MLU_AVAILABLE
+from mmcv.utils import IS_MLU_AVAILABLE


 def _find_free_port():
@@ -36,6 +36,7 @@ except ImportError:
         'is_method_overridden', 'has_method'
     ]
 else:
+    from .device_type import IS_MLU_AVAILABLE
     from .env import collect_env
     from .hub import load_url
     from .logging import get_logger, print_log
@@ -73,5 +74,5 @@ else:
         'assert_params_all_zeros', 'check_python_script',
         'is_method_overridden', 'is_jit_tracing', 'is_rocm_pytorch',
         '_get_cuda_home', 'load_url', 'has_method', 'IS_CUDA_AVAILABLE',
-        'worker_init_fn'
+        'worker_init_fn', 'IS_MLU_AVAILABLE'
     ]
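
The else: branch in the two hunks above sits under mmcv/utils/__init__.py's guard around torch-dependent utilities; a condensed sketch of that structure, with the long import and __all__ lists truncated, is:

# Condensed sketch of the guard pattern in mmcv/utils/__init__.py (lists truncated).
try:
    import torch  # noqa: F401
except ImportError:
    # Without torch, only torch-free utilities are exported.
    __all__ = ['is_method_overridden', 'has_method']  # truncated
else:
    # With torch, device flags such as IS_MLU_AVAILABLE are exported as well.
    from .device_type import IS_MLU_AVAILABLE
    __all__ = ['has_method', 'worker_init_fn', 'IS_MLU_AVAILABLE']  # truncated
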
@@ -5,10 +5,10 @@ import pytest
 import torch
 import torch.nn as nn

-from mmcv.device.mlu import (IS_MLU_AVAILABLE, MLUDataParallel,
-                             MLUDistributedDataParallel)
+from mmcv.device.mlu import MLUDataParallel, MLUDistributedDataParallel
 from mmcv.device.mlu._functions import Scatter, scatter
 from mmcv.parallel import is_module_wrapper
+from mmcv.utils import IS_MLU_AVAILABLE


 def mock(*args, **kwargs):
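
The test modules keep gating on the flag exactly as before, only via the new import path. For illustration, a typical guard looks roughly like this (the test name and body are placeholders, not taken from the diff):

import pytest

from mmcv.utils import IS_MLU_AVAILABLE


@pytest.mark.skipif(not IS_MLU_AVAILABLE, reason='requires an MLU device')
def test_mlu_only_example():
    # Placeholder body; the real tests exercise MLUDataParallel and friends.
    assert IS_MLU_AVAILABLE
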
@@ -3,8 +3,7 @@ import numpy as np
 import pytest
 import torch

-from mmcv.device.mlu import IS_MLU_AVAILABLE
-from mmcv.utils import IS_CUDA_AVAILABLE
+from mmcv.utils import IS_CUDA_AVAILABLE, IS_MLU_AVAILABLE


 class TestBBox(object):
@@ -3,8 +3,7 @@ import numpy as np
 import pytest
 import torch

-from mmcv.device.mlu import IS_MLU_AVAILABLE
-from mmcv.utils import IS_CUDA_AVAILABLE
+from mmcv.utils import IS_CUDA_AVAILABLE, IS_MLU_AVAILABLE

 _USING_PARROTS = True
 try:
@@ -3,8 +3,7 @@ import numpy as np
 import pytest
 import torch

-from mmcv.device.mlu import IS_MLU_AVAILABLE
-from mmcv.utils import IS_CUDA_AVAILABLE
+from mmcv.utils import IS_CUDA_AVAILABLE, IS_MLU_AVAILABLE


 class Testnms(object):
@@ -3,8 +3,7 @@ import numpy as np
 import pytest
 import torch

-from mmcv.device.mlu import IS_MLU_AVAILABLE
-from mmcv.utils import IS_CUDA_AVAILABLE
+from mmcv.utils import IS_CUDA_AVAILABLE, IS_MLU_AVAILABLE

 _USING_PARROTS = True
 try:
@@ -5,8 +5,7 @@ import numpy as np
 import pytest
 import torch

-from mmcv.device.mlu import IS_MLU_AVAILABLE
-from mmcv.utils import IS_CUDA_AVAILABLE
+from mmcv.utils import IS_CUDA_AVAILABLE, IS_MLU_AVAILABLE

 _USING_PARROTS = True
 try:
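
The remaining hunks are the same one-line import merge repeated across the ops tests. Those tests commonly parameterize over devices with both flags; a hedged sketch of that pattern (parameter names and bodies are illustrative):

import pytest

from mmcv.utils import IS_CUDA_AVAILABLE, IS_MLU_AVAILABLE


@pytest.mark.parametrize('device', [
    pytest.param(
        'cuda',
        marks=pytest.mark.skipif(not IS_CUDA_AVAILABLE, reason='requires CUDA')),
    pytest.param(
        'mlu',
        marks=pytest.mark.skipif(not IS_MLU_AVAILABLE, reason='requires MLU')),
])
def test_op_on_device(device):
    # Placeholder; the real tests run ops such as nms or bbox_overlaps on `device`.
    assert device in ('cuda', 'mlu')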