Unverified Commit eaa4f03d authored by ChaimZhu, committed by GitHub

[Enhance] Add Windows CI after moving ops (#1345)

* lock mmcv version

* change to cpu

* change int to int64

* fix tempfile error

* change tensor

* change tensor

* change cpu

* complete windows ci

* change dtype

* change data type to numpy

* change tensor to numpy

* fix comments
parent 2f88c124
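Several of the test diffs below swap `tempfile.NamedTemporaryFile` for `tempfile.TemporaryDirectory` (the "fix tempfile error" commit above). The reason is platform behavior: a file created by `NamedTemporaryFile` stays open with an exclusive handle, and on Windows it cannot be reopened by name while that handle exists, so the old tests crashed there. A minimal sketch of the failing pattern and the portable replacement (illustrative values; `os.path.join` is used here, while the tests below concatenate `tmpdir + 'classes.txt'` directly):

import os
import tempfile

# tmp_file = tempfile.NamedTemporaryFile()
# with open(tmp_file.name, 'w') as f:  # PermissionError on Windows:
#     f.write('car\npedestrian\n')     # the file is still held open

# Portable pattern: create an ordinary file inside a temporary
# directory, which can be reopened freely on every platform.
with tempfile.TemporaryDirectory() as tmpdir:
    path = os.path.join(tmpdir, 'classes.txt')
    with open(path, 'w') as f:
        f.write('car\npedestrian\n')
    with open(path) as f:
        assert f.read().splitlines() == ['car', 'pedestrian']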
@@ -103,3 +103,44 @@ jobs:
           env_vars: OS,PYTHON
           name: codecov-umbrella
           fail_ci_if_error: false
+  build_windows:
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [windows-2022]
+        python: [3.8]
+        platform: [cpu]
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python ${{ matrix.python }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python }}
+      - name: Upgrade pip
+        run: pip install pip --upgrade --user
+      - name: Install PyTorch
+        # As a complement to Linux CI, we test on PyTorch LTS version
+        run: pip install torch==1.8.2+${{ matrix.platform }} torchvision==0.9.2+${{ matrix.platform }} -f https://download.pytorch.org/whl/lts/1.8/torch_lts.html
+      - name: Install mmdet3d dependencies
+        run: |
+          pip install mmcv-full==1.4.7 -f https://download.openmmlab.com/mmcv/dist/cpu/torch1.8/index.html --only-binary mmcv-full
+          python -m pip install mmdet==2.19.0
+          python -m pip install mmsegmentation==0.20.0
+          python -m pip install -r requirements/build.txt -r requirements/runtime.txt -r requirements/tests.txt
+      - name: Build and install
+        run: pip install -e .
+      - name: Run unittests and generate coverage report
+        run: coverage run --branch --source mmdet3d -m pytest tests/
+      - name: Generate coverage report
+        run: |
+          coverage xml
+          coverage report -m
+      - name: Upload coverage to Codecov
+        uses: codecov/codecov-action@v2
+        with:
+          file: ./coverage.xml
+          flags: unittests
+          env_vars: OS,PYTHON
+          name: codecov-umbrella
+          fail_ci_if_error: false
@@ -518,7 +518,7 @@ class LoadAnnotations3D(LoadAnnotations):
                  with_seg=False,
                  with_bbox_depth=False,
                  poly2mask=True,
-                 seg_3d_dtype='int',
+                 seg_3d_dtype=np.int64,
                  file_client_args=dict(backend='disk')):
         super().__init__(
             with_bbox,
@@ -600,7 +600,7 @@ class LoadAnnotations3D(LoadAnnotations):
             self.file_client = mmcv.FileClient(**self.file_client_args)
         try:
             mask_bytes = self.file_client.get(pts_instance_mask_path)
-            pts_instance_mask = np.frombuffer(mask_bytes, dtype=np.int)
+            pts_instance_mask = np.frombuffer(mask_bytes, dtype=np.int64)
         except ConnectionError:
             mmcv.check_file_exist(pts_instance_mask_path)
             pts_instance_mask = np.fromfile(
...
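Both `LoadAnnotations3D` changes replace `np.int`, which is merely a deprecated alias for Python's `int` and maps to the platform's default integer dtype: 64-bit on Linux but 32-bit on Windows (for NumPy < 2.0). Since the mask files store 64-bit integers, decoding them with the default dtype silently misreads them on Windows. A short sketch of the failure mode (illustrative mask values):

import numpy as np

# The mask files store 64-bit integers.
mask_bytes = np.array([6, 56, 10, 9, 35], dtype=np.int64).tobytes()

# An explicit width decodes identically everywhere.
good = np.frombuffer(mask_bytes, dtype=np.int64)
print(good)        # [ 6 56 10  9 35] on every platform

# The platform default (what `np.int` resolved to) is 32-bit on
# Windows with NumPy < 2.0, so the same bytes split into twice as
# many meaningless values there.
risky = np.frombuffer(mask_bytes, dtype=int)
print(risky.size)  # 5 on Linux/macOS, 10 on Windows (NumPy < 2.0)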
@@ -118,12 +118,12 @@ def test_getitem():
     assert lyft_dataset.CLASSES == ('car', 'pedestrian')

     import tempfile
-    tmp_file = tempfile.NamedTemporaryFile()
-    with open(tmp_file.name, 'w') as f:
-        f.write('car\npedestrian\n')
-
-    lyft_dataset = LyftDataset(
-        ann_file, None, root_path, classes=tmp_file.name)
+    with tempfile.TemporaryDirectory() as tmpdir:
+        path = tmpdir + 'classes.txt'
+        with open(path, 'w') as f:
+            f.write('car\npedestrian\n')
+        lyft_dataset = LyftDataset(ann_file, None, root_path, classes=path)
+
     assert lyft_dataset.CLASSES != original_classes
     assert lyft_dataset.CLASSES == ['car', 'pedestrian']
...
@@ -179,15 +179,16 @@ def test_seg_getitem():
     # test load classes from file
     import tempfile
-    tmp_file = tempfile.NamedTemporaryFile()
-    with open(tmp_file.name, 'w') as f:
-        f.write('beam\nwindow\n')
-
-    s3dis_dataset = S3DISSegDataset(
-        data_root=root_path,
-        ann_files=ann_file,
-        pipeline=None,
-        classes=tmp_file.name,
-        scene_idxs=scene_idxs)
+    with tempfile.TemporaryDirectory() as tmpdir:
+        path = tmpdir + 'classes.txt'
+        with open(path, 'w') as f:
+            f.write('beam\nwindow\n')
+        s3dis_dataset = S3DISSegDataset(
+            data_root=root_path,
+            ann_files=ann_file,
+            pipeline=None,
+            classes=path,
+            scene_idxs=scene_idxs)
+
     assert s3dis_dataset.CLASSES != original_classes
     assert s3dis_dataset.CLASSES == ['beam', 'window']
...
@@ -112,12 +112,13 @@ def test_getitem():
     # Test load classes from file
     import tempfile
-    tmp_file = tempfile.NamedTemporaryFile()
-    with open(tmp_file.name, 'w') as f:
-        f.write('cabinet\nbed\n')
-
-    scannet_dataset = ScanNetDataset(
-        root_path, ann_file, pipeline=None, classes=tmp_file.name)
+    with tempfile.TemporaryDirectory() as tmpdir:
+        path = tmpdir + 'classes.txt'
+        with open(path, 'w') as f:
+            f.write('cabinet\nbed\n')
+        scannet_dataset = ScanNetDataset(
+            root_path, ann_file, pipeline=None, classes=path)
+
     assert scannet_dataset.CLASSES != original_classes
     assert scannet_dataset.CLASSES == ['cabinet', 'bed']
@@ -497,15 +498,16 @@ def test_seg_getitem():
     # test load classes from file
     import tempfile
-    tmp_file = tempfile.NamedTemporaryFile()
-    with open(tmp_file.name, 'w') as f:
-        f.write('cabinet\nchair\n')
-
-    scannet_dataset = ScanNetSegDataset(
-        data_root=root_path,
-        ann_file=ann_file,
-        pipeline=None,
-        classes=tmp_file.name,
-        scene_idxs=scene_idxs)
+    with tempfile.TemporaryDirectory() as tmpdir:
+        path = tmpdir + 'classes.txt'
+        with open(path, 'w') as f:
+            f.write('cabinet\nchair\n')
+        scannet_dataset = ScanNetSegDataset(
+            data_root=root_path,
+            ann_file=ann_file,
+            pipeline=None,
+            classes=path,
+            scene_idxs=scene_idxs)
+
     assert scannet_dataset.CLASSES != original_classes
     assert scannet_dataset.CLASSES == ['cabinet', 'chair']
@@ -744,15 +746,18 @@ def test_instance_seg_getitem():
             -4.3207e-01, 1.8154e+00, 1.7455e-01,
             4.0392e-01, 3.8039e-01, 4.1961e-01
         ]])
-    expected_semantic_mask = torch.tensor([11, 18, 18, 0, 4]).long()
-    expected_instance_mask = torch.tensor([6, 56, 10, 9, 35]).long()

     data = scannet_dataset[0]

-    assert torch.allclose(data['points']._data[:5], expected_points, 1e-2)
-    assert torch.allclose(data['pts_semantic_mask']._data[:5],
-                          expected_semantic_mask)
-    assert torch.allclose(data['pts_instance_mask']._data[:5],
-                          expected_instance_mask)
+    points = data['points']._data[:5]
+    pts_semantic_mask = data['pts_semantic_mask']._data[:5]
+    pts_instance_mask = data['pts_instance_mask']._data[:5]
+    expected_semantic_mask = np.array([11, 18, 18, 0, 4])
+    expected_instance_mask = np.array([6, 56, 10, 9, 35])
+    assert torch.allclose(points, expected_points, 1e-2)
+    assert np.all(pts_semantic_mask.numpy() == expected_semantic_mask)
+    assert np.all(pts_instance_mask.numpy() == expected_instance_mask)


 def test_instance_seg_evaluate():
...
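The rewritten assertions compare the integer masks through NumPy rather than `torch.allclose`. `torch.allclose` requires both tensors to share a dtype and is meant for floating-point tolerance, while the integer width of the loaded masks can vary by platform; exact NumPy equality sidesteps both issues. A sketch of the distinction (the int32 dtype is chosen here only to illustrate the mismatch):

import numpy as np
import torch

pts_semantic_mask = torch.tensor([11, 18, 18, 0, 4], dtype=torch.int32)
expected_semantic_mask = np.array([11, 18, 18, 0, 4])  # int64 by default

# torch.allclose(pts_semantic_mask,
#                torch.from_numpy(expected_semantic_mask))
# -> RuntimeError: dtypes must match (int32 vs int64)

# NumPy promotes integer widths during comparison, so this passes
# regardless of how the mask was decoded.
assert np.all(pts_semantic_mask.numpy() == expected_semantic_mask)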
@@ -89,6 +89,9 @@ def _generate_sunrgbd_multi_modality_dataset_config():

 def test_getitem():
+    from os import path as osp
+
     np.random.seed(0)
     root_path, ann_file, class_names, pipelines, modality = \
         _generate_sunrgbd_dataset_config()
@@ -107,7 +110,8 @@ def test_getitem():
     pcd_rotation_expected = np.array([[0.99889565, 0.04698427, 0.],
                                       [-0.04698427, 0.99889565, 0.],
                                       [0., 0., 1.]])
-    assert file_name == './tests/data/sunrgbd/points/000001.bin'
+    expected_file_name = osp.join('./tests/data/sunrgbd', 'points/000001.bin')
+    assert file_name == expected_file_name
     assert pcd_horizontal_flip is False
     assert abs(pcd_scale_factor - 0.9770964398016714) < 1e-5
     assert np.allclose(pcd_rotation, pcd_rotation_expected, 1e-3)
@@ -142,12 +146,13 @@ def test_getitem():
     assert SUNRGBD_dataset.CLASSES == ('bed', 'table')

     import tempfile
-    tmp_file = tempfile.NamedTemporaryFile()
-    with open(tmp_file.name, 'w') as f:
-        f.write('bed\ntable\n')
-
-    SUNRGBD_dataset = SUNRGBDDataset(
-        root_path, ann_file, pipeline=None, classes=tmp_file.name)
+    with tempfile.TemporaryDirectory() as tmpdir:
+        path = tmpdir + 'classes.txt'
+        with open(path, 'w') as f:
+            f.write('bed\ntable\n')
+        SUNRGBD_dataset = SUNRGBDDataset(
+            root_path, ann_file, pipeline=None, classes=path)
+
     assert SUNRGBD_dataset.CLASSES != original_classes
     assert SUNRGBD_dataset.CLASSES == ['bed', 'table']
...
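Finally, the `file_name` assertion is rebuilt with `osp.join` because the dataset pipeline composes paths with the platform separator, so a hard-coded POSIX string can never match on Windows. For illustration:

from os import path as osp

# os.path.join uses the platform separator, mirroring how the data
# pipeline builds the path it stores in `file_name`.
expected_file_name = osp.join('./tests/data/sunrgbd', 'points/000001.bin')
# POSIX:   ./tests/data/sunrgbd/points/000001.bin
# Windows: ./tests/data/sunrgbd\points/000001.bin
print(expected_file_name)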