Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
OpenDAS
mmdetection3d
Commits
49a2bc85
Commit
49a2bc85
authored
Jun 01, 2020
by
liyinhao
Committed by
zhangwenwei
Jun 01, 2020
Browse files
Change data converter
parent
c42ad958
Changes
23
Hide whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
62 additions
and
29 deletions
+62
-29
tools/data_converter/indoor_converter.py
tools/data_converter/indoor_converter.py
+5
-3
tools/data_converter/scannet_data_utils.py
tools/data_converter/scannet_data_utils.py
+37
-7
tools/data_converter/sunrgbd_data_utils.py
tools/data_converter/sunrgbd_data_utils.py
+20
-19
No files found.
tools/data_converter/indoor_converter.py
View file @
49a2bc85
...
@@ -8,7 +8,8 @@ from tools.data_converter.sunrgbd_data_utils import SUNRGBDData
...
@@ -8,7 +8,8 @@ from tools.data_converter.sunrgbd_data_utils import SUNRGBDData
def
create_indoor_info_file
(
data_path
,
def
create_indoor_info_file
(
data_path
,
pkl_prefix
=
'sunrgbd'
,
pkl_prefix
=
'sunrgbd'
,
save_path
=
None
,
save_path
=
None
,
use_v1
=
False
):
use_v1
=
False
,
workers
=
4
):
"""Create indoor information file.
"""Create indoor information file.
Get information of the raw data and save it to the pkl file.
Get information of the raw data and save it to the pkl file.
...
@@ -18,6 +19,7 @@ def create_indoor_info_file(data_path,
...
@@ -18,6 +19,7 @@ def create_indoor_info_file(data_path,
pkl_prefix (str): Prefix of the pkl to be saved. Default: 'sunrgbd'.
pkl_prefix (str): Prefix of the pkl to be saved. Default: 'sunrgbd'.
save_path (str): Path of the pkl to be saved. Default: None.
save_path (str): Path of the pkl to be saved. Default: None.
use_v1 (bool): Whether to use v1. Default: False.
use_v1 (bool): Whether to use v1. Default: False.
workers (int): Number of threads to be used. Default: 4.
"""
"""
assert
os
.
path
.
exists
(
data_path
)
assert
os
.
path
.
exists
(
data_path
)
assert
pkl_prefix
in
[
'sunrgbd'
,
'scannet'
]
assert
pkl_prefix
in
[
'sunrgbd'
,
'scannet'
]
...
@@ -35,10 +37,10 @@ def create_indoor_info_file(data_path,
...
@@ -35,10 +37,10 @@ def create_indoor_info_file(data_path,
train_dataset
=
ScanNetData
(
root_path
=
data_path
,
split
=
'train'
)
train_dataset
=
ScanNetData
(
root_path
=
data_path
,
split
=
'train'
)
val_dataset
=
ScanNetData
(
root_path
=
data_path
,
split
=
'val'
)
val_dataset
=
ScanNetData
(
root_path
=
data_path
,
split
=
'val'
)
infos_train
=
train_dataset
.
get_infos
(
has_label
=
True
)
infos_train
=
train_dataset
.
get_infos
(
num_workers
=
workers
,
has_label
=
True
)
mmcv
.
dump
(
infos_train
,
train_filename
,
'pkl'
)
mmcv
.
dump
(
infos_train
,
train_filename
,
'pkl'
)
print
(
f
'
{
pkl_prefix
}
info train file is saved to
{
train_filename
}
'
)
print
(
f
'
{
pkl_prefix
}
info train file is saved to
{
train_filename
}
'
)
infos_val
=
val_dataset
.
get_infos
(
has_label
=
True
)
infos_val
=
val_dataset
.
get_infos
(
num_workers
=
workers
,
has_label
=
True
)
mmcv
.
dump
(
infos_val
,
val_filename
,
'pkl'
)
mmcv
.
dump
(
infos_val
,
val_filename
,
'pkl'
)
print
(
f
'
{
pkl_prefix
}
info val file is saved to
{
val_filename
}
'
)
print
(
f
'
{
pkl_prefix
}
info val file is saved to
{
val_filename
}
'
)
tools/data_converter/scannet_data_utils.py
View file @
49a2bc85
import
concurrent.futures
as
futures
import
concurrent.futures
as
futures
import
os
import
os
.path
as
osp
import
mmcv
import
mmcv
import
numpy
as
np
import
numpy
as
np
...
@@ -18,7 +18,7 @@ class ScanNetData(object):
...
@@ -18,7 +18,7 @@ class ScanNetData(object):
def
__init__
(
self
,
root_path
,
split
=
'train'
):
def
__init__
(
self
,
root_path
,
split
=
'train'
):
self
.
root_dir
=
root_path
self
.
root_dir
=
root_path
self
.
split
=
split
self
.
split
=
split
self
.
split_dir
=
os
.
path
.
join
(
root_path
)
self
.
split_dir
=
os
p
.
join
(
root_path
)
self
.
classes
=
[
self
.
classes
=
[
'cabinet'
,
'bed'
,
'chair'
,
'sofa'
,
'table'
,
'door'
,
'window'
,
'cabinet'
,
'bed'
,
'chair'
,
'sofa'
,
'table'
,
'door'
,
'window'
,
'bookshelf'
,
'picture'
,
'counter'
,
'desk'
,
'curtain'
,
'bookshelf'
,
'picture'
,
'counter'
,
'desk'
,
'curtain'
,
...
@@ -34,8 +34,8 @@ class ScanNetData(object):
...
@@ -34,8 +34,8 @@ class ScanNetData(object):
for
i
,
nyu40id
in
enumerate
(
list
(
self
.
cat_ids
))
for
i
,
nyu40id
in
enumerate
(
list
(
self
.
cat_ids
))
}
}
assert
split
in
[
'train'
,
'val'
,
'test'
]
assert
split
in
[
'train'
,
'val'
,
'test'
]
split_file
=
os
.
path
.
join
(
self
.
root_dir
,
'meta_data'
,
split_file
=
os
p
.
join
(
self
.
root_dir
,
'meta_data'
,
f
'scannetv2_
{
split
}
.txt'
)
f
'scannetv2_
{
split
}
.txt'
)
mmcv
.
check_file_exist
(
split_file
)
mmcv
.
check_file_exist
(
split_file
)
self
.
sample_id_list
=
mmcv
.
list_from_file
(
split_file
)
self
.
sample_id_list
=
mmcv
.
list_from_file
(
split_file
)
...
@@ -43,9 +43,9 @@ class ScanNetData(object):
...
@@ -43,9 +43,9 @@ class ScanNetData(object):
return
len
(
self
.
sample_id_list
)
return
len
(
self
.
sample_id_list
)
def get_box_label(self, idx):
    """Load the ground-truth bounding boxes for one scan.

    Args:
        idx: Sample index (scan name) identifying ``{idx}_bbox.npy``.

    Returns:
        np.ndarray: Box array loaded from the instance-data directory.
    """
    box_file = osp.join(self.root_dir, 'scannet_train_instance_data',
                        f'{idx}_bbox.npy')
    mmcv.check_file_exist(box_file)
    return np.load(box_file)
def
get_infos
(
self
,
num_workers
=
4
,
has_label
=
True
,
sample_id_list
=
None
):
def
get_infos
(
self
,
num_workers
=
4
,
has_label
=
True
,
sample_id_list
=
None
):
...
@@ -68,6 +68,36 @@ class ScanNetData(object):
...
@@ -68,6 +68,36 @@ class ScanNetData(object):
info
=
dict
()
info
=
dict
()
pc_info
=
{
'num_features'
:
6
,
'lidar_idx'
:
sample_idx
}
pc_info
=
{
'num_features'
:
6
,
'lidar_idx'
:
sample_idx
}
info
[
'point_cloud'
]
=
pc_info
info
[
'point_cloud'
]
=
pc_info
pts_filename
=
osp
.
join
(
self
.
root_dir
,
'scannet_train_instance_data'
,
f
'
{
sample_idx
}
_vert.npy'
)
pts_instance_mask_path
=
osp
.
join
(
self
.
root_dir
,
'scannet_train_instance_data'
,
f
'
{
sample_idx
}
_ins_label.npy'
)
pts_semantic_mask_path
=
osp
.
join
(
self
.
root_dir
,
'scannet_train_instance_data'
,
f
'
{
sample_idx
}
_sem_label.npy'
)
points
=
np
.
load
(
pts_filename
)
pts_instance_mask
=
np
.
load
(
pts_instance_mask_path
).
astype
(
np
.
long
)
pts_semantic_mask
=
np
.
load
(
pts_semantic_mask_path
).
astype
(
np
.
long
)
mmcv
.
mkdir_or_exist
(
osp
.
join
(
self
.
root_dir
,
'points'
))
mmcv
.
mkdir_or_exist
(
osp
.
join
(
self
.
root_dir
,
'instance_mask'
))
mmcv
.
mkdir_or_exist
(
osp
.
join
(
self
.
root_dir
,
'semantic_mask'
))
points
.
tofile
(
osp
.
join
(
self
.
root_dir
,
'points'
,
f
'
{
sample_idx
}
.bin'
))
pts_instance_mask
.
tofile
(
osp
.
join
(
self
.
root_dir
,
'instance_mask'
,
f
'
{
sample_idx
}
.bin'
))
pts_semantic_mask
.
tofile
(
osp
.
join
(
self
.
root_dir
,
'semantic_mask'
,
f
'
{
sample_idx
}
.bin'
))
info
[
'pts_path'
]
=
osp
.
join
(
'points'
,
f
'
{
sample_idx
}
.bin'
)
info
[
'pts_instance_mask_path'
]
=
osp
.
join
(
'instance_mask'
,
f
'
{
sample_idx
}
.bin'
)
info
[
'pts_semantic_mask_path'
]
=
osp
.
join
(
'semantic_mask'
,
f
'
{
sample_idx
}
.bin'
)
if
has_label
:
if
has_label
:
annotations
=
{}
annotations
=
{}
...
...
tools/data_converter/sunrgbd_data_utils.py
View file @
49a2bc85
import
concurrent.futures
as
futures
import
concurrent.futures
as
futures
import
os
import
os
.path
as
osp
import
mmcv
import
mmcv
import
numpy
as
np
import
numpy
as
np
...
@@ -70,7 +70,7 @@ class SUNRGBDData(object):
...
@@ -70,7 +70,7 @@ class SUNRGBDData(object):
def
__init__
(
self
,
root_path
,
split
=
'train'
,
use_v1
=
False
):
def
__init__
(
self
,
root_path
,
split
=
'train'
,
use_v1
=
False
):
self
.
root_dir
=
root_path
self
.
root_dir
=
root_path
self
.
split
=
split
self
.
split
=
split
self
.
split_dir
=
os
.
path
.
join
(
root_path
)
self
.
split_dir
=
os
p
.
join
(
root_path
,
'sunrgbd_trainval'
)
self
.
classes
=
[
self
.
classes
=
[
'bed'
,
'table'
,
'sofa'
,
'chair'
,
'toilet'
,
'desk'
,
'dresser'
,
'bed'
,
'table'
,
'sofa'
,
'chair'
,
'toilet'
,
'desk'
,
'dresser'
,
'night_stand'
,
'bookshelf'
,
'bathtub'
'night_stand'
,
'bookshelf'
,
'bathtub'
...
@@ -81,22 +81,22 @@ class SUNRGBDData(object):
...
@@ -81,22 +81,22 @@ class SUNRGBDData(object):
for
label
in
range
(
len
(
self
.
classes
))
for
label
in
range
(
len
(
self
.
classes
))
}
}
assert
split
in
[
'train'
,
'val'
,
'test'
]
assert
split
in
[
'train'
,
'val'
,
'test'
]
split_file
=
os
.
path
.
join
(
self
.
roo
t_dir
,
f
'
{
split
}
_data_idx.txt'
)
split_file
=
os
p
.
join
(
self
.
spli
t_dir
,
f
'
{
split
}
_data_idx.txt'
)
mmcv
.
check_file_exist
(
split_file
)
mmcv
.
check_file_exist
(
split_file
)
self
.
sample_id_list
=
map
(
int
,
mmcv
.
list_from_file
(
split_file
))
self
.
sample_id_list
=
map
(
int
,
mmcv
.
list_from_file
(
split_file
))
self
.
image_dir
=
os
.
path
.
join
(
self
.
split_dir
,
'image'
)
self
.
image_dir
=
os
p
.
join
(
self
.
split_dir
,
'image'
)
self
.
calib_dir
=
os
.
path
.
join
(
self
.
split_dir
,
'calib'
)
self
.
calib_dir
=
os
p
.
join
(
self
.
split_dir
,
'calib'
)
self
.
depth_dir
=
os
.
path
.
join
(
self
.
split_dir
,
'depth'
)
self
.
depth_dir
=
os
p
.
join
(
self
.
split_dir
,
'depth'
)
if
use_v1
:
if
use_v1
:
self
.
label_dir
=
os
.
path
.
join
(
self
.
split_dir
,
'label_v1'
)
self
.
label_dir
=
os
p
.
join
(
self
.
split_dir
,
'label_v1'
)
else
:
else
:
self
.
label_dir
=
os
.
path
.
join
(
self
.
split_dir
,
'label'
)
self
.
label_dir
=
os
p
.
join
(
self
.
split_dir
,
'label'
)
def __len__(self):
    """Return the number of samples in this split.

    NOTE(review): ``sample_id_list`` appears to be assigned from
    ``map(int, ...)`` in ``__init__``; calling ``len`` on a map object
    raises ``TypeError`` in Python 3 — confirm it is materialized as a
    list before this method is used.
    """
    return len(self.sample_id_list)
def get_image(self, idx):
    """Read the RGB image for sample ``idx``.

    Args:
        idx (int): Sample index; the image file is ``{idx:06d}.jpg``
            under ``self.image_dir``.

    Returns:
        np.ndarray: Image as loaded by ``mmcv.imread``.
    """
    img_filename = osp.join(self.image_dir, f'{idx:06d}.jpg')
    return mmcv.imread(img_filename)
def
get_image_shape
(
self
,
idx
):
def
get_image_shape
(
self
,
idx
):
...
@@ -104,12 +104,12 @@ class SUNRGBDData(object):
...
@@ -104,12 +104,12 @@ class SUNRGBDData(object):
return
np
.
array
(
image
.
shape
[:
2
],
dtype
=
np
.
int32
)
return
np
.
array
(
image
.
shape
[:
2
],
dtype
=
np
.
int32
)
def get_depth(self, idx):
    """Load the depth data stored for sample ``idx``.

    Args:
        idx (int): Sample index; the file is ``{idx:06d}.mat`` under
            ``self.depth_dir``.

    Returns:
        np.ndarray: Array stored under the ``'instance'`` key of the
        MATLAB file.
    """
    depth_filename = osp.join(self.depth_dir, f'{idx:06d}.mat')
    return sio.loadmat(depth_filename)['instance']
def
get_calibration
(
self
,
idx
):
def
get_calibration
(
self
,
idx
):
calib_filepath
=
os
.
path
.
join
(
self
.
calib_dir
,
f
'
{
idx
:
06
d
}
.txt'
)
calib_filepath
=
os
p
.
join
(
self
.
calib_dir
,
f
'
{
idx
:
06
d
}
.txt'
)
lines
=
[
line
.
rstrip
()
for
line
in
open
(
calib_filepath
)]
lines
=
[
line
.
rstrip
()
for
line
in
open
(
calib_filepath
)]
Rt
=
np
.
array
([
float
(
x
)
for
x
in
lines
[
0
].
split
(
' '
)])
Rt
=
np
.
array
([
float
(
x
)
for
x
in
lines
[
0
].
split
(
' '
)])
Rt
=
np
.
reshape
(
Rt
,
(
3
,
3
),
order
=
'F'
)
Rt
=
np
.
reshape
(
Rt
,
(
3
,
3
),
order
=
'F'
)
...
@@ -117,7 +117,7 @@ class SUNRGBDData(object):
...
@@ -117,7 +117,7 @@ class SUNRGBDData(object):
return
K
,
Rt
return
K
,
Rt
def get_label_objects(self, idx):
    """Parse the annotation file for sample ``idx`` into instances.

    Args:
        idx (int): Sample index; the annotation file is ``{idx:06d}.txt``
            under ``self.label_dir``.

    Returns:
        list[SUNRGBDInstance]: One parsed instance per annotation line.
    """
    label_filename = osp.join(self.label_dir, f'{idx:06d}.txt')
    # Use a context manager so the file handle is closed deterministically;
    # the previous code left the handle open until garbage collection.
    with open(label_filename) as f:
        lines = [line.rstrip() for line in f]
    return [SUNRGBDInstance(line) for line in lines]
...
@@ -146,15 +146,18 @@ class SUNRGBDData(object):
...
@@ -146,15 +146,18 @@ class SUNRGBDData(object):
pc_upright_depth
=
self
.
get_depth
(
sample_idx
)
pc_upright_depth
=
self
.
get_depth
(
sample_idx
)
pc_upright_depth_subsampled
=
random_sampling
(
pc_upright_depth_subsampled
=
random_sampling
(
pc_upright_depth
,
SAMPLE_NUM
)
pc_upright_depth
,
SAMPLE_NUM
)
np
.
save
(
os
.
path
.
join
(
self
.
root_dir
,
'lidar'
,
f
'
{
sample_idx
:
06
d
}
.npy'
),
pc_upright_depth_subsampled
)
info
=
dict
()
info
=
dict
()
pc_info
=
{
'num_features'
:
6
,
'lidar_idx'
:
sample_idx
}
pc_info
=
{
'num_features'
:
6
,
'lidar_idx'
:
sample_idx
}
info
[
'point_cloud'
]
=
pc_info
info
[
'point_cloud'
]
=
pc_info
img_name
=
os
.
path
.
join
(
self
.
image_dir
,
f
'
{
sample_idx
:
06
d
}
'
)
img_path
=
os
.
path
.
join
(
self
.
image_dir
,
img_name
)
mmcv
.
mkdir_or_exist
(
osp
.
join
(
self
.
root_dir
,
'points'
))
pc_upright_depth_subsampled
.
tofile
(
osp
.
join
(
self
.
root_dir
,
'points'
,
f
'
{
sample_idx
:
06
d
}
.bin'
))
info
[
'pts_path'
]
=
osp
.
join
(
'points'
,
f
'
{
sample_idx
:
06
d
}
.bin'
)
img_name
=
osp
.
join
(
self
.
image_dir
,
f
'
{
sample_idx
:
06
d
}
'
)
img_path
=
osp
.
join
(
self
.
image_dir
,
img_name
)
image_info
=
{
image_info
=
{
'image_idx'
:
sample_idx
,
'image_idx'
:
sample_idx
,
'image_shape'
:
self
.
get_image_shape
(
sample_idx
),
'image_shape'
:
self
.
get_image_shape
(
sample_idx
),
...
@@ -211,8 +214,6 @@ class SUNRGBDData(object):
...
@@ -211,8 +214,6 @@ class SUNRGBDData(object):
info
[
'annos'
]
=
annotations
info
[
'annos'
]
=
annotations
return
info
return
info
lidar_save_dir
=
os
.
path
.
join
(
self
.
root_dir
,
'lidar'
)
mmcv
.
mkdir_or_exist
(
lidar_save_dir
)
sample_id_list
=
sample_id_list
if
\
sample_id_list
=
sample_id_list
if
\
sample_id_list
is
not
None
else
self
.
sample_id_list
sample_id_list
is
not
None
else
self
.
sample_id_list
with
futures
.
ThreadPoolExecutor
(
num_workers
)
as
executor
:
with
futures
.
ThreadPoolExecutor
(
num_workers
)
as
executor
:
...
...
Prev
1
2
Next
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment