OpenDAS / mmdetection3d · Commits · 8751c079

Commit 8751c079 (unverified), authored Sep 13, 2022 by ChaimZhu, committed by GitHub on Sep 13, 2022.
Parent: f9e5e98f

update browse_dataset (#1817)
Changes: 1 changed file with 34 additions and 35 deletions.

tools/misc/browse_dataset.py  (+34, −35)
tools/misc/browse_dataset.py (view file @ 8751c079)
@@ -2,28 +2,27 @@
 import argparse
 from os import path as osp

-import mmengine
-from mmengine import Config, DictAction, mkdir_or_exist
+from mmengine.config import Config, DictAction
+from mmengine.utils import ProgressBar, mkdir_or_exist

-from mmdet3d.datasets import build_dataset
-from mmdet3d.registry import VISUALIZERS
-from mmdet3d.utils import register_all_modules
+from mmdet3d.registry import DATASETS, VISUALIZERS
+from mmdet3d.utils import register_all_modules, replace_ceph_backend


 def parse_args():
     parser = argparse.ArgumentParser(description='Browse a dataset')
     parser.add_argument('config', help='train config file path')
-    parser.add_argument(
-        '--skip-type',
-        type=str,
-        nargs='+',
-        default=['Normalize'],
-        help='skip some useless pipeline')
     parser.add_argument(
         '--output-dir',
         default=None,
         type=str,
         help='If there is no display interface, you can save it')
+    parser.add_argument('--not-show', default=False, action='store_true')
+    parser.add_argument(
+        '--show-interval',
+        type=float,
+        default=2,
+        help='the interval of show (s)')
     parser.add_argument(
         '--task',
         type=str,
@@ -34,10 +33,7 @@ def parse_args():
         action='store_true',
         help='Whether to visualize augmented datasets or original dataset.')
-    parser.add_argument(
-        '--online',
-        action='store_true',
-        help='Whether to perform online visualization. Note that you often '
-        'need a monitor to do so.')
+    parser.add_argument(
+        '--ceph', action='store_true', help='Use ceph as data storage backend')
     parser.add_argument(
         '--cfg-options',
         nargs='+',
@@ -52,20 +48,22 @@ def parse_args():
     return args


-def build_data_cfg(config_path, skip_type, aug, cfg_options):
+def build_data_cfg(config_path, aug, cfg_options):
     """Build data config for loading visualization data."""
     cfg = Config.fromfile(config_path)
     if cfg_options is not None:
         cfg.merge_from_dict(cfg_options)

-    # extract inner dataset of `RepeatDataset` as `cfg.data.train`
-    # so we don't need to worry about it later
-    if cfg.data.train['type'] == 'RepeatDataset':
-        cfg.data.train = cfg.data.train.dataset
+    # extract inner dataset of `RepeatDataset` as
+    # `cfg.train_dataloader.dataset` so we don't
+    # need to worry about it later
+    if cfg.train_dataloader.dataset['type'] == 'RepeatDataset':
+        cfg.train_dataloader.dataset = cfg.train_dataloader.dataset.dataset
     # use only first dataset for `ConcatDataset`
-    if cfg.data.train['type'] == 'ConcatDataset':
-        cfg.data.train = cfg.data.train.datasets[0]
-    train_data_cfg = cfg.data.train
+    if cfg.train_dataloader.dataset['type'] == 'ConcatDataset':
+        cfg.train_dataloader.dataset = cfg.train_dataloader.dataset.datasets[
+            0]
+    train_data_cfg = cfg.train_dataloader.dataset

     if aug:
         show_pipeline = cfg.train_pipeline
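
Note on the hunk above: the helper now unwraps wrapper datasets directly on cfg.train_dataloader.dataset rather than the removed cfg.data.train. A minimal sketch of that unwrapping step, using an invented placeholder dataset config rather than anything from this commit:

# Sketch only: the nested dataset fields below are placeholders.
from mmengine.config import Config

cfg = Config(
    dict(
        train_dataloader=dict(
            dataset=dict(
                type='RepeatDataset',
                times=2,
                dataset=dict(type='KittiDataset', ann_file='example.pkl')))))

# Same unwrapping as build_data_cfg performs after this commit.
if cfg.train_dataloader.dataset['type'] == 'RepeatDataset':
    cfg.train_dataloader.dataset = cfg.train_dataloader.dataset.dataset

print(cfg.train_dataloader.dataset['type'])  # -> 'KittiDataset'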
@@ -74,16 +72,14 @@ def build_data_cfg(config_path, skip_type, aug, cfg_options):
         for i in range(len(cfg.train_pipeline)):
             if cfg.train_pipeline[i]['type'] == 'LoadAnnotations3D':
                 show_pipeline.insert(i, cfg.train_pipeline[i])
-            # Collect points as well as labels
-            if cfg.train_pipeline[i]['type'] == 'Pack3DInputs':
-                if show_pipeline[-1]['type'] == 'Pack3DInputs':
+            # Collect data as well as labels
+            if cfg.train_pipeline[i]['type'] == 'Pack3DDetInputs':
+                if show_pipeline[-1]['type'] == 'Pack3DDetInputs':
                     show_pipeline[-1] = cfg.train_pipeline[i]
                 else:
                     show_pipeline.append(cfg.train_pipeline[i])

-    train_data_cfg['pipeline'] = [
-        x for x in show_pipeline if x['type'] not in skip_type
-    ]
+    train_data_cfg['pipeline'] = show_pipeline

     return cfg
@@ -94,18 +90,21 @@ def main():
     if args.output_dir is not None:
         mkdir_or_exist(args.output_dir)

-    cfg = build_data_cfg(args.config, args.skip_type, args.aug,
+    cfg = build_data_cfg(args.config, args.aug,
                          args.cfg_options)

+    # TODO: We will unify the ceph support approach with other OpenMMLab repos
+    if args.ceph:
+        cfg = replace_ceph_backend(cfg)
     # register all modules in mmdet3d into the registries
     register_all_modules()

     try:
-        dataset = build_dataset(
+        dataset = DATASETS.build(
             cfg.train_dataloader.dataset,
             default_args=dict(filter_empty_gt=False))
     except TypeError:  # seg dataset doesn't have `filter_empty_gt` key
-        dataset = build_dataset(cfg.train_dataloader.dataset)
+        dataset = DATASETS.build(cfg.train_dataloader.dataset)

     # configure visualization mode
     vis_task = args.task  # 'det', 'seg', 'multi_modality-det', 'mono-det'
@@ -113,7 +112,7 @@ def main():
     visualizer = VISUALIZERS.build(cfg.visualizer)
     visualizer.dataset_meta = dataset.metainfo

-    progress_bar = mmengine.ProgressBar(len(dataset))
+    progress_bar = ProgressBar(len(dataset))
     for item in dataset:
         # the 3D Boxes in input could be in any of three coordinates
@@ -126,7 +125,7 @@ def main():
         visualizer.add_datasample(
             '3d visualzier',
             data_input,
-            data_sample,
+            data_sample=data_sample,
             show=not args.not_show,
             wait_time=args.show_interval,
             out_file=out_file,
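
Overall, the updated main() builds the dataset through the mmdet3d DATASETS registry instead of the removed build_dataset helper, and takes ProgressBar from mmengine.utils instead of the module-level mmengine import. A minimal sketch of that pattern, assuming an mmdet3d dev-1.x install; the config path is only an example and is not taken from this commit:

# Sketch of the registry-based flow the script now follows.
from mmengine.config import Config
from mmengine.utils import ProgressBar

from mmdet3d.registry import DATASETS
from mmdet3d.utils import register_all_modules

register_all_modules()  # register mmdet3d modules into the registries

cfg = Config.fromfile('configs/_base_/datasets/kitti-3d-3class.py')  # example path
dataset = DATASETS.build(cfg.train_dataloader.dataset)

progress_bar = ProgressBar(len(dataset))
for item in dataset:
    # each item is the dict produced by the dataset's training pipeline
    progress_bar.update()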