Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
OpenDAS
mmdeploy
Commits
b95be0c5
Commit
b95be0c5
authored
Jun 25, 2025
by
limm
Browse files
add tests/test_backend module
parent
7ae93d70
Pipeline
#2816
canceled with stages
Changes
2
Pipelines
1
Show whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
219 additions
and
0 deletions
+219
-0
tests/data/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151612397179.pcd.bin
...-08-01-15-16-36-0400__LIDAR_TOP__1533151612397179.pcd.bin
+0
-0
tests/test_backend/test_wrapper.py
tests/test_backend/test_wrapper.py
+219
-0
No files found.
tests/data/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151612397179.pcd.bin
0 → 100644
View file @
b95be0c5
File added
tests/test_backend/test_wrapper.py
0 → 100644
View file @
b95be0c5
# Copyright (c) OpenMMLab. All rights reserved.
import
os.path
as
osp
import
subprocess
import
tempfile
import
mmengine
import
pytest
import
torch
import
torch.nn
as
nn
from
mmdeploy.utils.constants
import
Backend
from
mmdeploy.utils.test
import
check_backend
# Shared fixtures for every backend test below.
# NOTE: ``NamedTemporaryFile(...).name`` is used only to obtain a unique
# temporary *path*; the file object is immediately discarded (and the file
# itself removed when the object is garbage-collected), so the export
# fixtures later create the real artifacts at these paths.
onnx_file = tempfile.NamedTemporaryFile(suffix='.onnx').name
ts_file = tempfile.NamedTemporaryFile(suffix='.pt').name
# Dummy NCHW input: batch 1, 3 channels, 8x8 spatial size — every shape
# dict below (TensorRT/CoreML/RKNN) mirrors this.
test_img = torch.rand(1, 3, 8, 8)
# Graph I/O names used for export and for building backend wrappers.
output_names = ['output']
input_names = ['input']
target_platform = 'rk3588'  # rknn pre-compiled model need device
@pytest.mark.skip(reason='This a not test class but a utility class.')
class TestModel(nn.Module):
    """Minimal network used to produce ONNX / TorchScript artifacts.

    The forward pass only adds the module-level ``test_img`` tensor to the
    input, which keeps the exported graph trivial for every backend.
    """

    def __init__(self):
        super().__init__()

    def forward(self, x):
        shifted = x + test_img
        return shifted
# Single shared model instance; eval() switches off training-only behavior
# before it is exported by the fixtures below.
model = TestModel().eval()
@pytest.fixture(autouse=True, scope='module')
def generate_onnx_file():
    """Export ``model`` to ``onnx_file`` once per test module."""
    export_kwargs = dict(
        output_names=output_names,
        input_names=input_names,
        keep_initializers_as_inputs=True,
        do_constant_folding=True,
        verbose=False,
        opset_version=11,
        dynamic_axes=None)
    with torch.no_grad():
        torch.onnx.export(model, test_img, onnx_file, **export_kwargs)
@pytest.fixture(autouse=True, scope='module')
def generate_torchscript_file():
    """Trace ``model`` into a TorchScript file at ``ts_file`` once per module."""
    from mmengine import Config

    from mmdeploy.apis.torch_jit import trace
    backend = Backend.TORCHSCRIPT.value
    deploy_cfg = Config({'backend_config': dict(type=backend)})
    trace(
        model,
        torch.rand(1, 3, 8, 8),
        output_path_prefix=osp.splitext(ts_file)[0],
        backend=backend,
        context_info=dict(deploy_cfg=deploy_cfg))
def ir2backend(backend, onnx_file, ts_file):
    """Convert the exported IR file(s) into the given backend's model format.

    Args:
        backend (Backend): Target backend enum member.
        onnx_file (str): Path to the exported ONNX model.
        ts_file (str): Path to the traced TorchScript model.

    Returns:
        str | tuple[str, str]: Path(s) to the converted backend model.
            Note that PPLNN and NCNN return a *pair* of files; all other
            backends return a single path.

    Raises:
        NotImplementedError: If conversion for ``backend`` is not handled.
    """
    if backend == Backend.TENSORRT:
        from mmdeploy.backend.tensorrt import from_onnx
        backend_file = tempfile.NamedTemporaryFile(suffix='.engine').name
        # Static 1x3x8x8 profile matching test_img (min == opt == max).
        from_onnx(
            onnx_file, osp.splitext(backend_file)[0], {
                'input': {
                    'min_shape': [1, 3, 8, 8],
                    'opt_shape': [1, 3, 8, 8],
                    'max_shape': [1, 3, 8, 8]
                }
            })
        return backend_file
    elif backend == Backend.ONNXRUNTIME:
        # ONNX Runtime consumes the ONNX file directly — no conversion.
        return onnx_file
    elif backend == Backend.PPLNN:
        from mmdeploy.apis.pplnn import from_onnx
        output_file_prefix = tempfile.NamedTemporaryFile().name
        from_onnx(onnx_file, output_file_prefix=output_file_prefix)
        # PPLNN produces an optimized ONNX plus a tuning-algorithm JSON.
        algo_file = output_file_prefix + '.json'
        output_file = output_file_prefix + '.onnx'
        return output_file, algo_file
    elif backend == Backend.NCNN:
        from mmdeploy.backend.ncnn.init_plugins import get_onnx2ncnn_path
        # External converter binary shipped with the ncnn plugin.
        onnx2ncnn_path = get_onnx2ncnn_path()
        param_file = tempfile.NamedTemporaryFile(suffix='.param').name
        bin_file = tempfile.NamedTemporaryFile(suffix='.bin').name
        subprocess.call([onnx2ncnn_path, onnx_file, param_file, bin_file])
        return param_file, bin_file
    elif backend == Backend.OPENVINO:
        from mmdeploy.apis.openvino import from_onnx, get_output_model_file
        # NOTE(review): only the *name* of the temp dir is kept; the
        # directory object is GC'd, so the converter is presumably
        # expected to (re)create the path — TODO confirm.
        backend_dir = tempfile.TemporaryDirectory().name
        backend_file = get_output_model_file(onnx_file, backend_dir)
        input_info = {'input': test_img.shape}
        output_names = ['output']  # local shadow of the module global
        work_dir = backend_dir
        from_onnx(onnx_file, work_dir, input_info, output_names)
        return backend_file
    elif backend == Backend.RKNN:
        from mmdeploy.apis.rknn import onnx2rknn
        rknn_file = onnx_file.replace('.onnx', '.rknn')
        # RKNN needs a full deploy config: device target plus (disabled)
        # quantization and the per-input size list (CHW, no batch dim).
        deploy_cfg = mmengine.Config(
            dict(
                backend_config=dict(
                    type='rknn',
                    common_config=dict(target_platform=target_platform),
                    quantization_config=dict(
                        do_quantization=False, dataset=None),
                    input_size_list=[[3, 8, 8]])))
        onnx2rknn(onnx_file, rknn_file, deploy_cfg)
        return rknn_file
    elif backend == Backend.ASCEND:
        from mmdeploy.apis.ascend import from_onnx
        backend_dir = tempfile.TemporaryDirectory().name
        work_dir = backend_dir
        # Output .om file keeps the ONNX file's base name.
        file_name = osp.splitext(osp.split(onnx_file)[1])[0]
        backend_file = osp.join(work_dir, file_name + '.om')
        model_inputs = mmengine.Config(
            dict(input_shapes=dict(input=test_img.shape)))
        from_onnx(onnx_file, work_dir, model_inputs)
        return backend_file
    elif backend == Backend.TVM:
        from mmdeploy.backend.tvm import from_onnx, get_library_ext
        # Library extension is platform dependent (.so/.dylib/.dll).
        ext = get_library_ext()
        lib_file = tempfile.NamedTemporaryFile(suffix=ext).name
        shape = {'input': test_img.shape}
        dtype = {'input': 'float32'}
        target = 'llvm'  # CPU target; no auto-tuning in this test
        tuner_dict = dict(type='DefaultTuner', target=target)
        from_onnx(
            onnx_file, lib_file, shape=shape, dtype=dtype, tuner=tuner_dict)
        assert osp.exists(lib_file)
        return lib_file
    elif backend == Backend.TORCHSCRIPT:
        # TorchScript wrapper loads the traced file directly.
        return ts_file
    elif backend == Backend.COREML:
        output_names = ['output']  # local shadow of the module global
        from mmdeploy.backend.coreml.torchscript2coreml import (
            from_torchscript, get_model_suffix)
        backend_dir = tempfile.TemporaryDirectory().name
        work_dir = backend_dir
        torchscript_name = osp.splitext(osp.split(ts_file)[1])[0]
        output_file_prefix = osp.join(work_dir, torchscript_name)
        convert_to = 'mlprogram'
        # Static 1x3x8x8 shape range matching test_img.
        from_torchscript(
            ts_file,
            output_file_prefix,
            input_names=input_names,
            output_names=output_names,
            input_shapes=dict(
                input=dict(
                    min_shape=[1, 3, 8, 8],
                    default_shape=[1, 3, 8, 8],
                    max_shape=[1, 3, 8, 8])),
            convert_to=convert_to)
        suffix = get_model_suffix(convert_to)
        return output_file_prefix + suffix
    else:
        raise NotImplementedError(
            f'Convert for {backend.value} has not been implemented.')
def create_wrapper(backend, model_files):
    """Build a backend inference wrapper around converted model files.

    Args:
        backend (Backend): Target backend enum member.
        model_files (str | Sequence[str]): Model path(s) returned by
            ``ir2backend``.

    Returns:
        The wrapper object built by the backend's manager.
    """
    from mmdeploy.backend.base import get_backend_manager
    backend_mgr = get_backend_manager(backend.value)
    deploy_cfg = None
    if isinstance(model_files, str):
        model_files = [model_files]
    # BUGFIX: this used to be an ``elif`` chained to the isinstance check
    # above. ``ir2backend`` returns a plain string for RKNN, so the branch
    # was unreachable and RKNN wrappers were built without the
    # ``target_platform`` config they require.
    if backend == Backend.RKNN:
        deploy_cfg = dict(
            backend_config=dict(
                common_config=dict(target_platform=target_platform)))
    return backend_mgr.build_wrapper(
        model_files,
        input_names=input_names,
        output_names=output_names,
        deploy_cfg=deploy_cfg)
def run_wrapper(backend, wrapper, input):
    """Run one forward pass through ``wrapper`` and return its result.

    For most backends the 'output' tensor is detached and moved to CPU;
    the RKNN wrapper's raw result is returned unchanged.
    """
    if backend == Backend.TENSORRT:
        input = input.cuda()  # TensorRT engines require device tensors
    results = wrapper({'input': input})
    if backend == Backend.RKNN:
        return results
    return results['output'].detach().cpu()
# Every backend under test: all Backend members except the meta entries
# (DEFAULT, PYTORCH, SDK) that have no conversion/wrapper path here.
ALL_BACKEND = [
    b for b in Backend
    if b not in (Backend.DEFAULT, Backend.PYTORCH, Backend.SDK)
]
@pytest.mark.parametrize('backend', ALL_BACKEND)
def test_wrapper(backend):
    """End-to-end smoke test: convert the IR, build a wrapper, run it."""
    check_backend(backend, True)  # skips when the backend is unavailable
    model_files = ir2backend(backend, onnx_file, ts_file)
    assert model_files is not None
    wrapper = create_wrapper(backend, model_files)
    assert wrapper is not None
    results = run_wrapper(backend, wrapper, test_img)
    assert results is not None
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment