Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
wangsen
paddle_dbnet
Commits
6361a38f
Commit
6361a38f
authored
Jun 03, 2021
by
littletomatodonkey
Browse files
fix export model for distillation model
parent
ab4db2ac
Changes
3
Show whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
62 additions
and
40 deletions
+62
-40
ppocr/losses/distillation_loss.py
ppocr/losses/distillation_loss.py
+1
-1
ppocr/modeling/architectures/distillation_model.py
ppocr/modeling/architectures/distillation_model.py
+7
-7
tools/export_model.py
tools/export_model.py
+54
-32
No files found.
ppocr/losses/distillation_loss.py
View file @
6361a38f
...
...
@@ -82,7 +82,7 @@ class DistillationDistanceLoss(DistanceLoss):
key
=
None
,
name
=
"loss_distance"
,
**
kargs
):
super
().
__init__
(
mode
=
mode
,
name
=
name
)
super
().
__init__
(
mode
=
mode
,
name
=
name
,
**
kargs
)
assert
isinstance
(
model_name_pairs
,
list
)
self
.
key
=
key
self
.
model_name_pairs
=
model_name_pairs
...
...
ppocr/modeling/architectures/distillation_model.py
View file @
6361a38f
...
...
@@ -34,8 +34,8 @@ class DistillationModel(nn.Layer):
config (dict): the super parameters for module.
"""
super
().
__init__
()
self
.
model_
dic
t
=
dict
()
index
=
0
self
.
model_
lis
t
=
[]
self
.
model_name_list
=
[]
for
key
in
config
[
"Models"
]:
model_config
=
config
[
"Models"
][
key
]
freeze_params
=
False
...
...
@@ -46,15 +46,15 @@ class DistillationModel(nn.Layer):
pretrained
=
model_config
.
pop
(
"pretrained"
)
model
=
BaseModel
(
model_config
)
if
pretrained
is
not
None
:
load_dygraph_pretrain
(
model
,
path
=
pretrained
[
index
]
)
load_dygraph_pretrain
(
model
,
path
=
pretrained
)
if
freeze_params
:
for
param
in
model
.
parameters
():
param
.
trainable
=
False
self
.
model_
dict
[
key
]
=
self
.
add_sublayer
(
key
,
model
)
index
+=
1
self
.
model_
list
.
append
(
self
.
add_sublayer
(
key
,
model
)
)
self
.
model_name_list
.
append
(
key
)
def forward(self, x):
    """Run each registered sub-model on the same input.

    Args:
        x: the raw network input; it is passed unchanged to every
            sub-model (typically the student and teacher branches of a
            distillation setup).

    Returns:
        dict: maps each sub-model's name (from ``self.model_name_list``)
        to the output produced by the sub-model at the same position in
        ``self.model_list``.
    """
    # Names and models are parallel lists populated at construction time;
    # pair them by index so every branch sees the identical input.
    return {
        name: self.model_list[idx](x)
        for idx, name in enumerate(self.model_name_list)
    }
tools/export_model.py
View file @
6361a38f
...
...
@@ -17,7 +17,7 @@ import sys
__dir__
=
os
.
path
.
dirname
(
os
.
path
.
abspath
(
__file__
))
sys
.
path
.
append
(
__dir__
)
sys
.
path
.
append
(
os
.
path
.
abspath
(
os
.
path
.
join
(
__dir__
,
'
..
'
)))
sys
.
path
.
append
(
os
.
path
.
abspath
(
os
.
path
.
join
(
__dir__
,
"
..
"
)))
import
argparse
...
...
@@ -31,32 +31,12 @@ from ppocr.utils.logging import get_logger
from
tools.program
import
load_config
,
merge_config
,
ArgsParser
def
main
():
FLAGS
=
ArgsParser
().
parse_args
()
config
=
load_config
(
FLAGS
.
config
)
merge_config
(
FLAGS
.
opt
)
logger
=
get_logger
()
# build post process
post_process_class
=
build_post_process
(
config
[
'PostProcess'
],
config
[
'Global'
])
# build model
# for rec algorithm
if
hasattr
(
post_process_class
,
'character'
):
char_num
=
len
(
getattr
(
post_process_class
,
'character'
))
config
[
'Architecture'
][
"Head"
][
'out_channels'
]
=
char_num
model
=
build_model
(
config
[
'Architecture'
])
init_model
(
config
,
model
,
logger
)
model
.
eval
()
save_path
=
'{}/inference'
.
format
(
config
[
'Global'
][
'save_inference_dir'
])
if
config
[
'Architecture'
][
'algorithm'
]
==
"SRN"
:
max_text_length
=
config
[
'Architecture'
][
'Head'
][
'max_text_length'
]
def
export_single_model
(
model
,
arch_config
,
save_path
,
logger
):
if
arch_config
[
"algorithm"
]
==
"SRN"
:
max_text_length
=
arch_config
[
"Head"
][
"max_text_length"
]
other_shape
=
[
paddle
.
static
.
InputSpec
(
shape
=
[
None
,
1
,
64
,
256
],
dtype
=
'
float32
'
),
[
shape
=
[
None
,
1
,
64
,
256
],
dtype
=
"
float32
"
),
[
paddle
.
static
.
InputSpec
(
shape
=
[
None
,
256
,
1
],
dtype
=
"int64"
),
paddle
.
static
.
InputSpec
(
...
...
@@ -71,24 +51,66 @@ def main():
model
=
to_static
(
model
,
input_spec
=
other_shape
)
else
:
infer_shape
=
[
3
,
-
1
,
-
1
]
if
config
[
'Architecture'
][
'
model_type
'
]
==
"rec"
:
if
arch_
config
[
"
model_type
"
]
==
"rec"
:
infer_shape
=
[
3
,
32
,
-
1
]
# for rec model, H must be 32
if
'
Transform
'
in
config
[
'Architecture'
]
and
config
[
'Architecture'
]
[
'
Transform
'
]
is
not
None
and
config
[
'Architecture'
][
'Transform'
][
'
name
'
]
==
'
TPS
'
:
if
"
Transform
"
in
arch_
config
and
arch_config
[
"
Transform
"
]
is
not
None
and
arch_
config
[
"Transform"
][
"
name
"
]
==
"
TPS
"
:
logger
.
info
(
'
When there is tps in the network, variable length input is not supported, and the input size needs to be the same as during training
'
"
When there is tps in the network, variable length input is not supported, and the input size needs to be the same as during training
"
)
infer_shape
[
-
1
]
=
100
model
=
to_static
(
model
,
input_spec
=
[
paddle
.
static
.
InputSpec
(
shape
=
[
None
]
+
infer_shape
,
dtype
=
'
float32
'
)
shape
=
[
None
]
+
infer_shape
,
dtype
=
"
float32
"
)
])
paddle
.
jit
.
save
(
model
,
save_path
)
logger
.
info
(
'inference model is saved to {}'
.
format
(
save_path
))
logger
.
info
(
"inference model is saved to {}"
.
format
(
save_path
))
return
def main():
    """Export a trained model (single or distillation) to inference format.

    Parses the config/CLI overrides, rebuilds the architecture, loads the
    trained weights, and hands each exportable sub-model to
    ``export_single_model``. For a distillation architecture every
    sub-model is exported into its own ``<save_dir>/<name>/inference``
    path; otherwise the single model goes to ``<save_dir>/inference``.
    """
    flags = ArgsParser().parse_args()
    config = load_config(flags.config)
    merge_config(flags.opt)
    logger = get_logger()

    # build post process
    post_process_class = build_post_process(config["PostProcess"],
                                            config["Global"])

    # build model
    # for rec algorithm: the head's output size must match the charset
    if hasattr(post_process_class, "character"):
        char_num = len(getattr(post_process_class, "character"))
        if config["Architecture"]["algorithm"] in ["Distillation", ]:
            # distillation model: every sub-model gets the same out_channels
            for key in config["Architecture"]["Models"]:
                config["Architecture"]["Models"][key]["Head"][
                    "out_channels"] = char_num
        else:
            # base rec model
            config["Architecture"]["Head"]["out_channels"] = char_num

    model = build_model(config["Architecture"])
    init_model(config, model, logger)
    model.eval()

    save_path = config["Global"]["save_inference_dir"]
    arch_config = config["Architecture"]

    if arch_config["algorithm"] in ["Distillation", ]:
        # distillation model: export each sub-model into its own directory,
        # pairing sub-configs with sub-models by position
        archs = list(arch_config["Models"].values())
        for idx, name in enumerate(model.model_name_list):
            sub_model_save_path = os.path.join(save_path, name, "inference")
            export_single_model(model.model_list[idx], archs[idx],
                                sub_model_save_path, logger)
    else:
        save_path = os.path.join(save_path, "inference")
        export_single_model(model, arch_config, save_path, logger)
if
__name__
==
"__main__"
:
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment