Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
wangsen
paddle_dbnet
Commits
2e9abcb9
Unverified
Commit
2e9abcb9
authored
Nov 24, 2021
by
zhoujun
Committed by
GitHub
Nov 24, 2021
Browse files
Merge pull request #4748 from WenmuZhou/tipc
Pair param with key when loading trained model params
parents
6609e3ca
7f1badf7
Changes
1
Show whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
27 additions
and
10 deletions
+27
-10
ppocr/utils/save_load.py
ppocr/utils/save_load.py
+27
-10
No files found.
ppocr/utils/save_load.py
View file @
2e9abcb9
...
...
@@ -54,11 +54,28 @@ def load_model(config, model, optimizer=None):
pretrained_model
=
global_config
.
get
(
'pretrained_model'
)
best_model_dict
=
{}
if
checkpoints
:
if
checkpoints
.
endswith
(
'pdparams'
):
if
checkpoints
.
endswith
(
'
.
pdparams'
):
checkpoints
=
checkpoints
.
replace
(
'.pdparams'
,
''
)
assert
os
.
path
.
exists
(
checkpoints
+
".pdopt"
),
\
f
"The
{
checkpoints
}
.pdopt does not exists!"
load_pretrained_params
(
model
,
checkpoints
)
assert
os
.
path
.
exists
(
checkpoints
+
".pdparams"
),
\
"The {}.pdparams does not exists!"
.
format
(
checkpoints
)
# load params from trained model
params
=
paddle
.
load
(
checkpoints
+
'.pdparams'
)
state_dict
=
model
.
state_dict
()
new_state_dict
=
{}
for
key
,
value
in
state_dict
.
items
():
if
key
not
in
params
:
logger
.
warning
(
"{} not in loaded params {} !"
.
format
(
key
,
params
.
keys
()))
pre_value
=
params
[
key
]
if
list
(
value
.
shape
)
==
list
(
pre_value
.
shape
):
new_state_dict
[
key
]
=
pre_value
else
:
logger
.
warning
(
"The shape of model params {} {} not matched with loaded params shape {} !"
.
format
(
key
,
value
.
shape
,
pre_value
.
shape
))
model
.
set_state_dict
(
new_state_dict
)
optim_dict
=
paddle
.
load
(
checkpoints
+
'.pdopt'
)
if
optimizer
is
not
None
:
optimizer
.
set_state_dict
(
optim_dict
)
...
...
@@ -80,10 +97,10 @@ def load_model(config, model, optimizer=None):
def
load_pretrained_params
(
model
,
path
):
logger
=
get_logger
()
if
path
.
endswith
(
'pdparams'
):
if
path
.
endswith
(
'
.
pdparams'
):
path
=
path
.
replace
(
'.pdparams'
,
''
)
assert
os
.
path
.
exists
(
path
+
".pdparams"
),
\
f
"The
{
path
}
.pdparams does not exists!"
"The {}.pdparams does not exists!"
.
format
(
path
)
params
=
paddle
.
load
(
path
+
'.pdparams'
)
state_dict
=
model
.
state_dict
()
...
...
@@ -92,11 +109,11 @@ def load_pretrained_params(model, path):
if
list
(
state_dict
[
k1
].
shape
)
==
list
(
params
[
k2
].
shape
):
new_state_dict
[
k1
]
=
params
[
k2
]
else
:
logger
.
info
(
f
"The shape of model params
{
k1
}
{
state_dict
[
k1
].
shape
}
not matched with loaded params
{
k2
}
{
params
[
k2
].
shape
}
!"
)
logger
.
warning
(
"The shape of model params {} {} not matched with loaded params {} {} !"
.
format
(
k1
,
state_dict
[
k1
].
shape
,
k2
,
params
[
k2
].
shape
)
)
model
.
set_state_dict
(
new_state_dict
)
logger
.
info
(
f
"load pretrain successful from
{
path
}
"
)
logger
.
info
(
"load pretrain successful from {
}"
.
format
(
path
)
)
return
model
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment