wangsen / paddle_dbnet · Commit 91b79f69
"tests/vscode:/vscode.git/clone" did not exist on "1172c9634b4a32d6e82301e3d59ce17005e13e85"
Authored Nov 24, 2021 by WenmuZhou

pair param with key when load trained model params

parent 5fd13ab8
Showing 1 changed file with 20 additions and 4 deletions.
ppocr/utils/save_load.py  (+20 / -4)
@@ -56,9 +56,25 @@ def load_model(config, model, optimizer=None):
     if checkpoints:
         if checkpoints.endswith('pdparams'):
             checkpoints = checkpoints.replace('.pdparams', '')
-        assert os.path.exists(checkpoints + ".pdopt"), \
-            f"The {checkpoints}.pdopt does not exists!"
-        load_pretrained_params(model, checkpoints)
+        assert os.path.exists(checkpoints + ".pdparams"), \
+            f"The {checkpoints}.pdparams does not exists!"
+
+        # load params from trained model
+        params = paddle.load(checkpoints + '.pdparams')
+        state_dict = model.state_dict()
+        new_state_dict = {}
+        for key, value in state_dict.items():
+            if key not in params:
+                logger.warning(f"{key} not in loaded params {params.keys()} !")
+            pre_value = params[key]
+            if list(value.shape) == list(pre_value.shape):
+                new_state_dict[key] = pre_value
+            else:
+                logger.warning(
+                    f"The shape of model params {key} {value.shape} not matched with loaded params shape {pre_value.shape} !"
+                )
+        model.set_state_dict(new_state_dict)
+
         optim_dict = paddle.load(checkpoints + '.pdopt')
         if optimizer is not None:
             optimizer.set_state_dict(optim_dict)
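In short, the new branch stops delegating to load_pretrained_params and instead pairs every entry of the model's state dict with the checkpoint tensor saved under the same key, copying it only when the shapes agree. A minimal standalone sketch of that pattern, assuming a paddle.nn.Layer and a checkpoint prefix without the '.pdparams' suffix; the function name and logging setup are illustrative, not part of the commit:

```python
import logging

import paddle

logger = logging.getLogger(__name__)


def load_trained_params_by_key(model, ckpt_prefix):
    """Illustrative sketch: pair trained params with model params by key."""
    params = paddle.load(ckpt_prefix + '.pdparams')  # dict: param name -> tensor
    new_state_dict = {}
    for key, value in model.state_dict().items():
        if key not in params:
            logger.warning("%s not in loaded params", key)
            continue  # skip keys the checkpoint does not provide
        if list(value.shape) == list(params[key].shape):
            new_state_dict[key] = params[key]
        else:
            logger.warning("shape mismatch for %s: model %s vs loaded %s",
                           key, value.shape, params[key].shape)
    # only the matched subset is copied back into the model
    model.set_state_dict(new_state_dict)
```

The continue on missing keys is a small deviation from the hunk above, added so that absent keys are skipped rather than looked up again.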
@@ -92,7 +108,7 @@ def load_pretrained_params(model, path):
         if list(state_dict[k1].shape) == list(params[k2].shape):
             new_state_dict[k1] = params[k2]
         else:
-            logger.info(
+            logger.warning(
                 f"The shape of model params {k1} {state_dict[k1].shape} not matched with loaded params {k2} {params[k2].shape} !"
             )
     model.set_state_dict(new_state_dict)
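The second hunk only raises the severity of the mismatch message in load_pretrained_params from info to warning. The k1/k2 naming in the surrounding context suggests (this is an inference; the loop itself is not shown) that this helper pairs parameters by position rather than by name, which is the failure mode the commit title addresses for checkpoints: positional pairing can silently put one layer's weights into another when the shapes happen to agree. A small, self-contained illustration of that pitfall, not code from the library:

```python
import paddle

# Two layers with identical parameter shapes but different registration order,
# so their state dicts enumerate keys differently (illustrative only).
class A(paddle.nn.Layer):
    def __init__(self):
        super().__init__()
        self.fc1 = paddle.nn.Linear(4, 4)
        self.fc2 = paddle.nn.Linear(4, 4)


class B(paddle.nn.Layer):
    def __init__(self):
        super().__init__()
        self.fc2 = paddle.nn.Linear(4, 4)
        self.fc1 = paddle.nn.Linear(4, 4)


saved = A().state_dict()
target = B().state_dict()

for k1, k2 in zip(target.keys(), saved.keys()):
    # Positional pairing: every shape check passes, yet fc2 weights from the
    # saved dict would be written into fc1 of the target and vice versa.
    print(k1, "<-", k2, list(target[k1].shape) == list(saved[k2].shape))
```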