Commit f9a2b26a (unverified) in wangsen/paddle_dbnet
Authored Mar 28, 2022 by littletomatodonkey, committed via GitHub on Mar 28, 2022
Parent: 3d692957

fix quant logic (#5806)

* fix quant logic
* fix undef
* fix doc

Showing 1 changed file with 9 additions and 2 deletions:

deploy/slim/quantization/quant.py (+9, -2)
@@ -118,6 +118,11 @@ def main(config, device, logger, vdl_writer):
             config['Architecture']["Head"]['out_channels'] = char_num
     model = build_model(config['Architecture'])
 
+    pre_best_model_dict = dict()
+    # load fp32 model to begin quantization
+    if config["Global"]["pretrained_model"] is not None:
+        pre_best_model_dict = load_model(config, model)
+
     quanter = QAT(config=quant_config, act_preprocess=PACT)
     quanter.quantize(model)
 
@@ -134,10 +139,12 @@ def main(config, device, logger, vdl_writer):
         step_each_epoch=len(train_dataloader),
         parameters=model.parameters())
 
+    # resume PACT training process
+    if config["Global"]["checkpoints"] is not None:
+        pre_best_model_dict = load_model(config, model, optimizer)
+
     # build metric
     eval_class = build_metric(config['Metric'])
-    # load pretrain model
-    pre_best_model_dict = load_model(config, model, optimizer)
 
     logger.info('train dataloader has {} iters, valid dataloader has {} iters'.
                 format(len(train_dataloader), len(valid_dataloader)))
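Taken together, the two hunks reorder the quantization setup in main(): FP32 pretrained weights (Global.pretrained_model) are now loaded before quanter.quantize(model) inserts fake-quant ops into the network, and an earlier PACT training run (Global.checkpoints) is resumed only after the optimizer has been built, rather than unconditionally reloading a model after the metric is constructed. Below is a rough sketch of the resulting order of operations, not part of the commit: the wrapper function and its argument list are invented for illustration, while build_model, load_model, build_optimizer, build_metric, PACT and quant_config are the helpers quant.py already defines or imports.

# Illustrative outline only (not part of the commit): the wrapper name and its
# arguments are hypothetical; build_model, load_model, build_optimizer,
# build_metric, PACT and quant_config are assumed to be the helpers already
# available in quant.py, and QAT is PaddleSlim's dygraph QAT wrapper.
from paddleslim.dygraph.quant import QAT

def setup_quant_training(config, quant_config, PACT, train_dataloader,
                         build_model, load_model, build_optimizer, build_metric):
    # 1) Build the FP32 model and load FP32 pretrained weights *before* any
    #    fake-quant ops are inserted, so the checkpoint matches the graph.
    model = build_model(config['Architecture'])
    pre_best_model_dict = dict()
    if config["Global"]["pretrained_model"] is not None:
        pre_best_model_dict = load_model(config, model)

    # 2) Rewrite the model in place with fake-quant / PACT activation preprocessing.
    quanter = QAT(config=quant_config, act_preprocess=PACT)
    quanter.quantize(model)

    # 3) Build the optimizer over the (now quantized) model's parameters.
    lr_scheduler, optimizer = build_optimizer(
        config['Optimizer'],
        epochs=config['Global']['epoch_num'],  # assumed, as in the upstream script
        step_each_epoch=len(train_dataloader),
        parameters=model.parameters())

    # 4) Resume an earlier PACT training run only if a checkpoint is given and
    #    only once the optimizer exists, since optimizer state is restored too.
    if config["Global"]["checkpoints"] is not None:
        pre_best_model_dict = load_model(config, model, optimizer)

    eval_class = build_metric(config['Metric'])
    return model, quanter, lr_scheduler, optimizer, eval_class, pre_best_model_dict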