Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
wangsen
paddle_dbnet
Commits
8a28962c
Commit
8a28962c
authored
Feb 12, 2022
by
WenmuZhou
Browse files
add Const lr
parent
07633eb8
Changes
4
Hide whitespace changes
Inline
Side-by-side
Showing
4 changed files
with
43 additions
and
14 deletions
+43
-14
configs/vqa/re/layoutlmv2.yml
configs/vqa/re/layoutlmv2.yml
+6
-4
configs/vqa/re/layoutxlm.yml
configs/vqa/re/layoutxlm.yml
+2
-4
ppocr/optimizer/__init__.py
ppocr/optimizer/__init__.py
+2
-5
ppocr/optimizer/learning_rate.py
ppocr/optimizer/learning_rate.py
+33
-1
No files found.
configs/vqa/re/layoutlmv2.yml
View file @
8a28962c
...
...
@@ -34,10 +34,12 @@ Optimizer:
beta2
:
0.999
clip_norm
:
10
lr
:
name
:
Piecewise
values
:
[
0.000005
,
0.00005
]
decay_epochs
:
[
10
]
warmup_epoch
:
0
# name: Piecewise
# values: [0.000005, 0.00005]
# decay_epochs: [10]
# warmup_epoch: 0
learning_rate
:
0.00005
warmup_epoch
:
10
regularizer
:
name
:
L2
factor
:
0.00000
...
...
configs/vqa/re/layoutxlm.yml
View file @
8a28962c
...
...
@@ -34,10 +34,8 @@ Optimizer:
beta2
:
0.999
clip_norm
:
10
lr
:
name
:
Piecewise
values
:
[
0.000005
,
0.00005
]
decay_epochs
:
[
10
]
warmup_epoch
:
0
learning_rate
:
0.00005
warmup_epoch
:
10
regularizer
:
name
:
L2
factor
:
0.00000
...
...
ppocr/optimizer/__init__.py
View file @
8a28962c
...
...
@@ -25,11 +25,8 @@ __all__ = ['build_optimizer']
def build_lr_scheduler(lr_config, epochs, step_each_epoch):
    """Build a learning-rate scheduler instance from a config dict.

    Args:
        lr_config (dict): scheduler configuration. The optional 'name' key
            selects a class from the sibling ``learning_rate`` module
            (default: 'Const', a fixed learning rate); all remaining keys
            are forwarded as keyword arguments to that class. NOTE: the
            dict is mutated in place ('name' is popped, 'epochs' and
            'step_each_epoch' are injected).
        epochs (int): total number of training epochs.
        step_each_epoch (int): optimizer steps per epoch.

    Returns:
        The scheduler produced by instantiating and calling the selected
        class, e.g. ``learning_rate.Const(**lr_config)()``.
    """
    from . import learning_rate
    lr_config.update({'epochs': epochs, 'step_each_epoch': step_each_epoch})
    # Fall back to 'Const' so configs that only specify `learning_rate`
    # still get a valid (fixed-rate) schedule.
    lr_name = lr_config.pop('name', 'Const')
    lr = getattr(learning_rate, lr_name)(**lr_config)()
    return lr
...
...
ppocr/optimizer/learning_rate.py
View file @
8a28962c
...
...
@@ -275,4 +275,36 @@ class OneCycle(object):
start_lr
=
0.0
,
end_lr
=
self
.
max_lr
,
last_epoch
=
self
.
last_epoch
)
return
learning_rate
\ No newline at end of file
return
learning_rate
class Const(object):
    """Constant learning-rate schedule with an optional linear warmup.

    Args:
        learning_rate (float): the fixed learning rate to use.
        step_each_epoch (int): optimizer steps per epoch; used only to
            convert ``warmup_epoch`` from epochs into steps.
        warmup_epoch (int, optional): number of epochs of linear warmup
            before the constant rate takes effect. Default: 0 (no warmup).
        last_epoch (int, optional): index of the last epoch; can be set to
            restart training. Default: -1, meaning the initial learning rate.
    """

    def __init__(self,
                 learning_rate,
                 step_each_epoch,
                 warmup_epoch=0,
                 last_epoch=-1,
                 **kwargs):
        super(Const, self).__init__()
        self.learning_rate = learning_rate
        self.last_epoch = last_epoch
        # Warmup is configured in epochs but consumed in steps.
        self.warmup_epoch = round(warmup_epoch * step_each_epoch)

    def __call__(self):
        schedule = self.learning_rate
        if self.warmup_epoch > 0:
            # NOTE(review): `lr` is presumably `paddle.optimizer.lr`
            # imported at module level — confirm against the file header.
            schedule = lr.LinearWarmup(
                learning_rate=schedule,
                warmup_steps=self.warmup_epoch,
                start_lr=0.0,
                end_lr=self.learning_rate,
                last_epoch=self.last_epoch)
        return schedule
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment