chenpangpang/transformers, commit 35410da7
authored Feb 27, 2019 by lukovnikov
parent 4d79e0d3

added warning
Showing 2 changed files with 6 additions and 6 deletions:
pytorch_pretrained_bert/optimization.py (+2, -2)
pytorch_pretrained_bert/optimization_openai.py (+4, -4)
pytorch_pretrained_bert/optimization.py

@@ -159,8 +159,8 @@ class BertAdam(Optimizer):
                     # warning for exceeding t_total (only active with warmup_linear
                     if group['schedule'] == "warmup_linear" and progress > 1. and not warned_for_t_total:
-                        logger.warning("Training beyond specified 't_total' steps. Learning rate set to {}. "
-                                       "Please set 't_total' of {} correctly.".format(lr_scheduled, self.__class__.__name__))
+                        logger.warning("Training beyond specified 't_total' steps with schedule '{}'. Learning rate set to {}. "
+                                       "Please set 't_total' of {} correctly.".format(group['schedule'], lr_scheduled, self.__class__.__name__))
                         warned_for_t_total = True
                     # end warning
                 else:
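The warning keys off progress, which in BertAdam's step() is the ratio of the current step count to t_total; once it passes 1.0, warmup_linear has run past the end of its schedule. Below is a minimal standalone sketch of the logic. check_t_total is a hypothetical helper (not part of the library), and the lr computation assumes a linear decay to zero at progress == 1.0, per the warmup_linear docstring:

import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)

def check_t_total(step, t_total, schedule="warmup_linear", warned_for_t_total=False):
    # Hypothetical helper mirroring the block above; assumes
    # progress = step / t_total, as computed in BertAdam.step().
    progress = step / t_total
    # Assumed linear decay to zero past the warmup peak (see warmup_linear's docstring).
    lr_scheduled = max(1.0 - progress, 0.0)
    if schedule == "warmup_linear" and progress > 1. and not warned_for_t_total:
        logger.warning("Training beyond specified 't_total' steps with schedule '{}'. Learning rate set to {}. "
                       "Please set 't_total' of {} correctly.".format(schedule, lr_scheduled, "BertAdam"))
        warned_for_t_total = True  # returned so the caller can suppress repeat warnings
    return lr_scheduled, warned_for_t_total

lr, warned = check_t_total(step=1100, t_total=1000)  # progress = 1.1 -> warning fires, lr == 0.0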
pytorch_pretrained_bert/optimization_openai.py

@@ -29,14 +29,14 @@ def warmup_cosine(x, warmup=0.002):
     return 0.5 * (1.0 + torch.cos(math.pi * x))

 def warmup_constant(x, warmup=0.002):
-    """ Linearly increases learning rate over `warmup`*`t_total` (as provided to BertAdam) training steps.
+    """ Linearly increases learning rate over `warmup`*`t_total` (as provided to OpenAIAdam) training steps.
         Learning rate is 1. afterwards. """
     if x < warmup:
         return x/warmup
     return 1.0

 def warmup_linear(x, warmup=0.002):
-    """ Specifies a triangular learning rate schedule where peak is reached at `warmup`*`t_total`-th (as provided to BertAdam) training step.
+    """ Specifies a triangular learning rate schedule where peak is reached at `warmup`*`t_total`-th (as provided to OpenAIAdam) training step.
         After `t_total`-th training step, learning rate is zero. """
     if x < warmup:
         return x/warmup
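To make the docstring fix concrete: both schedules ramp up linearly over the first warmup fraction of training, but warmup_constant then holds the multiplier at 1.0 while warmup_linear decays it back to zero at x == 1.0. A standalone sketch follows; the decay branch of warmup_linear sits outside this hunk, so its exact form here is an assumption based on the docstring:

def warmup_constant(x, warmup=0.002):
    # Ramp up over the first `warmup` fraction of training, then hold at 1.0.
    if x < warmup:
        return x / warmup
    return 1.0

def warmup_linear(x, warmup=0.002):
    # Triangular: peak of 1.0 at x == warmup, then an assumed linear
    # decay reaching 0.0 at x == 1.0 (the t_total-th step).
    if x < warmup:
        return x / warmup
    return max((1.0 - x) / (1.0 - warmup), 0.0)

for x in (0.001, 0.002, 0.5, 1.0, 1.1):
    print(x, warmup_constant(x), round(warmup_linear(x), 3))
# Past x == 1.0 (i.e. past t_total), warmup_linear stays at 0.0,
# which is exactly the situation the new warning below flags.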
@@ -142,8 +142,8 @@ class OpenAIAdam(Optimizer):
                     # warning for exceeding t_total (only active with warmup_linear
                     if group['schedule'] == "warmup_linear" and progress > 1. and not warned_for_t_total:
-                        logger.warning("Training beyond specified 't_total' steps. Learning rate set to {}. "
-                                       "Please set 't_total' of {} correctly.".format(lr_scheduled, self.__class__.__name__))
+                        logger.warning("Training beyond specified 't_total' steps with schedule '{}'. Learning rate set to {}. "
+                                       "Please set 't_total' of {} correctly.".format(group['schedule'], lr_scheduled, self.__class__.__name__))
                         warned_for_t_total = True
                     # end warning
                 else:
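This mirrors the BertAdam change above, so both optimizers now report which schedule triggered the warning. A possible end-to-end repro against the 2019-era pytorch_pretrained_bert API is sketched below; the constructor arguments are taken from that release and should be treated as assumptions when running against any other version:

import torch
from pytorch_pretrained_bert.optimization_openai import OpenAIAdam

model = torch.nn.Linear(4, 2)
optimizer = OpenAIAdam(model.parameters(), lr=1e-3,
                       schedule="warmup_linear", warmup=0.1, t_total=10)

for step in range(12):  # deliberately run two steps past t_total
    loss = model(torch.randn(3, 4)).sum()
    loss.backward()
    optimizer.step()     # logs the new warning once progress exceeds 1.0
    optimizer.zero_grad()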