chenpangpang / transformers

Commit 60a37238, authored Feb 27, 2019 by lukovnikov (parent da2d8ca2)

    added warning

Showing 2 changed files with 10 additions and 0 deletions:

    pytorch_pretrained_bert/optimization.py          +5 -0
    pytorch_pretrained_bert/optimization_openai.py   +5 -0
pytorch_pretrained_bert/optimization.py

```diff
@@ -19,6 +19,9 @@ import torch
 from torch.optim import Optimizer
 from torch.optim.optimizer import required
 from torch.nn.utils import clip_grad_norm_
+import logging
+
+logger = logging.getLogger(__name__)
 
 def warmup_cosine(x, warmup=0.002):
     if x < warmup:
@@ -37,6 +40,8 @@ def warmup_linear(x, warmup=0.002):
     After `t_total`-th training step, learning rate is zero. """
     if x < warmup:
         return x/warmup
+    if x > 1:
+        logger.warning("Training beyond specified 't_total' steps. Learning rate set to zero. Please set 't_total' of BertAdam correctly.")
     return max((x-1.)/(warmup-1.), 0)
 
 SCHEDULES = {
```
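To see the new behavior in isolation, here is a minimal, self-contained sketch of the patched `warmup_linear` (a standalone copy of the function from the diff above, not an import from the library):

```python
import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)

def warmup_linear(x, warmup=0.002):
    """Linear warmup to 1.0 over the first `warmup` fraction of training,
    then linear decay to 0.0 at x == 1 (the `t_total`-th step)."""
    if x < warmup:
        return x / warmup
    if x > 1:
        # New in this commit: surface the silent zero-LR failure mode.
        logger.warning("Training beyond specified 't_total' steps. "
                       "Learning rate set to zero. "
                       "Please set 't_total' of BertAdam correctly.")
    return max((x - 1.) / (warmup - 1.), 0)

print(warmup_linear(0.001))  # 0.5    -> still warming up
print(warmup_linear(0.5))    # ~0.501 -> decaying toward zero
print(warmup_linear(1.5))    # 0.0, and the warning above is logged
```

Before this commit, `x > 1` fell straight through to the `max(..., 0)` clamp and silently returned 0, so a mis-set `t_total` froze training at a zero learning rate with no diagnostic; the warning makes that failure mode visible.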
pytorch_pretrained_bert/optimization_openai.py

```diff
@@ -19,6 +19,9 @@ import torch
 from torch.optim import Optimizer
 from torch.optim.optimizer import required
 from torch.nn.utils import clip_grad_norm_
+import logging
+
+logger = logging.getLogger(__name__)
 
 def warmup_cosine(x, warmup=0.002):
     if x < warmup:
@@ -37,6 +40,8 @@ def warmup_linear(x, warmup=0.002):
     After `t_total`-th training step, learning rate is zero. """
     if x < warmup:
         return x/warmup
+    if x > 1:
+        logger.warning("Training beyond specified 't_total' steps. Learning rate set to zero. Please set 't_total' of BertAdam correctly.")
     return max((x-1.)/(warmup-1.), 0)
 
 SCHEDULES = {
```
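How does `x` ever exceed 1? The schedule functions receive the fraction of training completed. The following hypothetical driver loop (reusing `warmup_linear` from the sketch above) illustrates this; computing the progress as `step / t_total` is an assumption consistent with the docstring, not the optimizer's verbatim internals:

```python
base_lr = 5e-5
t_total = 1000   # user-declared total number of training steps
warmup = 0.1     # warm up over the first 10% of training

for step in (50, 500, 1000, 1100):
    x = step / t_total                          # training progress; > 1 past t_total
    lr_this_step = base_lr * warmup_linear(x, warmup)
    print(f"step {step:5d}: lr = {lr_this_step:.2e}")
# step 1100 triggers the new warning and yields lr = 0.00e+00
```

If the actual number of optimizer steps exceeds the declared `t_total`, every step past it now logs the warning instead of silently training at a zero learning rate.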