chenpangpang/transformers · Commit fb78a90d

Unverified commit fb78a90d, authored Aug 27, 2020 by Sam Shleifer, committed by GitHub on Aug 27, 2020.

PL: --adafactor option (#6776)

parent 92ac2fa7
Showing 2 changed files with 12 additions and 1 deletion (+12 -1):

examples/lightning_base.py                 +11 -1
examples/seq2seq/test_seq2seq_examples.py   +1 -0
examples/lightning_base.py

@@ -22,6 +22,7 @@ from transformers import (
     PreTrainedTokenizer,
 )
 from transformers.optimization import (
+    Adafactor,
     get_cosine_schedule_with_warmup,
     get_cosine_with_hard_restarts_schedule_with_warmup,
     get_linear_schedule_with_warmup,

@@ -137,7 +138,15 @@ class BaseTransformer(pl.LightningModule):
                 "weight_decay": 0.0,
             },
         ]
-        optimizer = AdamW(optimizer_grouped_parameters, lr=self.hparams.learning_rate, eps=self.hparams.adam_epsilon)
+        if self.hparams.adafactor:
+            optimizer = Adafactor(
+                optimizer_grouped_parameters, lr=self.hparams.learning_rate, scale_parameter=False, relative_step=False
+            )
+
+        else:
+            optimizer = AdamW(
+                optimizer_grouped_parameters, lr=self.hparams.learning_rate, eps=self.hparams.adam_epsilon
+            )
         self.opt = optimizer
         scheduler = self.get_lr_scheduler()

@@ -251,6 +260,7 @@ class BaseTransformer(pl.LightningModule):
         parser.add_argument("--num_train_epochs", dest="max_epochs", default=3, type=int)
         parser.add_argument("--train_batch_size", default=32, type=int)
         parser.add_argument("--eval_batch_size", default=32, type=int)
+        parser.add_argument("--adafactor", action="store_true")


 class LoggingCallback(pl.Callback):
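For readers who want the new optimizer branch in one place, here is a minimal, self-contained sketch of the logic this commit adds to configure_optimizers, assuming an hparams namespace with adafactor, learning_rate, and adam_epsilon fields as in BaseTransformer (build_optimizer and the stand-in model are illustrative, not part of the commit):

import argparse

import torch
from transformers import AdamW
from transformers.optimization import Adafactor


def build_optimizer(grouped_parameters, hparams):
    # Mirrors the diff: the --adafactor flag swaps AdamW for Adafactor.
    if hparams.adafactor:
        # scale_parameter=False and relative_step=False make Adafactor use
        # the externally supplied learning rate instead of its own
        # relative-step schedule.
        return Adafactor(
            grouped_parameters, lr=hparams.learning_rate, scale_parameter=False, relative_step=False
        )
    return AdamW(grouped_parameters, lr=hparams.learning_rate, eps=hparams.adam_epsilon)


model = torch.nn.Linear(4, 2)  # stand-in model; values below are illustrative
hparams = argparse.Namespace(adafactor=True, learning_rate=3e-5, adam_epsilon=1e-8)
optimizer = build_optimizer(model.parameters(), hparams)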
examples/seq2seq/test_seq2seq_examples.py

@@ -30,6 +30,7 @@ logger = logging.getLogger()
 CUDA_AVAILABLE = torch.cuda.is_available()
 CHEAP_ARGS = {
     "label_smoothing": 0.2,
+    "adafactor": True,
     "early_stopping_patience": 2,
     "logger_name": "default",
     "length_penalty": 0.5,
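The one-line test change exercises the new code path: assuming the seq2seq tests build their argument namespace from CHEAP_ARGS (as the dict's name and placement suggest), setting "adafactor": True routes the cheap training runs through the Adafactor branch rather than AdamW. A hypothetical sketch of that wiring, with field names taken from the diff above:

import argparse

CHEAP_ARGS = {
    "label_smoothing": 0.2,
    "adafactor": True,  # new key: selects the Adafactor branch in configure_optimizers
    "early_stopping_patience": 2,
    "logger_name": "default",
    "length_penalty": 0.5,
}

hparams = argparse.Namespace(**CHEAP_ARGS)
assert hparams.adafactor  # tests now build Adafactor, not AdamW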