OpenDAS / Fairseq

Commit 47b3b81c, authored Apr 07, 2018 by Myle Ott

Allow schedule for update-freq

parent 4fa8760e
Showing 2 changed files with 10 additions and 3 deletions:

fairseq/options.py        (+3, -2)
singleprocess_train.py    (+7, -1)
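In short, this commit changes --update-freq from a single integer into a comma-separated schedule: the i-th value is the number of batches to accumulate before each parameter update during epoch i, and epochs past the end of the schedule reuse the last value. A hypothetical invocation (the dataset path and any other flags are illustrative, not part of this commit):

    python singleprocess_train.py data-bin/example --update-freq '4,2,1'

With this schedule, gradients are buffered over 4 batches per update in epoch 1, over 2 batches in epoch 2, and the model updates on every batch from epoch 3 onward.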
fairseq/options.py
@@ -58,6 +58,7 @@ def parse_args_and_arch(parser, input_args=None):
     # Post-process args.
     args.lr = list(map(float, args.lr.split(',')))
+    args.update_freq = list(map(float, args.update_freq.split(',')))
     if args.max_sentences_valid is None:
         args.max_sentences_valid = args.max_sentences
@@ -152,6 +153,8 @@ def add_optimization_args(parser):
     group.add_argument('--sentence-avg', action='store_true',
                        help='normalize gradients by the number of sentences in a batch'
                             ' (default is to normalize by number of tokens)')
+    group.add_argument('--update-freq', default='1', metavar='N',
+                       help='update parameters every N_i batches, when in epoch i')
     # Optimizer definitions can be found under fairseq/optim/
     group.add_argument('--optimizer', default='nag', metavar='OPT',
@@ -174,8 +177,6 @@ def add_optimization_args(parser):
     group.add_argument('--min-lr', default=1e-5, type=float, metavar='LR',
                        help='minimum learning rate')
-    group.add_argument('--update-freq', default=1, type=int, metavar='N',
-                       help='update parameters every N batches')
     return group
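To see what the new post-processing produces, here is a minimal standalone sketch (not fairseq code; just argparse with the same argument definition and the same parsing line as above):

    import argparse

    parser = argparse.ArgumentParser()
    # same definition as the new one in add_optimization_args: the default
    # stays a string so a comma-separated schedule can be split uniformly
    parser.add_argument('--update-freq', default='1', metavar='N',
                        help='update parameters every N_i batches, when in epoch i')

    args = parser.parse_args(['--update-freq', '4,2,1'])
    # same post-processing as the new line in parse_args_and_arch
    args.update_freq = list(map(float, args.update_freq.split(',')))
    print(args.update_freq)  # [4.0, 2.0, 1.0]

Keeping the default as the string '1' (rather than the old integer 1, whose definition is deleted above) means the split/map post-processing works whether or not the user passes the flag.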
singleprocess_train.py
@@ -132,12 +132,18 @@ def train(args, trainer, itr, epoch):
         if meter is not None:
             meter.reset()
 
+    # update parameters every N batches
+    if epoch <= len(args.update_freq):
+        update_freq = args.update_freq[epoch - 1]
+    else:
+        update_freq = args.update_freq[-1]
+
     extra_meters = collections.defaultdict(lambda: AverageMeter())
     max_update = args.max_update or math.inf
     num_batches = len(itr)
     progress = progress_bar.build_progress_bar(args, itr, epoch, no_progress_bar='simple')
     for i, sample in enumerate(progress):
-        if i < num_batches - 1 and (i + 1) % args.update_freq > 0:
+        if i < num_batches - 1 and (i + 1) % update_freq > 0:
             # buffer updates according to --update-freq
             trainer.train_step(sample, update_params=False)
             continue
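The effect of the new lookup and the changed modulo condition can be checked in isolation. Below is a standalone sketch (not fairseq code; the schedule and batch count are made-up values) that selects the epoch's frequency the same way train() now does and lists which batch indices trigger a real parameter update rather than a buffered one:

    def pick_update_freq(update_freq, epoch):
        # same lookup as in train(): epochs are 1-based, and epochs past the
        # end of the schedule fall back to its last entry
        if epoch <= len(update_freq):
            return update_freq[epoch - 1]
        return update_freq[-1]

    update_freq = [4.0, 2.0, 1.0]  # as parsed from --update-freq '4,2,1'
    num_batches = 8

    for epoch in (1, 2, 3, 4):
        freq = pick_update_freq(update_freq, epoch)
        # a batch is buffered when it is not the last one and (i + 1) % freq
        # is nonzero, mirroring the new condition in the training loop
        updates = [i for i in range(num_batches)
                   if not (i < num_batches - 1 and (i + 1) % freq > 0)]
        print(f'epoch {epoch}: freq={freq}, updates after batches {updates}')

    # epoch 1: updates after batches [3, 7]          (every 4th batch)
    # epoch 2: updates after batches [1, 3, 5, 7]    (every 2nd batch)
    # epochs 3 and 4: updates after every batch (schedule exhausted,
    # last entry reused)

Note that the final batch of an epoch always triggers an update (the i < num_batches - 1 guard), so leftover buffered gradients are never carried across epochs.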