chenpangpang/transformers, commit 7de5c6aa
Authored Mar 20, 2019 by Matthew Carrigan
PEP8 and formatting cleanups
Parent: 1798e98e
Showing 2 changed files with 7 additions and 5 deletions:
  examples/lm_finetuning/finetune_on_pregenerated.py   +7 -4
  examples/lm_finetuning/pregenerate_training_data.py  +0 -1
examples/lm_finetuning/finetune_on_pregenerated.py
@@ -9,7 +9,7 @@ from collections import namedtuple
 from torch.utils.data import DataLoader, Dataset, RandomSampler
 from torch.utils.data.distributed import DistributedSampler
-from tqdm import tqdm, trange
+from tqdm import tqdm
 from pytorch_pretrained_bert.modeling import BertForPreTraining
 from pytorch_pretrained_bert.tokenization import BertTokenizer
@@ -149,7 +149,8 @@ def main():
                         help="random seed for initialization")
     args = parser.parse_args()

-    assert args.pregenerated_data.is_dir(), "--pregenerated_data should point to the folder of files made by pregenerate_training_data.py!"
+    assert args.pregenerated_data.is_dir(), \
+        "--pregenerated_data should point to the folder of files made by pregenerate_training_data.py!"

     samples_per_epoch = []
     for i in range(args.epochs):
@@ -237,7 +238,8 @@ def main():
            from apex.optimizers import FP16_Optimizer
            from apex.optimizers import FusedAdam
        except ImportError:
-            raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use distributed and fp16 training.")
+            raise ImportError(
+                "Please install apex from https://www.github.com/nvidia/apex to use distributed and fp16 training.")

        optimizer = FusedAdam(optimizer_grouped_parameters,
                              lr=args.learning_rate,
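The reformatted raise sits just above the optimizer setup, where the FusedAdam instance started at the end of this hunk is wrapped in apex's FP16_Optimizer a few lines outside the shown context. A minimal sketch of that wiring, assuming the legacy apex API and the script's existing names (optimizer_grouped_parameters, args.learning_rate, args.loss_scale); these exact lines are not part of this commit:

# Sketch only: fp16 optimizer setup with the legacy apex API (assumed, not shown in this diff).
optimizer = FusedAdam(optimizer_grouped_parameters,
                      lr=args.learning_rate,
                      bias_correction=False,
                      max_grad_norm=1.0)
if args.loss_scale == 0:
    # Dynamic loss scaling: apex adjusts the scale automatically when gradients overflow.
    optimizer = FP16_Optimizer(optimizer, dynamic_loss_scale=True)
else:
    optimizer = FP16_Optimizer(optimizer, static_loss_scale=args.loss_scale)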
@@ -293,7 +295,8 @@ def main():
                if args.fp16:
                    # modify learning rate with special warm up BERT uses
                    # if args.fp16 is False, BertAdam is used that handles this automatically
-                    lr_this_step = args.learning_rate * warmup_linear(global_step/num_train_optimization_steps, args.warmup_proportion)
+                    lr_this_step = args.learning_rate * warmup_linear(global_step/num_train_optimization_steps,
+                                                                      args.warmup_proportion)
                    for param_group in optimizer.param_groups:
                        param_group['lr'] = lr_this_step
                optimizer.step()
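warmup_linear in the hunk above is imported from pytorch_pretrained_bert.optimization elsewhere in the script. A minimal sketch of the schedule it implemented around the time of this commit, included only to make the lr_this_step computation readable; the function body is an assumption, not part of this diff:

def warmup_linear(x, warmup=0.002):
    # x is the fraction of training completed: global_step / num_train_optimization_steps
    if x < warmup:
        return x / warmup   # ramp linearly up to the peak learning rate
    return 1.0 - x          # then decay linearly towards zero

With args.warmup_proportion at, say, 0.1, lr_this_step climbs to args.learning_rate over the first 10% of optimization steps and then decays linearly for the rest of training.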
examples/lm_finetuning/pregenerate_training_data.py
@@ -269,6 +269,5 @@ def main():
            metrics_file.write(json.dumps(metrics))

-

 if __name__ == '__main__':
     main()