chenpangpang / transformers · Commit deb2b590 (unverified)

Fix lr_scheduler in no_trainer training scripts (#27872)

* Fix lr_scheduler
* Fix lr scheduler

Authored Jan 22, 2024 by bofeng huang; committed by GitHub on Jan 22, 2024.
Parent: 692c3c6b
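Why the change: these no_trainer scripts hand the scheduler to accelerator.prepare, and the prepared scheduler advances the underlying schedule once per process on each step, so the step budgets given to get_scheduler must be scaled by accelerator.num_processes rather than by args.gradient_accumulation_steps (the scheduler is only stepped once per accumulation cycle in any case). The one exception is an epoch-derived max_train_steps (overrode_max_train_steps is True), which the scripts compute from the not-yet-sharded dataloader, so it already includes the process count. Below is a minimal, hypothetical sketch of the corrected setup; the Namespace values are placeholders standing in for the scripts' parsed CLI arguments, not values from this commit.

# Hypothetical stand-alone sketch of the corrected scheduler setup.
from argparse import Namespace

import torch
from accelerate import Accelerator
from transformers import get_scheduler

args = Namespace(
    lr_scheduler_type="linear",
    num_warmup_steps=100,
    max_train_steps=1000,
)
# True when the script derived max_train_steps from num_train_epochs.
overrode_max_train_steps = False

accelerator = Accelerator()
model = torch.nn.Linear(8, 2)
optimizer = torch.optim.AdamW(model.parameters(), lr=5e-5)

lr_scheduler = get_scheduler(
    name=args.lr_scheduler_type,
    optimizer=optimizer,
    # Scale by the number of processes: the scheduler returned by
    # `accelerator.prepare` is stepped on every process.
    num_warmup_steps=args.num_warmup_steps * accelerator.num_processes,
    # An epoch-derived max_train_steps already reflects the un-sharded
    # dataloader length, so it needs no extra factor.
    num_training_steps=args.max_train_steps
    if overrode_max_train_steps
    else args.max_train_steps * accelerator.num_processes,
)

model, optimizer, lr_scheduler = accelerator.prepare(model, optimizer, lr_scheduler)

With the previous scaling, warmup and decay lengths drifted whenever num_processes differed from gradient_accumulation_steps; that mis-scheduling is what this commit corrects.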
Showing 9 changed files with 18 additions and 18 deletions (+18, -18).
Changed files:

examples/pytorch/image-classification/run_image_classification_no_trainer.py (+2, -2)
examples/pytorch/image-pretraining/run_mim_no_trainer.py (+2, -2)
examples/pytorch/language-modeling/run_clm_no_trainer.py (+2, -2)
examples/pytorch/language-modeling/run_mlm_no_trainer.py (+2, -2)
examples/pytorch/multiple-choice/run_swag_no_trainer.py (+2, -2)
examples/pytorch/question-answering/run_qa_beam_search_no_trainer.py (+2, -2)
examples/pytorch/question-answering/run_qa_no_trainer.py (+2, -2)
examples/pytorch/semantic-segmentation/run_semantic_segmentation_no_trainer.py (+2, -2)
examples/pytorch/summarization/run_summarization_no_trainer.py (+2, -2)
examples/pytorch/image-classification/run_image_classification_no_trainer.py

@@ -438,8 +438,8 @@ def main():
     lr_scheduler = get_scheduler(
         name=args.lr_scheduler_type,
         optimizer=optimizer,
-        num_warmup_steps=args.num_warmup_steps * args.gradient_accumulation_steps,
-        num_training_steps=args.max_train_steps * args.gradient_accumulation_steps,
+        num_warmup_steps=args.num_warmup_steps * accelerator.num_processes,
+        num_training_steps=args.max_train_steps if overrode_max_train_steps else args.max_train_steps * accelerator.num_processes,
     )
 
     # Prepare everything with our `accelerator`.
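The remaining eight scripts below receive the identical two-line change at their respective get_scheduler call sites; only the hunk locations differ.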
examples/pytorch/image-pretraining/run_mim_no_trainer.py

@@ -626,8 +626,8 @@ def main():
     lr_scheduler = get_scheduler(
         name=args.lr_scheduler_type,
         optimizer=optimizer,
-        num_warmup_steps=args.num_warmup_steps * args.gradient_accumulation_steps,
-        num_training_steps=args.max_train_steps * args.gradient_accumulation_steps,
+        num_warmup_steps=args.num_warmup_steps * accelerator.num_processes,
+        num_training_steps=args.max_train_steps if overrode_max_train_steps else args.max_train_steps * accelerator.num_processes,
     )
 
     # Prepare everything with our `accelerator`.
examples/pytorch/language-modeling/run_clm_no_trainer.py

@@ -526,8 +526,8 @@ def main():
     lr_scheduler = get_scheduler(
         name=args.lr_scheduler_type,
         optimizer=optimizer,
-        num_warmup_steps=args.num_warmup_steps * args.gradient_accumulation_steps,
-        num_training_steps=args.max_train_steps * args.gradient_accumulation_steps,
+        num_warmup_steps=args.num_warmup_steps * accelerator.num_processes,
+        num_training_steps=args.max_train_steps if overrode_max_train_steps else args.max_train_steps * accelerator.num_processes,
     )
 
     # Prepare everything with our `accelerator`.
examples/pytorch/language-modeling/run_mlm_no_trainer.py

@@ -563,8 +563,8 @@ def main():
     lr_scheduler = get_scheduler(
         name=args.lr_scheduler_type,
         optimizer=optimizer,
-        num_warmup_steps=args.num_warmup_steps * args.gradient_accumulation_steps,
-        num_training_steps=args.max_train_steps * args.gradient_accumulation_steps,
+        num_warmup_steps=args.num_warmup_steps * accelerator.num_processes,
+        num_training_steps=args.max_train_steps if overrode_max_train_steps else args.max_train_steps * accelerator.num_processes,
     )
 
     # Prepare everything with our `accelerator`.
examples/pytorch/multiple-choice/run_swag_no_trainer.py

@@ -510,8 +510,8 @@ def main():
     lr_scheduler = get_scheduler(
         name=args.lr_scheduler_type,
         optimizer=optimizer,
-        num_warmup_steps=args.num_warmup_steps * args.gradient_accumulation_steps,
-        num_training_steps=args.max_train_steps * args.gradient_accumulation_steps,
+        num_warmup_steps=args.num_warmup_steps * accelerator.num_processes,
+        num_training_steps=args.max_train_steps if overrode_max_train_steps else args.max_train_steps * accelerator.num_processes,
     )
 
     # Prepare everything with our `accelerator`.
examples/pytorch/question-answering/run_qa_beam_search_no_trainer.py

@@ -750,8 +750,8 @@ def main():
     lr_scheduler = get_scheduler(
         name=args.lr_scheduler_type,
         optimizer=optimizer,
-        num_warmup_steps=args.num_warmup_steps * args.gradient_accumulation_steps,
-        num_training_steps=args.max_train_steps * args.gradient_accumulation_steps,
+        num_warmup_steps=args.num_warmup_steps * accelerator.num_processes,
+        num_training_steps=args.max_train_steps if overrode_max_train_steps else args.max_train_steps * accelerator.num_processes,
     )
 
     # Prepare everything with our `accelerator`.
examples/pytorch/question-answering/run_qa_no_trainer.py

@@ -780,8 +780,8 @@ def main():
     lr_scheduler = get_scheduler(
         name=args.lr_scheduler_type,
         optimizer=optimizer,
-        num_warmup_steps=args.num_warmup_steps * args.gradient_accumulation_steps,
-        num_training_steps=args.max_train_steps * args.gradient_accumulation_steps,
+        num_warmup_steps=args.num_warmup_steps * accelerator.num_processes,
+        num_training_steps=args.max_train_steps if overrode_max_train_steps else args.max_train_steps * accelerator.num_processes,
     )
 
     # Prepare everything with our `accelerator`.
examples/pytorch/semantic-segmentation/run_semantic_segmentation_no_trainer.py

@@ -513,8 +513,8 @@ def main():
     lr_scheduler = get_scheduler(
         name=args.lr_scheduler_type,
         optimizer=optimizer,
-        num_warmup_steps=args.num_warmup_steps * args.gradient_accumulation_steps,
-        num_training_steps=args.max_train_steps * args.gradient_accumulation_steps,
+        num_warmup_steps=args.num_warmup_steps * accelerator.num_processes,
+        num_training_steps=args.max_train_steps if overrode_max_train_steps else args.max_train_steps * accelerator.num_processes,
     )
 
     # Prepare everything with our `accelerator`.
examples/pytorch/summarization/run_summarization_no_trainer.py

@@ -580,8 +580,8 @@ def main():
     lr_scheduler = get_scheduler(
         name=args.lr_scheduler_type,
         optimizer=optimizer,
-        num_warmup_steps=args.num_warmup_steps * args.gradient_accumulation_steps,
-        num_training_steps=args.max_train_steps * args.gradient_accumulation_steps,
+        num_warmup_steps=args.num_warmup_steps * accelerator.num_processes,
+        num_training_steps=args.max_train_steps if overrode_max_train_steps else args.max_train_steps * accelerator.num_processes,
     )
 
     # Prepare everything with our `accelerator`.