Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
chenpangpang
transformers
Commits
072188d6
Unverified
Commit
072188d6
authored
Jun 06, 2023
by
Zachary Mueller
Committed by
GitHub
Jun 06, 2023
Browse files
Act on deprecations in Accelerate no_trainer examples (#24053)
Act on deprecation
parent
ff4c0fc7
Changes
12
Show whitespace changes
Inline
Side-by-side
Showing
12 changed files
with
14 additions
and
12 deletions
+14
-12
examples/pytorch/image-classification/run_image_classification_no_trainer.py
...age-classification/run_image_classification_no_trainer.py
+1
-1
examples/pytorch/image-pretraining/run_mim_no_trainer.py
examples/pytorch/image-pretraining/run_mim_no_trainer.py
+1
-1
examples/pytorch/language-modeling/run_clm_no_trainer.py
examples/pytorch/language-modeling/run_clm_no_trainer.py
+1
-1
examples/pytorch/language-modeling/run_mlm_no_trainer.py
examples/pytorch/language-modeling/run_mlm_no_trainer.py
+1
-1
examples/pytorch/multiple-choice/run_swag_no_trainer.py
examples/pytorch/multiple-choice/run_swag_no_trainer.py
+1
-1
examples/pytorch/question-answering/run_qa_beam_search_no_trainer.py
...torch/question-answering/run_qa_beam_search_no_trainer.py
+1
-1
examples/pytorch/question-answering/run_qa_no_trainer.py
examples/pytorch/question-answering/run_qa_no_trainer.py
+1
-1
examples/pytorch/semantic-segmentation/run_semantic_segmentation_no_trainer.py
...ntic-segmentation/run_semantic_segmentation_no_trainer.py
+1
-1
examples/pytorch/summarization/run_summarization_no_trainer.py
...les/pytorch/summarization/run_summarization_no_trainer.py
+1
-1
examples/pytorch/token-classification/run_ner_no_trainer.py
examples/pytorch/token-classification/run_ner_no_trainer.py
+1
-1
examples/pytorch/translation/run_translation_no_trainer.py
examples/pytorch/translation/run_translation_no_trainer.py
+1
-1
examples/research_projects/codeparrot/scripts/codeparrot_training.py
...search_projects/codeparrot/scripts/codeparrot_training.py
+3
-1
No files found.
examples/pytorch/image-classification/run_image_classification_no_trainer.py
View file @
072188d6
...
...
@@ -210,7 +210,7 @@ def main():
     if args.with_tracking:
         accelerator_log_kwargs["log_with"] = args.report_to
-        accelerator_log_kwargs["logging_dir"] = args.output_dir
+        accelerator_log_kwargs["project_dir"] = args.output_dir

     accelerator = Accelerator(gradient_accumulation_steps=args.gradient_accumulation_steps, **accelerator_log_kwargs)
...
...
examples/pytorch/image-pretraining/run_mim_no_trainer.py
View file @
072188d6
...
...
@@ -378,7 +378,7 @@ def main():
     if args.with_tracking:
         accelerator_log_kwargs["log_with"] = args.report_to
-        accelerator_log_kwargs["logging_dir"] = args.output_dir
+        accelerator_log_kwargs["project_dir"] = args.output_dir

     accelerator = Accelerator(gradient_accumulation_steps=args.gradient_accumulation_steps,
...
...
examples/pytorch/language-modeling/run_clm_no_trainer.py
View file @
072188d6
...
...
@@ -261,7 +261,7 @@ def main():
     if args.with_tracking:
         accelerator_log_kwargs["log_with"] = args.report_to
-        accelerator_log_kwargs["logging_dir"] = args.output_dir
+        accelerator_log_kwargs["project_dir"] = args.output_dir

     accelerator = Accelerator(gradient_accumulation_steps=args.gradient_accumulation_steps, **accelerator_log_kwargs)
...
...
examples/pytorch/language-modeling/run_mlm_no_trainer.py
View file @
072188d6
...
...
@@ -270,7 +270,7 @@ def main():
     if args.with_tracking:
         accelerator_log_kwargs["log_with"] = args.report_to
-        accelerator_log_kwargs["logging_dir"] = args.output_dir
+        accelerator_log_kwargs["project_dir"] = args.output_dir

     accelerator = Accelerator(gradient_accumulation_steps=args.gradient_accumulation_steps, **accelerator_log_kwargs)
...
...
examples/pytorch/multiple-choice/run_swag_no_trainer.py
View file @
072188d6
...
...
@@ -288,7 +288,7 @@ def main():
     if args.with_tracking:
         accelerator_log_kwargs["log_with"] = args.report_to
-        accelerator_log_kwargs["logging_dir"] = args.output_dir
+        accelerator_log_kwargs["project_dir"] = args.output_dir

     accelerator = Accelerator(gradient_accumulation_steps=args.gradient_accumulation_steps, **accelerator_log_kwargs)
...
...
examples/pytorch/question-answering/run_qa_beam_search_no_trainer.py
View file @
072188d6
...
...
@@ -303,7 +303,7 @@ def main():
     if args.with_tracking:
         accelerator_log_kwargs["log_with"] = args.report_to
-        accelerator_log_kwargs["logging_dir"] = args.output_dir
+        accelerator_log_kwargs["project_dir"] = args.output_dir

     accelerator = Accelerator(gradient_accumulation_steps=args.gradient_accumulation_steps, **accelerator_log_kwargs)
...
...
examples/pytorch/question-answering/run_qa_no_trainer.py
View file @
072188d6
...
...
@@ -341,7 +341,7 @@ def main():
     if args.with_tracking:
         accelerator_log_kwargs["log_with"] = args.report_to
-        accelerator_log_kwargs["logging_dir"] = args.output_dir
+        accelerator_log_kwargs["project_dir"] = args.output_dir

     accelerator = Accelerator(gradient_accumulation_steps=args.gradient_accumulation_steps, **accelerator_log_kwargs)
...
...
examples/pytorch/semantic-segmentation/run_semantic_segmentation_no_trainer.py
View file @
072188d6
...
...
@@ -330,7 +330,7 @@ def main():
     if args.with_tracking:
         accelerator_log_kwargs["log_with"] = args.report_to
-        accelerator_log_kwargs["logging_dir"] = args.output_dir
+        accelerator_log_kwargs["project_dir"] = args.output_dir

     accelerator = Accelerator(gradient_accumulation_steps=args.gradient_accumulation_steps, **accelerator_log_kwargs)
...
...
examples/pytorch/summarization/run_summarization_no_trainer.py
View file @
072188d6
...
...
@@ -325,7 +325,7 @@ def main():
     if args.with_tracking:
         accelerator_log_kwargs["log_with"] = args.report_to
-        accelerator_log_kwargs["logging_dir"] = args.output_dir
+        accelerator_log_kwargs["project_dir"] = args.output_dir

     accelerator = Accelerator(gradient_accumulation_steps=args.gradient_accumulation_steps, **accelerator_log_kwargs)
     if args.source_prefix is None and args.model_name_or_path in [
...
...
examples/pytorch/token-classification/run_ner_no_trainer.py
View file @
072188d6
...
...
@@ -271,7 +271,7 @@ def main():
     # If we're using tracking, we also need to initialize it here and it will by default pick up all supported trackers
     # in the environment
     accelerator = (
-        Accelerator(log_with=args.report_to, logging_dir=args.output_dir) if args.with_tracking else Accelerator()
+        Accelerator(log_with=args.report_to, project_dir=args.output_dir) if args.with_tracking else Accelerator()
     )
     # Make one log on every process with the configuration for debugging.
     logging.basicConfig(
...
...
examples/pytorch/translation/run_translation_no_trainer.py
View file @
072188d6
...
...
@@ -316,7 +316,7 @@ def main():
     # If we're using tracking, we also need to initialize it here and it will by default pick up all supported trackers
     # in the environment
     accelerator = (
-        Accelerator(log_with=args.report_to, logging_dir=args.output_dir) if args.with_tracking else Accelerator()
+        Accelerator(log_with=args.report_to, project_dir=args.output_dir) if args.with_tracking else Accelerator()
     )
     # Make one log on every process with the configuration for debugging.
...
...
examples/research_projects/codeparrot/scripts/codeparrot_training.py
View file @
072188d6
...
...
@@ -7,6 +7,7 @@ from pathlib import Path
 import datasets
 import torch
 from accelerate import Accelerator, DistributedType
+from accelerate.utils import ProjectConfiguration
 from arguments import TrainingArguments
 from datasets import load_dataset
 from huggingface_hub import Repository
...
...
@@ -195,7 +196,8 @@ parser = HfArgumentParser(TrainingArguments)
 args = parser.parse_args()

 # Accelerator
-accelerator = Accelerator(log_with=["wandb", "tensorboard"], logging_dir=f"{args.save_dir}/log")
+config = ProjectConfiguration(project_dir=args.save_dir, logging_dir="log")
+accelerator = Accelerator(log_with=["wandb", "tensorboard"], project_config=config)
 acc_state = {str(k): str(v) for k, v in accelerator.state.__dict__.items()}
 args = Namespace(**vars(args), **acc_state)
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment