Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
chenpangpang
transformers
Commits
aec10d16
"git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "f9a2a9e32bf4f6dc128decd0c124fa1f5507532e"
Unverified
Commit
aec10d16
authored
Apr 18, 2023
by
Zachary Mueller
Committed by
GitHub
Apr 18, 2023
Browse files
Update accelerate version + warning check fix (#22833)
parent
78cda46f
Changes
3
Hide whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
4 additions
and
4 deletions
+4
-4
setup.py
setup.py
+1
-1
src/transformers/dependency_versions_table.py
src/transformers/dependency_versions_table.py
+1
-1
src/transformers/training_args.py
src/transformers/training_args.py
+2
-2
No files found.
setup.py
View file @
aec10d16
...
@@ -102,7 +102,7 @@ if stale_egg_info.exists():
 # 2. once modified, run: `make deps_table_update` to update src/transformers/dependency_versions_table.py
 _deps = [
     "Pillow",
-    "accelerate>=0.10.0",
+    "accelerate>=0.17.0",
     "av==9.2.0",  # Latest version of PyAV (10.0.0) has issues with audio stream.
     "beautifulsoup4",
     "black~=23.1",
...
...
...
src/transformers/dependency_versions_table.py
View file @
aec10d16
...
@@ -3,7 +3,7 @@
 # 2. run `make deps_table_update``
 deps = {
     "Pillow": "Pillow",
-    "accelerate": "accelerate>=0.10.0",
+    "accelerate": "accelerate>=0.17.0",
     "av": "av==9.2.0",
     "beautifulsoup4": "beautifulsoup4",
     "black": "black~=23.1",
...
...
...
src/transformers/training_args.py
View file @
aec10d16
...
@@ -1552,10 +1552,10 @@ class TrainingArguments:
         if (
             torch.distributed.is_available()
             and torch.distributed.is_initialized()
-            and self.distributed_state.distributed_type != DistributedType.NO
+            and self.distributed_state.distributed_type == DistributedType.NO
         ):
             logger.warning(
-                "torch.distributed process group is initialized, but parallel_mode == ParallelMode.DISTRIBUTED. "
+                "torch.distributed process group is initialized, but parallel_mode != ParallelMode.DISTRIBUTED. "
                 "In order to use Torch DDP, launch your script with `python -m torch.distributed.launch"
             )
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment