Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
OpenDAS
ColossalAI
Commits
26cd6d85
Unverified
Commit
26cd6d85
authored
Sep 25, 2023
by
flybird11111
Committed by
GitHub
Sep 25, 2023
Browse files
[fix] fix weekly running example (#4787)
* [fix] fix weekly running example * [fix] fix weekly running example
parent
d512a4d3
Changes
4
Show whitespace changes
Inline
Side-by-side
Showing
4 changed files
with
5 additions
and
5 deletions
+5
-5
examples/tutorial/new_api/cifar_resnet/train.py
examples/tutorial/new_api/cifar_resnet/train.py
+1
-1
examples/tutorial/new_api/cifar_vit/train.py
examples/tutorial/new_api/cifar_vit/train.py
+1
-1
examples/tutorial/new_api/glue_bert/finetune.py
examples/tutorial/new_api/glue_bert/finetune.py
+2
-2
examples/tutorial/new_api/glue_bert/test_ci.sh
examples/tutorial/new_api/glue_bert/test_ci.sh
+1
-1
No files found.
examples/tutorial/new_api/cifar_resnet/train.py
View file @
26cd6d85
...
@@ -145,7 +145,7 @@ def main():
...
@@ -145,7 +145,7 @@ def main():
 if args.plugin.startswith("torch_ddp"):
     plugin = TorchDDPPlugin()
 elif args.plugin == "gemini":
-    plugin = GeminiPlugin(placement_policy="cuda", strict_ddp_mode=True, initial_scale=2**5)
+    plugin = GeminiPlugin(placement_policy="static", strict_ddp_mode=True, initial_scale=2**5)
 elif args.plugin == "low_level_zero":
     plugin = LowLevelZeroPlugin(initial_scale=2**5)
...
...
examples/tutorial/new_api/cifar_vit/train.py
View file @
26cd6d85
...
@@ -165,7 +165,7 @@ def main():
...
@@ -165,7 +165,7 @@ def main():
 if args.plugin.startswith("torch_ddp"):
     plugin = TorchDDPPlugin()
 elif args.plugin == "gemini":
-    plugin = GeminiPlugin(placement_policy="cuda", strict_ddp_mode=True, initial_scale=2**5)
+    plugin = GeminiPlugin(placement_policy="static", strict_ddp_mode=True, initial_scale=2**5)
 elif args.plugin == "low_level_zero":
     plugin = LowLevelZeroPlugin(initial_scale=2**5)
...
...
examples/tutorial/new_api/glue_bert/finetune.py
View file @
26cd6d85
...
@@ -21,7 +21,7 @@ from colossalai.utils import get_current_device
...
@@ -21,7 +21,7 @@ from colossalai.utils import get_current_device
 # ==============================
 # Prepare Hyperparameters
 # ==============================
-NUM_EPOCHS = 3
+NUM_EPOCHS = 1
 BATCH_SIZE = 32
 LEARNING_RATE = 2.4e-5
 WEIGHT_DECAY = 0.01
...
@@ -141,7 +141,7 @@ def main():
...
@@ -141,7 +141,7 @@ def main():
 if args.plugin.startswith("torch_ddp"):
     plugin = TorchDDPPlugin()
 elif args.plugin == "gemini":
-    plugin = GeminiPlugin(placement_policy="cuda", strict_ddp_mode=True, initial_scale=2**5)
+    plugin = GeminiPlugin(placement_policy="static", strict_ddp_mode=True, initial_scale=2**5)
 elif args.plugin == "low_level_zero":
     plugin = LowLevelZeroPlugin(initial_scale=2**5)
...
...
examples/tutorial/new_api/glue_bert/test_ci.sh
View file @
26cd6d85
...
@@ -4,5 +4,5 @@ set -xe
...
@@ -4,5 +4,5 @@ set -xe
 pip install -r requirements.txt
 for plugin in "torch_ddp" "torch_ddp_fp16" "gemini" "low_level_zero"; do
-    torchrun --standalone --nproc_per_node 4 finetune.py --target_f1 0.86 --plugin $plugin
+    torchrun --standalone --nproc_per_node 4 finetune.py --target_f1 0.80 --plugin $plugin
 done
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment