Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
OpenDAS
Megatron-LM
Commits
7625a9d2
Commit
7625a9d2
authored
Feb 17, 2023
by
Maanu Grover
Committed by
Jared Casper
Feb 17, 2023
Browse files
Fix pretraining examples
parent
285068c8
Changes
3
Hide whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
134 additions
and
99 deletions
+134
-99
examples/pretrain_bert.sh
examples/pretrain_bert.sh
+43
-30
examples/pretrain_gpt.sh
examples/pretrain_gpt.sh
+44
-34
examples/pretrain_t5.sh
examples/pretrain_t5.sh
+47
-35
No files found.
examples/pretrain_bert.sh
View file @
7625a9d2
#!/bin/bash

# Pretrains a BERT model on a single GPU.
# Edit the <Specify ...> placeholders below before running.

export CUDA_DEVICE_MAX_CONNECTIONS=1

CHECKPOINT_PATH=<Specify path>
VOCAB_FILE=<Specify path to file>/bert-vocab.txt
DATA_PATH=<Specify path and file prefix>_text_sentence

# Model and optimizer hyperparameters.
BERT_ARGS="
    --num-layers 24 \
    --hidden-size 1024 \
    --num-attention-heads 16 \
    --seq-length 512 \
    --max-position-embeddings 512 \
    --micro-batch-size 4 \
    --global-batch-size 8 \
    --lr 0.0001 \
    --train-iters 2000000 \
    --lr-decay-iters 990000 \
    --lr-decay-style linear \
    --min-lr 0.00001 \
    --weight-decay 1e-2 \
    --lr-warmup-fraction .01 \
    --clip-grad 1.0 \
    --fp16
"

# Dataset location and train/valid/test split.
DATA_ARGS="
    --data-path $DATA_PATH \
    --vocab-file $VOCAB_FILE \
    --data-impl mmap \
    --split 949,50,1
"

# Logging, checkpointing, and evaluation cadence.
OUTPUT_ARGS="
    --log-interval 100 \
    --save-interval 10000 \
    --eval-interval 1000 \
    --eval-iters 10
"

# Launch with torchrun so the distributed environment is set up
# (replaces the deprecated `python -m torch.distributed.launch` style).
torchrun pretrain_bert.py \
    $BERT_ARGS \
    $DATA_ARGS \
    $OUTPUT_ARGS \
    --save $CHECKPOINT_PATH \
    --load $CHECKPOINT_PATH
examples/pretrain_gpt.sh
View file @
7625a9d2
#!/bin/bash

# Runs the "345M" parameter model
# Edit the <Specify ...> placeholders below before running.

export CUDA_DEVICE_MAX_CONNECTIONS=1

CHECKPOINT_PATH=<Specify path>
VOCAB_FILE=<Specify path to file>/gpt2-vocab.json
MERGE_FILE=<Specify path to file>/gpt2-merges.txt
DATA_PATH=<Specify path and file prefix>_text_document

# Model and optimizer hyperparameters.
GPT_ARGS="
    --num-layers 24 \
    --hidden-size 1024 \
    --num-attention-heads 16 \
    --seq-length 1024 \
    --max-position-embeddings 1024 \
    --micro-batch-size 4 \
    --global-batch-size 8 \
    --lr 0.00015 \
    --train-iters 500000 \
    --lr-decay-iters 320000 \
    --lr-decay-style cosine \
    --min-lr 1.0e-5 \
    --weight-decay 1e-2 \
    --lr-warmup-fraction .01 \
    --clip-grad 1.0 \
    --fp16
"

# Dataset location (GPT-2 BPE needs both vocab and merges files).
DATA_ARGS="
    --data-path $DATA_PATH \
    --vocab-file $VOCAB_FILE \
    --merge-file $MERGE_FILE \
    --data-impl mmap \
    --split 949,50,1
"

# Logging, checkpointing, and evaluation cadence.
OUTPUT_ARGS="
    --log-interval 100 \
    --save-interval 10000 \
    --eval-interval 1000 \
    --eval-iters 10
"

# Launch with torchrun so the distributed environment is set up
# (replaces the deprecated `python -m torch.distributed.launch` style).
torchrun pretrain_gpt.py \
    $GPT_ARGS \
    $DATA_ARGS \
    $OUTPUT_ARGS \
    --save $CHECKPOINT_PATH \
    --load $CHECKPOINT_PATH
examples/pretrain_t5.sh
View file @
7625a9d2
#!/bin/bash

# Pretrains a T5 (encoder-decoder) model on a single GPU.
# Edit the <Specify ...> placeholders below before running.

export CUDA_DEVICE_MAX_CONNECTIONS=1

CHECKPOINT_PATH=<Specify path>
VOCAB_FILE=<Specify path to file>/t5-vocab.txt
DATA_PATH=<Specify path and file prefix>_text_sentence

# Model and optimizer hyperparameters.
# --vocab-extra-ids reserves sentinel tokens used by the T5
# span-corruption objective.
T5_ARGS="
    --num-layers 12 \
    --hidden-size 768 \
    --num-attention-heads 12 \
    --kv-channels 64 \
    --ffn-hidden-size 3072 \
    --encoder-seq-length 512 \
    --decoder-seq-length 128 \
    --max-position-embeddings 512 \
    --micro-batch-size 16 \
    --global-batch-size 16 \
    --lr 0.0001 \
    --train-iters 1000000 \
    --lr-decay-iters 1000000 \
    --lr-decay-style linear \
    --min-lr 0.00001 \
    --weight-decay 1e-2 \
    --lr-warmup-fraction .01 \
    --clip-grad 1.0 \
    --fp16 \
    --vocab-extra-ids 100
"

# Dataset location and train/valid/test split.
DATA_ARGS="
    --data-path $DATA_PATH \
    --vocab-file $VOCAB_FILE \
    --data-impl mmap \
    --split 949,50,1
"

# Logging, checkpointing, and evaluation cadence.
OUTPUT_ARGS="
    --log-interval 100 \
    --save-interval 10000 \
    --eval-interval 1000 \
    --eval-iters 10
"

# Launch with torchrun so the distributed environment is set up
# (replaces the deprecated `python -m torch.distributed.launch` style).
torchrun pretrain_t5.py \
    $T5_ARGS \
    $DATA_ARGS \
    $OUTPUT_ARGS \
    --save $CHECKPOINT_PATH \
    --load $CHECKPOINT_PATH
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment