OpenDAS / Megatron-LM

Commit a20445d3
Authored Oct 15, 2021 by Jared Casper

Fix finetuning tasks after T5 pipeline merge.

Parent: 5478d67e
1 changed file with 3 additions and 1 deletion
tasks/finetune_utils.py (+3, -1)
@@ -25,6 +25,7 @@ from megatron import get_timers
 from megatron import mpu
 from megatron.checkpointing import load_checkpoint
 from megatron.checkpointing import save_checkpoint
+from megatron.model import ModelType
 from megatron.training import evaluate_and_print_results
 from megatron.training import setup_model_and_optimizer
 from megatron.training import train_step
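The one addition here is the import of ModelType, the enum introduced by the T5 pipeline merge. For reference, it is a small enum along these lines (paraphrased from megatron/model/enums.py of this era; the exact member values are an assumption):

import enum

class ModelType(enum.Enum):
    # Single-stack models such as BERT or GPT.
    encoder_or_decoder = 1
    # Dual-stack models such as T5; pipeline parallelism needs this
    # to place stages across the encoder/decoder boundary.
    encoder_and_decoder = 2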
@@ -248,6 +249,7 @@ def _train(model, optimizer, lr_scheduler, forward_step,
 def finetune(train_valid_datasets_provider, model_provider,
+             model_type=ModelType.encoder_or_decoder,
              forward_step=_cross_entropy_forward_step,
              end_of_epoch_callback_provider=None,
              task_collate_fn=None):
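finetune() now accepts a model_type keyword that defaults to ModelType.encoder_or_decoder, so existing BERT/GPT task entry points are unaffected while a T5-style task can opt in. A minimal, hypothetical caller sketch (the provider stubs and the __main__ wiring are placeholders, not code from this commit):

from megatron.model import ModelType
from tasks.finetune_utils import finetune

def train_valid_datasets_provider():
    """Return (train_dataset, valid_dataset) for the task."""
    raise NotImplementedError

def model_provider(pre_process=True, post_process=True):
    """Build and return the task model for this pipeline stage."""
    raise NotImplementedError

if __name__ == '__main__':
    # A T5-style task has both an encoder and a decoder stack.
    finetune(train_valid_datasets_provider, model_provider,
             model_type=ModelType.encoder_and_decoder)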
@@ -277,7 +279,7 @@ def finetune(train_valid_datasets_provider, model_provider,
     # Build model, optimizer and learning rate scheduler.
     timers('model and optimizer').start()
-    model, optimizer, lr_scheduler = setup_model_and_optimizer(model_provider)
+    model, optimizer, lr_scheduler = setup_model_and_optimizer(model_provider, model_type)
     timers('model and optimizer').stop()
 
     # If pretrained checkpoint is provided and we have not trained for
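This call site is the actual bug: after the T5 pipeline merge, setup_model_and_optimizer() takes the model type as an additional positional argument so it can configure pipeline stages for encoder-decoder models, and the old one-argument call would presumably fail before training started with something like:

TypeError: setup_model_and_optimizer() missing 1 required positional argument: 'model_type'

Forwarding the new model_type parameter restores finetuning for all tasks, and the encoder_or_decoder default keeps the fix source-compatible with the existing single-stack tasks.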