Commit 3f3ba5e7 authored by Mostofa Patwary

addressed the comments

parent 3d96729a
@@ -317,11 +317,10 @@ def setup_model_and_optimizer(model_provider_func):
         assert args.DDP_impl == 'local'

     # get model without FP16 and/or TorchDDP wrappers
-    unwrapped_model = unwrap_model(model)
-    if args.iteration == 0 and hasattr(unwrapped_model,
-                                       'init_state_dict_from_bert'):
+    if args.iteration == 0 and len(unwrapped_model) == 1 \
+        and hasattr(unwrapped_model[0], 'init_state_dict_from_bert'):
         print_rank_0("Initializing ICT from pretrained BERT model")
-        unwrapped_model.init_state_dict_from_bert()
+        unwrapped_model[0].init_state_dict_from_bert()
         if args.fp16:
             optimizer.reload_model_params()
...
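For context, the change treats the unwrapped model as a list of module chunks, so the BERT-based initialization is attempted only when there is exactly one chunk and that chunk exposes init_state_dict_from_bert. Below is a minimal, self-contained sketch of that guard; ICTQueryModel, unwrap_model, and maybe_init_from_bert are hypothetical stand-ins, and only the guard shape (iteration check, len(...) == 1, hasattr, indexing [0]) mirrors the diff above. It is illustrative, not the repository's actual implementation.

    # Minimal illustrative sketch of the guarded initialization shown in the diff.
    # ICTQueryModel and unwrap_model here are hypothetical stand-ins.

    class ICTQueryModel:
        def init_state_dict_from_bert(self):
            print("Initializing ICT from pretrained BERT model")

    def unwrap_model(model_chunks):
        # Stand-in: return the list of underlying modules unchanged.
        return list(model_chunks)

    def maybe_init_from_bert(model_chunks, iteration):
        unwrapped_model = unwrap_model(model_chunks)
        # Initialize from BERT only at iteration 0, and only when the model is
        # a single chunk that actually provides the initializer.
        if iteration == 0 and len(unwrapped_model) == 1 \
                and hasattr(unwrapped_model[0], 'init_state_dict_from_bert'):
            unwrapped_model[0].init_state_dict_from_bert()

    maybe_init_from_bert([ICTQueryModel()], iteration=0)                   # runs the init
    maybe_init_from_bert([ICTQueryModel(), ICTQueryModel()], iteration=0)  # skipped: multiple chunks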