Unverified Commit 19067711 authored by Sourab Mangrulkar, committed by GitHub

fix `tokenizer_type` to avoid error when loading checkpoint back (#20062)

parent 3502c202
@@ -648,7 +648,7 @@ def convert_checkpoint_from_transformers_to_megatron(args):
         "data_parallel_size": args.target_data_parallel_size,
         "make_vocab_size_divisible_by": args.make_vocab_size_divisible_by,
         "rank": 0,
-        "tokenizer_type": None,
+        "tokenizer_type": "GPT2BPETokenizer",
     }
     if config.activation_function == "gelu":
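
A minimal sketch of why the change matters, assuming the loader validates the saved args the way Megatron-style tooling typically does; the names `build_tokenizer` and `SUPPORTED_TOKENIZERS` below are illustrative, not the actual Megatron-LM API. With `tokenizer_type` set to `None`, such a check fails when the converted checkpoint is loaded back; writing `"GPT2BPETokenizer"` makes the round trip succeed.

```python
# Hypothetical sketch: the kind of validation that rejects a None tokenizer_type.
# SUPPORTED_TOKENIZERS and build_tokenizer are assumptions for illustration only.

SUPPORTED_TOKENIZERS = {"GPT2BPETokenizer", "SentencePieceTokenizer"}


def build_tokenizer(megatron_args: dict) -> str:
    """Mimic a loader-side check on the args saved inside the checkpoint."""
    tokenizer_type = megatron_args.get("tokenizer_type")
    if tokenizer_type not in SUPPORTED_TOKENIZERS:
        raise ValueError(f"Unknown tokenizer type: {tokenizer_type!r}")
    return tokenizer_type  # stand-in for constructing a real tokenizer


# Args of the shape the converter writes; concrete values here are placeholders.
megatron_args = {
    "data_parallel_size": 1,
    "make_vocab_size_divisible_by": 128,
    "rank": 0,
    "tokenizer_type": "GPT2BPETokenizer",  # was None before this fix
}

print(build_tokenizer(megatron_args))  # loads cleanly; None would raise
```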