Unverified Commit db8c7cae authored by Viktor Scherbakov, committed by GitHub

Empty list in defaults for LLaMA special tokens during weights conversion (#32342)

empty list in defaults
parent 2229ebe7
@@ -449,7 +449,8 @@ def main():
     if args.model_size is None and args.num_shards is None:
         raise ValueError("You have to set at least `num_shards` if you are not giving the `model_size`")
     if args.special_tokens is None:
-        args.special_tokens = DEFAULT_LLAMA_SPECIAL_TOKENS[str(args.llama_version)]
+        # no special tokens by default
+        args.special_tokens = DEFAULT_LLAMA_SPECIAL_TOKENS.get(str(args.llama_version), [])
     spm_path = os.path.join(args.input_dir, "tokenizer.model")
     vocab_size = len(
...
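A minimal sketch of the behavioral change, for readers skimming the diff: direct dict indexing raises KeyError when a LLaMA version has no entry in the defaults mapping, while dict.get with an empty-list default lets conversion proceed with no special tokens. The contents of DEFAULT_LLAMA_SPECIAL_TOKENS below are illustrative placeholders, not the conversion script's actual mapping.

# Placeholder mapping, shaped like the script's version -> tokens dict.
DEFAULT_LLAMA_SPECIAL_TOKENS = {
    "3": ["<|begin_of_text|>", "<|end_of_text|>"],
}

llama_version = 2  # a version with no entry in the mapping

# Before this commit: direct indexing raises KeyError for unknown versions.
try:
    special_tokens = DEFAULT_LLAMA_SPECIAL_TOKENS[str(llama_version)]
except KeyError:
    print("KeyError: conversion would abort for versions without defaults")

# After this commit: .get() falls back to an empty list instead of raising.
special_tokens = DEFAULT_LLAMA_SPECIAL_TOKENS.get(str(llama_version), [])
assert special_tokens == []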