Traceback (most recent call last):
  File "/usr/local/lib/python3.10/site-packages/transformers/utils/hub.py", line 398, in cached_file
    resolved_file = hf_hub_download(
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py", line 106, in _inner_fn
    validate_repo_id(arg_value)
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py", line 154, in validate_repo_id
    raise HFValidationError(
huggingface_hub.errors.HFValidationError: Repo id must be in the form 'repo_name' or 'namespace/repo_name': '/mnt/fs/user/panhw/Megatron-LM-main/Meta-Llama-3-8B'. Use `repo_type` argument if needed.

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/mnt/fs/user/llama/panhw/Megatron-LM-main/pretrain_gpt.py", line 248, in <module>
    pretrain(
  File "/mnt/fs/user/llama/panhw/Megatron-LM-main/megatron/training/training.py", line 191, in pretrain
    initialize_megatron(extra_args_provider=extra_args_provider,
  File "/mnt/fs/user/llama/panhw/Megatron-LM-main/megatron/training/initialize.py", line 62, in initialize_megatron
    set_global_variables(args)
  File "/mnt/fs/user/llama/panhw/Megatron-LM-main/megatron/training/global_vars.py", line 100, in set_global_variables
    _ = _build_tokenizer(args)
  File "/mnt/fs/user/llama/panhw/Megatron-LM-main/megatron/training/global_vars.py", line 130, in _build_tokenizer
    _GLOBAL_TOKENIZER = build_tokenizer(args)
  File "/mnt/fs/user/llama/panhw/Megatron-LM-main/megatron/training/tokenizer/tokenizer.py", line 42, in build_tokenizer
    tokenizer = _HuggingFaceTokenizer(args.tokenizer_model)
  File "/mnt/fs/user/llama/panhw/Megatron-LM-main/megatron/training/tokenizer/tokenizer.py", line 93, in __init__
    self._tokenizer = transformers.AutoTokenizer.from_pretrained("/mnt/fs/user/panhw/Megatron-LM-main/Meta-Llama-3-8B")
  File "/usr/local/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 767, in from_pretrained
    tokenizer_config = get_tokenizer_config(pretrained_model_name_or_path, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 600, in get_tokenizer_config
    resolved_config_file = cached_file(
  File "/usr/local/lib/python3.10/site-packages/transformers/utils/hub.py", line 462, in cached_file
    raise EnvironmentError(
OSError: Incorrect path_or_model_id: '/mnt/fs/user/panhw/Megatron-LM-main/Meta-Llama-3-8B'. Please provide either the path to a local folder or the repo_id of a model on the Hub.
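The likely cause is a path problem rather than a Hugging Face issue: the Megatron-LM checkout in the second traceback lives under /mnt/fs/user/llama/panhw/Megatron-LM-main, while the tokenizer path passed via args.tokenizer_model is /mnt/fs/user/panhw/Megatron-LM-main/Meta-Llama-3-8B (no "llama" segment). When transformers cannot resolve that string as an existing local directory, it falls back to treating it as a Hub repo id, which then fails repo-id validation exactly as shown above. A minimal diagnostic sketch, assuming the path taken from the traceback (the mismatch hypothesis is not confirmed by the log itself):

import os
from transformers import AutoTokenizer

# Path copied verbatim from the traceback; the missing "llama" segment
# (compare /mnt/fs/user/llama/panhw/...) may be the actual problem.
tokenizer_path = "/mnt/fs/user/panhw/Megatron-LM-main/Meta-Llama-3-8B"

# If this prints False, transformers treats the string as a Hub repo id and
# raises the HFValidationError / OSError seen above.
print(os.path.isdir(tokenizer_path))

# get_tokenizer_config (visible in the traceback) looks for this file inside the folder.
print(os.path.isfile(os.path.join(tokenizer_path, "tokenizer_config.json")))

# Loading should only succeed once the directory exists and holds the tokenizer files.
tok = AutoTokenizer.from_pretrained(tokenizer_path)
print(type(tok).__name__)

If the directory check passes on the login node but the job still fails, it may be worth confirming that /mnt/fs is also mounted inside the container or launcher environment where pretrain_gpt.py actually runs.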