Unverified Commit 224da5df authored by Yih-Dar's avatar Yih-Dar Committed by GitHub
Browse files

update `use_auth_token` -> `token` (#25083)



* update

---------
Co-authored-by: default avatarydshieh <ydshieh@users.noreply.github.com>
parent c53c8e49
...@@ -435,6 +435,8 @@ class PretrainedConfig(PushToHubMixin): ...@@ -435,6 +435,8 @@ class PretrainedConfig(PushToHubMixin):
kwargs (`Dict[str, Any]`, *optional*): kwargs (`Dict[str, Any]`, *optional*):
Additional key word arguments passed along to the [`~utils.PushToHubMixin.push_to_hub`] method. Additional key word arguments passed along to the [`~utils.PushToHubMixin.push_to_hub`] method.
""" """
self._set_token_in_kwargs(kwargs)
if os.path.isfile(save_directory): if os.path.isfile(save_directory):
raise AssertionError(f"Provided path ({save_directory}) should be a directory, not a file") raise AssertionError(f"Provided path ({save_directory}) should be a directory, not a file")
...@@ -463,11 +465,11 @@ class PretrainedConfig(PushToHubMixin): ...@@ -463,11 +465,11 @@ class PretrainedConfig(PushToHubMixin):
repo_id, repo_id,
files_timestamps, files_timestamps,
commit_message=commit_message, commit_message=commit_message,
token=kwargs.get("use_auth_token"), token=kwargs.get("token"),
) )
@classmethod @staticmethod
def _set_token_in_kwargs(self, kwargs, token=None): def _set_token_in_kwargs(kwargs, token=None):
"""Temporary method to deal with `token` and `use_auth_token`. """Temporary method to deal with `token` and `use_auth_token`.
This method is to avoid apply the same changes in all model config classes that overwrite `from_pretrained`. This method is to avoid apply the same changes in all model config classes that overwrite `from_pretrained`.
...@@ -490,8 +492,7 @@ class PretrainedConfig(PushToHubMixin): ...@@ -490,8 +492,7 @@ class PretrainedConfig(PushToHubMixin):
token = use_auth_token token = use_auth_token
if token is not None: if token is not None:
# change to `token` in a follow-up PR kwargs["token"] = token
kwargs["use_auth_token"] = token
@classmethod @classmethod
def from_pretrained( def from_pretrained(
...@@ -612,6 +613,8 @@ class PretrainedConfig(PushToHubMixin): ...@@ -612,6 +613,8 @@ class PretrainedConfig(PushToHubMixin):
`Tuple[Dict, Dict]`: The dictionary(ies) that will be used to instantiate the configuration object. `Tuple[Dict, Dict]`: The dictionary(ies) that will be used to instantiate the configuration object.
""" """
cls._set_token_in_kwargs(kwargs)
original_kwargs = copy.deepcopy(kwargs) original_kwargs = copy.deepcopy(kwargs)
# Get config dict associated with the base config file # Get config dict associated with the base config file
config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs)
...@@ -635,7 +638,7 @@ class PretrainedConfig(PushToHubMixin): ...@@ -635,7 +638,7 @@ class PretrainedConfig(PushToHubMixin):
force_download = kwargs.pop("force_download", False) force_download = kwargs.pop("force_download", False)
resume_download = kwargs.pop("resume_download", False) resume_download = kwargs.pop("resume_download", False)
proxies = kwargs.pop("proxies", None) proxies = kwargs.pop("proxies", None)
use_auth_token = kwargs.pop("use_auth_token", None) token = kwargs.pop("token", None)
local_files_only = kwargs.pop("local_files_only", False) local_files_only = kwargs.pop("local_files_only", False)
revision = kwargs.pop("revision", None) revision = kwargs.pop("revision", None)
trust_remote_code = kwargs.pop("trust_remote_code", None) trust_remote_code = kwargs.pop("trust_remote_code", None)
...@@ -677,7 +680,7 @@ class PretrainedConfig(PushToHubMixin): ...@@ -677,7 +680,7 @@ class PretrainedConfig(PushToHubMixin):
proxies=proxies, proxies=proxies,
resume_download=resume_download, resume_download=resume_download,
local_files_only=local_files_only, local_files_only=local_files_only,
use_auth_token=use_auth_token, token=token,
user_agent=user_agent, user_agent=user_agent,
revision=revision, revision=revision,
subfolder=subfolder, subfolder=subfolder,
......
...@@ -360,8 +360,7 @@ class FeatureExtractionMixin(PushToHubMixin): ...@@ -360,8 +360,7 @@ class FeatureExtractionMixin(PushToHubMixin):
token = use_auth_token token = use_auth_token
if token is not None: if token is not None:
# change to `token` in a follow-up PR kwargs["token"] = token
kwargs["use_auth_token"] = token
feature_extractor_dict, kwargs = cls.get_feature_extractor_dict(pretrained_model_name_or_path, **kwargs) feature_extractor_dict, kwargs = cls.get_feature_extractor_dict(pretrained_model_name_or_path, **kwargs)
...@@ -382,6 +381,18 @@ class FeatureExtractionMixin(PushToHubMixin): ...@@ -382,6 +381,18 @@ class FeatureExtractionMixin(PushToHubMixin):
kwargs (`Dict[str, Any]`, *optional*): kwargs (`Dict[str, Any]`, *optional*):
Additional key word arguments passed along to the [`~utils.PushToHubMixin.push_to_hub`] method. Additional key word arguments passed along to the [`~utils.PushToHubMixin.push_to_hub`] method.
""" """
use_auth_token = kwargs.pop("use_auth_token", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if kwargs.get("token", None) is not None:
raise ValueError(
"`token` and `use_auth_token` are both specified. Please set only the argument `token`."
)
kwargs["token"] = use_auth_token
if os.path.isfile(save_directory): if os.path.isfile(save_directory):
raise AssertionError(f"Provided path ({save_directory}) should be a directory, not a file") raise AssertionError(f"Provided path ({save_directory}) should be a directory, not a file")
...@@ -410,7 +421,7 @@ class FeatureExtractionMixin(PushToHubMixin): ...@@ -410,7 +421,7 @@ class FeatureExtractionMixin(PushToHubMixin):
repo_id, repo_id,
files_timestamps, files_timestamps,
commit_message=commit_message, commit_message=commit_message,
token=kwargs.get("use_auth_token"), token=kwargs.get("token"),
) )
return [output_feature_extractor_file] return [output_feature_extractor_file]
...@@ -434,10 +445,21 @@ class FeatureExtractionMixin(PushToHubMixin): ...@@ -434,10 +445,21 @@ class FeatureExtractionMixin(PushToHubMixin):
force_download = kwargs.pop("force_download", False) force_download = kwargs.pop("force_download", False)
resume_download = kwargs.pop("resume_download", False) resume_download = kwargs.pop("resume_download", False)
proxies = kwargs.pop("proxies", None) proxies = kwargs.pop("proxies", None)
token = kwargs.pop("token", None)
use_auth_token = kwargs.pop("use_auth_token", None) use_auth_token = kwargs.pop("use_auth_token", None)
local_files_only = kwargs.pop("local_files_only", False) local_files_only = kwargs.pop("local_files_only", False)
revision = kwargs.pop("revision", None) revision = kwargs.pop("revision", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if token is not None:
raise ValueError(
"`token` and `use_auth_token` are both specified. Please set only the argument `token`."
)
token = use_auth_token
from_pipeline = kwargs.pop("_from_pipeline", None) from_pipeline = kwargs.pop("_from_pipeline", None)
from_auto_class = kwargs.pop("_from_auto", False) from_auto_class = kwargs.pop("_from_auto", False)
...@@ -471,7 +493,7 @@ class FeatureExtractionMixin(PushToHubMixin): ...@@ -471,7 +493,7 @@ class FeatureExtractionMixin(PushToHubMixin):
proxies=proxies, proxies=proxies,
resume_download=resume_download, resume_download=resume_download,
local_files_only=local_files_only, local_files_only=local_files_only,
use_auth_token=use_auth_token, token=token,
user_agent=user_agent, user_agent=user_agent,
revision=revision, revision=revision,
) )
......
...@@ -361,6 +361,18 @@ class GenerationConfig(PushToHubMixin): ...@@ -361,6 +361,18 @@ class GenerationConfig(PushToHubMixin):
kwargs (`Dict[str, Any]`, *optional*): kwargs (`Dict[str, Any]`, *optional*):
Additional key word arguments passed along to the [`~utils.PushToHubMixin.push_to_hub`] method. Additional key word arguments passed along to the [`~utils.PushToHubMixin.push_to_hub`] method.
""" """
use_auth_token = kwargs.pop("use_auth_token", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if kwargs.get("token", None) is not None:
raise ValueError(
"`token` and `use_auth_token` are both specified. Please set only the argument `token`."
)
kwargs["token"] = use_auth_token
config_file_name = config_file_name if config_file_name is not None else GENERATION_CONFIG_NAME config_file_name = config_file_name if config_file_name is not None else GENERATION_CONFIG_NAME
if os.path.isfile(save_directory): if os.path.isfile(save_directory):
...@@ -385,7 +397,7 @@ class GenerationConfig(PushToHubMixin): ...@@ -385,7 +397,7 @@ class GenerationConfig(PushToHubMixin):
repo_id, repo_id,
files_timestamps, files_timestamps,
commit_message=commit_message, commit_message=commit_message,
token=kwargs.get("use_auth_token"), token=kwargs.get("token"),
) )
@classmethod @classmethod
......
...@@ -189,8 +189,7 @@ class ImageProcessingMixin(PushToHubMixin): ...@@ -189,8 +189,7 @@ class ImageProcessingMixin(PushToHubMixin):
token = use_auth_token token = use_auth_token
if token is not None: if token is not None:
# change to `token` in a follow-up PR kwargs["token"] = token
kwargs["use_auth_token"] = token
image_processor_dict, kwargs = cls.get_image_processor_dict(pretrained_model_name_or_path, **kwargs) image_processor_dict, kwargs = cls.get_image_processor_dict(pretrained_model_name_or_path, **kwargs)
...@@ -211,6 +210,18 @@ class ImageProcessingMixin(PushToHubMixin): ...@@ -211,6 +210,18 @@ class ImageProcessingMixin(PushToHubMixin):
kwargs (`Dict[str, Any]`, *optional*): kwargs (`Dict[str, Any]`, *optional*):
Additional key word arguments passed along to the [`~utils.PushToHubMixin.push_to_hub`] method. Additional key word arguments passed along to the [`~utils.PushToHubMixin.push_to_hub`] method.
""" """
use_auth_token = kwargs.pop("use_auth_token", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if kwargs.get("token", None) is not None:
raise ValueError(
"`token` and `use_auth_token` are both specified. Please set only the argument `token`."
)
kwargs["token"] = use_auth_token
if os.path.isfile(save_directory): if os.path.isfile(save_directory):
raise AssertionError(f"Provided path ({save_directory}) should be a directory, not a file") raise AssertionError(f"Provided path ({save_directory}) should be a directory, not a file")
...@@ -239,7 +250,7 @@ class ImageProcessingMixin(PushToHubMixin): ...@@ -239,7 +250,7 @@ class ImageProcessingMixin(PushToHubMixin):
repo_id, repo_id,
files_timestamps, files_timestamps,
commit_message=commit_message, commit_message=commit_message,
token=kwargs.get("use_auth_token"), token=kwargs.get("token"),
) )
return [output_image_processor_file] return [output_image_processor_file]
...@@ -266,6 +277,7 @@ class ImageProcessingMixin(PushToHubMixin): ...@@ -266,6 +277,7 @@ class ImageProcessingMixin(PushToHubMixin):
force_download = kwargs.pop("force_download", False) force_download = kwargs.pop("force_download", False)
resume_download = kwargs.pop("resume_download", False) resume_download = kwargs.pop("resume_download", False)
proxies = kwargs.pop("proxies", None) proxies = kwargs.pop("proxies", None)
token = kwargs.pop("token", None)
use_auth_token = kwargs.pop("use_auth_token", None) use_auth_token = kwargs.pop("use_auth_token", None)
local_files_only = kwargs.pop("local_files_only", False) local_files_only = kwargs.pop("local_files_only", False)
revision = kwargs.pop("revision", None) revision = kwargs.pop("revision", None)
...@@ -274,6 +286,16 @@ class ImageProcessingMixin(PushToHubMixin): ...@@ -274,6 +286,16 @@ class ImageProcessingMixin(PushToHubMixin):
from_pipeline = kwargs.pop("_from_pipeline", None) from_pipeline = kwargs.pop("_from_pipeline", None)
from_auto_class = kwargs.pop("_from_auto", False) from_auto_class = kwargs.pop("_from_auto", False)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if token is not None:
raise ValueError(
"`token` and `use_auth_token` are both specified. Please set only the argument `token`."
)
token = use_auth_token
user_agent = {"file_type": "image processor", "from_auto_class": from_auto_class} user_agent = {"file_type": "image processor", "from_auto_class": from_auto_class}
if from_pipeline is not None: if from_pipeline is not None:
user_agent["using_pipeline"] = from_pipeline user_agent["using_pipeline"] = from_pipeline
...@@ -304,7 +326,7 @@ class ImageProcessingMixin(PushToHubMixin): ...@@ -304,7 +326,7 @@ class ImageProcessingMixin(PushToHubMixin):
proxies=proxies, proxies=proxies,
resume_download=resume_download, resume_download=resume_download,
local_files_only=local_files_only, local_files_only=local_files_only,
use_auth_token=use_auth_token, token=token,
user_agent=user_agent, user_agent=user_agent,
revision=revision, revision=revision,
subfolder=subfolder, subfolder=subfolder,
......
...@@ -727,7 +727,7 @@ class FlaxPreTrainedModel(PushToHubMixin, FlaxGenerationMixin): ...@@ -727,7 +727,7 @@ class FlaxPreTrainedModel(PushToHubMixin, FlaxGenerationMixin):
"proxies": proxies, "proxies": proxies,
"resume_download": resume_download, "resume_download": resume_download,
"local_files_only": local_files_only, "local_files_only": local_files_only,
"use_auth_token": token, "token": token,
"user_agent": user_agent, "user_agent": user_agent,
"revision": revision, "revision": revision,
"subfolder": subfolder, "subfolder": subfolder,
...@@ -758,7 +758,7 @@ class FlaxPreTrainedModel(PushToHubMixin, FlaxGenerationMixin): ...@@ -758,7 +758,7 @@ class FlaxPreTrainedModel(PushToHubMixin, FlaxGenerationMixin):
has_file_kwargs = { has_file_kwargs = {
"revision": revision, "revision": revision,
"proxies": proxies, "proxies": proxies,
"use_auth_token": token, "token": token,
} }
if has_file(pretrained_model_name_or_path, WEIGHTS_NAME, **has_file_kwargs): if has_file(pretrained_model_name_or_path, WEIGHTS_NAME, **has_file_kwargs):
raise EnvironmentError( raise EnvironmentError(
...@@ -809,7 +809,7 @@ class FlaxPreTrainedModel(PushToHubMixin, FlaxGenerationMixin): ...@@ -809,7 +809,7 @@ class FlaxPreTrainedModel(PushToHubMixin, FlaxGenerationMixin):
proxies=proxies, proxies=proxies,
resume_download=resume_download, resume_download=resume_download,
local_files_only=local_files_only, local_files_only=local_files_only,
use_auth_token=token, token=token,
user_agent=user_agent, user_agent=user_agent,
revision=revision, revision=revision,
subfolder=subfolder, subfolder=subfolder,
...@@ -1019,7 +1019,13 @@ class FlaxPreTrainedModel(PushToHubMixin, FlaxGenerationMixin): ...@@ -1019,7 +1019,13 @@ class FlaxPreTrainedModel(PushToHubMixin, FlaxGenerationMixin):
return model, unflatten_dict(state) return model, unflatten_dict(state)
def save_pretrained( def save_pretrained(
self, save_directory: Union[str, os.PathLike], params=None, push_to_hub=False, max_shard_size="10GB", **kwargs self,
save_directory: Union[str, os.PathLike],
params=None,
push_to_hub=False,
max_shard_size="10GB",
token: Optional[Union[str, bool]] = None,
**kwargs,
): ):
""" """
Save a model and its configuration file to a directory, so that it can be re-loaded using the Save a model and its configuration file to a directory, so that it can be re-loaded using the
...@@ -1043,9 +1049,27 @@ class FlaxPreTrainedModel(PushToHubMixin, FlaxGenerationMixin): ...@@ -1043,9 +1049,27 @@ class FlaxPreTrainedModel(PushToHubMixin, FlaxGenerationMixin):
</Tip> </Tip>
token (`str` or `bool`, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, or not specified, will use
the token generated when running `huggingface-cli login` (stored in `~/.huggingface`).
kwargs (`Dict[str, Any]`, *optional*): kwargs (`Dict[str, Any]`, *optional*):
Additional key word arguments passed along to the [`~utils.PushToHubMixin.push_to_hub`] method. Additional key word arguments passed along to the [`~utils.PushToHubMixin.push_to_hub`] method.
""" """
use_auth_token = kwargs.pop("use_auth_token", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if token is not None:
raise ValueError(
"`token` and `use_auth_token` are both specified. Please set only the argument `token`."
)
token = use_auth_token
if token is not None:
kwargs["token"] = token
if os.path.isfile(save_directory): if os.path.isfile(save_directory):
logger.error(f"Provided path ({save_directory}) should be a directory, not a file") logger.error(f"Provided path ({save_directory}) should be a directory, not a file")
return return
...@@ -1118,7 +1142,7 @@ class FlaxPreTrainedModel(PushToHubMixin, FlaxGenerationMixin): ...@@ -1118,7 +1142,7 @@ class FlaxPreTrainedModel(PushToHubMixin, FlaxGenerationMixin):
repo_id, repo_id,
files_timestamps, files_timestamps,
commit_message=commit_message, commit_message=commit_message,
token=kwargs.get("use_auth_token"), token=token,
) )
@classmethod @classmethod
......
...@@ -2335,6 +2335,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu ...@@ -2335,6 +2335,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu
max_shard_size: Union[int, str] = "10GB", max_shard_size: Union[int, str] = "10GB",
create_pr: bool = False, create_pr: bool = False,
safe_serialization: bool = False, safe_serialization: bool = False,
token: Optional[Union[str, bool]] = None,
**kwargs, **kwargs,
): ):
""" """
...@@ -2371,9 +2372,27 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu ...@@ -2371,9 +2372,27 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu
Whether or not to create a PR with the uploaded files or directly commit. Whether or not to create a PR with the uploaded files or directly commit.
safe_serialization (`bool`, *optional*, defaults to `False`): safe_serialization (`bool`, *optional*, defaults to `False`):
Whether to save the model using `safetensors` or the traditional PyTorch way (that uses `pickle`). Whether to save the model using `safetensors` or the traditional PyTorch way (that uses `pickle`).
token (`str` or `bool`, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, or not specified, will use
the token generated when running `huggingface-cli login` (stored in `~/.huggingface`).
kwargs (`Dict[str, Any]`, *optional*): kwargs (`Dict[str, Any]`, *optional*):
Additional key word arguments passed along to the [`~utils.PushToHubMixin.push_to_hub`] method. Additional key word arguments passed along to the [`~utils.PushToHubMixin.push_to_hub`] method.
""" """
use_auth_token = kwargs.pop("use_auth_token", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if token is not None:
raise ValueError(
"`token` and `use_auth_token` are both specified. Please set only the argument `token`."
)
token = use_auth_token
if token is not None:
kwargs["token"] = token
if os.path.isfile(save_directory): if os.path.isfile(save_directory):
logger.error(f"Provided path ({save_directory}) should be a directory, not a file") logger.error(f"Provided path ({save_directory}) should be a directory, not a file")
return return
...@@ -2478,7 +2497,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu ...@@ -2478,7 +2497,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu
repo_id, repo_id,
files_timestamps, files_timestamps,
commit_message=commit_message, commit_message=commit_message,
token=kwargs.get("use_auth_token"), token=token,
) )
@classmethod @classmethod
...@@ -2665,7 +2684,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu ...@@ -2665,7 +2684,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu
resume_download=resume_download, resume_download=resume_download,
proxies=proxies, proxies=proxies,
local_files_only=local_files_only, local_files_only=local_files_only,
token=use_auth_token, token=token,
revision=revision, revision=revision,
_from_auto=from_auto_class, _from_auto=from_auto_class,
_from_pipeline=from_pipeline, _from_pipeline=from_pipeline,
...@@ -2752,7 +2771,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu ...@@ -2752,7 +2771,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu
"proxies": proxies, "proxies": proxies,
"resume_download": resume_download, "resume_download": resume_download,
"local_files_only": local_files_only, "local_files_only": local_files_only,
"use_auth_token": token, "token": token,
"user_agent": user_agent, "user_agent": user_agent,
"revision": revision, "revision": revision,
"subfolder": subfolder, "subfolder": subfolder,
...@@ -2799,7 +2818,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu ...@@ -2799,7 +2818,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu
has_file_kwargs = { has_file_kwargs = {
"revision": revision, "revision": revision,
"proxies": proxies, "proxies": proxies,
"use_auth_token": token, "token": token,
} }
if has_file(pretrained_model_name_or_path, WEIGHTS_NAME, **has_file_kwargs): if has_file(pretrained_model_name_or_path, WEIGHTS_NAME, **has_file_kwargs):
raise EnvironmentError( raise EnvironmentError(
...@@ -2846,7 +2865,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu ...@@ -2846,7 +2865,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu
proxies=proxies, proxies=proxies,
resume_download=resume_download, resume_download=resume_download,
local_files_only=local_files_only, local_files_only=local_files_only,
use_auth_token=token, token=token,
user_agent=user_agent, user_agent=user_agent,
revision=revision, revision=revision,
_commit_hash=commit_hash, _commit_hash=commit_hash,
...@@ -3036,6 +3055,8 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu ...@@ -3036,6 +3055,8 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu
commit_message: Optional[str] = None, commit_message: Optional[str] = None,
private: Optional[bool] = None, private: Optional[bool] = None,
max_shard_size: Optional[Union[int, str]] = "10GB", max_shard_size: Optional[Union[int, str]] = "10GB",
token: Optional[Union[bool, str]] = None,
# (`use_auth_token` is deprecated)
use_auth_token: Optional[Union[bool, str]] = None, use_auth_token: Optional[Union[bool, str]] = None,
create_pr: bool = False, create_pr: bool = False,
**base_model_card_args, **base_model_card_args,
...@@ -3054,7 +3075,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu ...@@ -3054,7 +3075,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu
Message to commit while pushing. Will default to `"Upload model"`. Message to commit while pushing. Will default to `"Upload model"`.
private (`bool`, *optional*): private (`bool`, *optional*):
Whether or not the repository created should be private. Whether or not the repository created should be private.
use_auth_token (`bool` or `str`, *optional*): token (`bool` or `str`, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `huggingface-cli login` (stored in `~/.huggingface`). Will default to `True` if `repo_url` when running `huggingface-cli login` (stored in `~/.huggingface`). Will default to `True` if `repo_url`
is not specified. is not specified.
...@@ -3079,6 +3100,16 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu ...@@ -3079,6 +3100,16 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu
model.push_to_hub("huggingface/my-finetuned-bert") model.push_to_hub("huggingface/my-finetuned-bert")
``` ```
""" """
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if token is not None:
raise ValueError(
"`token` and `use_auth_token` are both specified. Please set only the argument `token`."
)
token = use_auth_token
if "repo_path_or_name" in base_model_card_args: if "repo_path_or_name" in base_model_card_args:
warnings.warn( warnings.warn(
"The `repo_path_or_name` argument is deprecated and will be removed in v5 of Transformers. Use " "The `repo_path_or_name` argument is deprecated and will be removed in v5 of Transformers. Use "
...@@ -3096,7 +3127,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu ...@@ -3096,7 +3127,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu
working_dir = repo_id.split("/")[-1] working_dir = repo_id.split("/")[-1]
repo_id = self._create_repo( repo_id = self._create_repo(
repo_id, private=private, use_auth_token=use_auth_token, repo_url=repo_url, organization=organization repo_id, private=private, token=token, repo_url=repo_url, organization=organization
) )
if use_temp_dir is None: if use_temp_dir is None:
...@@ -3121,7 +3152,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu ...@@ -3121,7 +3152,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu
repo_id, repo_id,
files_timestamps, files_timestamps,
commit_message=commit_message, commit_message=commit_message,
token=use_auth_token, token=token,
create_pr=create_pr, create_pr=create_pr,
) )
......
...@@ -1665,6 +1665,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix ...@@ -1665,6 +1665,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix
max_shard_size: Union[int, str] = "10GB", max_shard_size: Union[int, str] = "10GB",
safe_serialization: bool = False, safe_serialization: bool = False,
variant: Optional[str] = None, variant: Optional[str] = None,
token: Optional[Union[str, bool]] = None,
**kwargs, **kwargs,
): ):
""" """
...@@ -1704,9 +1705,27 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix ...@@ -1704,9 +1705,27 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix
Whether to save the model using `safetensors` or the traditional PyTorch way (that uses `pickle`). Whether to save the model using `safetensors` or the traditional PyTorch way (that uses `pickle`).
variant (`str`, *optional*): variant (`str`, *optional*):
If specified, weights are saved in the format pytorch_model.<variant>.bin. If specified, weights are saved in the format pytorch_model.<variant>.bin.
token (`str` or `bool`, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, or not specified, will use
the token generated when running `huggingface-cli login` (stored in `~/.huggingface`).
kwargs (`Dict[str, Any]`, *optional*): kwargs (`Dict[str, Any]`, *optional*):
Additional key word arguments passed along to the [`~utils.PushToHubMixin.push_to_hub`] method. Additional key word arguments passed along to the [`~utils.PushToHubMixin.push_to_hub`] method.
""" """
use_auth_token = kwargs.pop("use_auth_token", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if token is not None:
raise ValueError(
"`token` and `use_auth_token` are both specified. Please set only the argument `token`."
)
token = use_auth_token
if token is not None:
kwargs["token"] = token
# Checks if the model has been loaded in 8-bit # Checks if the model has been loaded in 8-bit
if getattr(self, "is_loaded_in_8bit", False) and getattr(self, "is_8bit_serializable", False): if getattr(self, "is_loaded_in_8bit", False) and getattr(self, "is_8bit_serializable", False):
warnings.warn( warnings.warn(
...@@ -1872,7 +1891,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix ...@@ -1872,7 +1891,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix
repo_id, repo_id,
files_timestamps, files_timestamps,
commit_message=commit_message, commit_message=commit_message,
token=kwargs.get("use_auth_token"), token=token,
) )
def get_memory_footprint(self, return_buffers=True): def get_memory_footprint(self, return_buffers=True):
...@@ -2513,7 +2532,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix ...@@ -2513,7 +2532,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix
"proxies": proxies, "proxies": proxies,
"resume_download": resume_download, "resume_download": resume_download,
"local_files_only": local_files_only, "local_files_only": local_files_only,
"use_auth_token": token, "token": token,
"user_agent": user_agent, "user_agent": user_agent,
"revision": revision, "revision": revision,
"subfolder": subfolder, "subfolder": subfolder,
...@@ -2558,7 +2577,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix ...@@ -2558,7 +2577,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix
has_file_kwargs = { has_file_kwargs = {
"revision": revision, "revision": revision,
"proxies": proxies, "proxies": proxies,
"use_auth_token": token, "token": token,
} }
if has_file(pretrained_model_name_or_path, TF2_WEIGHTS_NAME, **has_file_kwargs): if has_file(pretrained_model_name_or_path, TF2_WEIGHTS_NAME, **has_file_kwargs):
raise EnvironmentError( raise EnvironmentError(
...@@ -2619,7 +2638,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix ...@@ -2619,7 +2638,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix
proxies=proxies, proxies=proxies,
resume_download=resume_download, resume_download=resume_download,
local_files_only=local_files_only, local_files_only=local_files_only,
use_auth_token=token, token=token,
user_agent=user_agent, user_agent=user_agent,
revision=revision, revision=revision,
subfolder=subfolder, subfolder=subfolder,
......
...@@ -16,6 +16,7 @@ ...@@ -16,6 +16,7 @@
import copy import copy
import importlib import importlib
import os import os
import warnings
from collections import OrderedDict from collections import OrderedDict
from ...configuration_utils import PretrainedConfig from ...configuration_utils import PretrainedConfig
...@@ -449,8 +450,25 @@ class _BaseAutoModelClass: ...@@ -449,8 +450,25 @@ class _BaseAutoModelClass:
"revision", "revision",
"subfolder", "subfolder",
"use_auth_token", "use_auth_token",
"token",
] ]
hub_kwargs = {name: kwargs.pop(name) for name in hub_kwargs_names if name in kwargs} hub_kwargs = {name: kwargs.pop(name) for name in hub_kwargs_names if name in kwargs}
token = hub_kwargs.pop("token", None)
use_auth_token = hub_kwargs.pop("use_auth_token", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if token is not None:
raise ValueError(
"`token` and `use_auth_token` are both specified. Please set only the argument `token`."
)
token = use_auth_token
if token is not None:
hub_kwargs["token"] = token
if not isinstance(config, PretrainedConfig): if not isinstance(config, PretrainedConfig):
kwargs_orig = copy.deepcopy(kwargs) kwargs_orig = copy.deepcopy(kwargs)
# ensure not to pollute the config object with torch_dtype="auto" - since it's # ensure not to pollute the config object with torch_dtype="auto" - since it's
......
...@@ -987,6 +987,17 @@ class AutoConfig: ...@@ -987,6 +987,17 @@ class AutoConfig:
>>> unused_kwargs >>> unused_kwargs
{'foo': False} {'foo': False}
```""" ```"""
use_auth_token = kwargs.pop("use_auth_token", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if kwargs.get("token", None) is not None:
raise ValueError(
"`token` and `use_auth_token` are both specified. Please set only the argument `token`."
)
kwargs["token"] = use_auth_token
kwargs["_from_auto"] = True kwargs["_from_auto"] = True
kwargs["name_or_path"] = pretrained_model_name_or_path kwargs["name_or_path"] = pretrained_model_name_or_path
trust_remote_code = kwargs.pop("trust_remote_code", None) trust_remote_code = kwargs.pop("trust_remote_code", None)
......
...@@ -16,6 +16,7 @@ ...@@ -16,6 +16,7 @@
import importlib import importlib
import json import json
import os import os
import warnings
from collections import OrderedDict from collections import OrderedDict
from typing import Dict, Optional, Union from typing import Dict, Optional, Union
...@@ -135,7 +136,7 @@ def get_feature_extractor_config( ...@@ -135,7 +136,7 @@ def get_feature_extractor_config(
force_download: bool = False, force_download: bool = False,
resume_download: bool = False, resume_download: bool = False,
proxies: Optional[Dict[str, str]] = None, proxies: Optional[Dict[str, str]] = None,
use_auth_token: Optional[Union[bool, str]] = None, token: Optional[Union[bool, str]] = None,
revision: Optional[str] = None, revision: Optional[str] = None,
local_files_only: bool = False, local_files_only: bool = False,
**kwargs, **kwargs,
...@@ -164,7 +165,7 @@ def get_feature_extractor_config( ...@@ -164,7 +165,7 @@ def get_feature_extractor_config(
proxies (`Dict[str, str]`, *optional*): proxies (`Dict[str, str]`, *optional*):
A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128', A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
use_auth_token (`str` or *bool*, *optional*): token (`str` or *bool*, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `huggingface-cli login` (stored in `~/.huggingface`). when running `huggingface-cli login` (stored in `~/.huggingface`).
revision (`str`, *optional*, defaults to `"main"`): revision (`str`, *optional*, defaults to `"main"`):
...@@ -176,7 +177,7 @@ def get_feature_extractor_config( ...@@ -176,7 +177,7 @@ def get_feature_extractor_config(
<Tip> <Tip>
Passing `use_auth_token=True` is required when you want to use a private model. Passing `token=True` is required when you want to use a private model.
</Tip> </Tip>
...@@ -198,6 +199,15 @@ def get_feature_extractor_config( ...@@ -198,6 +199,15 @@ def get_feature_extractor_config(
tokenizer.save_pretrained("tokenizer-test") tokenizer.save_pretrained("tokenizer-test")
tokenizer_config = get_tokenizer_config("tokenizer-test") tokenizer_config = get_tokenizer_config("tokenizer-test")
```""" ```"""
use_auth_token = kwargs.pop("use_auth_token", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if token is not None:
raise ValueError("`token` and `use_auth_token` are both specified. Please set only the argument `token`.")
token = use_auth_token
resolved_config_file = get_file_from_repo( resolved_config_file = get_file_from_repo(
pretrained_model_name_or_path, pretrained_model_name_or_path,
FEATURE_EXTRACTOR_NAME, FEATURE_EXTRACTOR_NAME,
...@@ -205,7 +215,7 @@ def get_feature_extractor_config( ...@@ -205,7 +215,7 @@ def get_feature_extractor_config(
force_download=force_download, force_download=force_download,
resume_download=resume_download, resume_download=resume_download,
proxies=proxies, proxies=proxies,
use_auth_token=use_auth_token, token=token,
revision=revision, revision=revision,
local_files_only=local_files_only, local_files_only=local_files_only,
) )
...@@ -269,7 +279,7 @@ class AutoFeatureExtractor: ...@@ -269,7 +279,7 @@ class AutoFeatureExtractor:
proxies (`Dict[str, str]`, *optional*): proxies (`Dict[str, str]`, *optional*):
A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128', A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
use_auth_token (`str` or *bool*, *optional*): token (`str` or *bool*, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `huggingface-cli login` (stored in `~/.huggingface`). when running `huggingface-cli login` (stored in `~/.huggingface`).
revision (`str`, *optional*, defaults to `"main"`): revision (`str`, *optional*, defaults to `"main"`):
...@@ -292,7 +302,7 @@ class AutoFeatureExtractor: ...@@ -292,7 +302,7 @@ class AutoFeatureExtractor:
<Tip> <Tip>
Passing `use_auth_token=True` is required when you want to use a private model. Passing `token=True` is required when you want to use a private model.
</Tip> </Tip>
...@@ -307,6 +317,17 @@ class AutoFeatureExtractor: ...@@ -307,6 +317,17 @@ class AutoFeatureExtractor:
>>> # If feature extractor files are in a directory (e.g. feature extractor was saved using *save_pretrained('./test/saved_model/')*) >>> # If feature extractor files are in a directory (e.g. feature extractor was saved using *save_pretrained('./test/saved_model/')*)
>>> # feature_extractor = AutoFeatureExtractor.from_pretrained("./test/saved_model/") >>> # feature_extractor = AutoFeatureExtractor.from_pretrained("./test/saved_model/")
```""" ```"""
use_auth_token = kwargs.pop("use_auth_token", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if kwargs.get("token", None) is not None:
raise ValueError(
"`token` and `use_auth_token` are both specified. Please set only the argument `token`."
)
kwargs["token"] = use_auth_token
config = kwargs.pop("config", None) config = kwargs.pop("config", None)
trust_remote_code = kwargs.pop("trust_remote_code", None) trust_remote_code = kwargs.pop("trust_remote_code", None)
kwargs["_from_auto"] = True kwargs["_from_auto"] = True
......
...@@ -16,6 +16,7 @@ ...@@ -16,6 +16,7 @@
import importlib import importlib
import json import json
import os import os
import warnings
from collections import OrderedDict from collections import OrderedDict
from typing import Dict, Optional, Union from typing import Dict, Optional, Union
...@@ -143,7 +144,7 @@ def get_image_processor_config( ...@@ -143,7 +144,7 @@ def get_image_processor_config(
force_download: bool = False, force_download: bool = False,
resume_download: bool = False, resume_download: bool = False,
proxies: Optional[Dict[str, str]] = None, proxies: Optional[Dict[str, str]] = None,
use_auth_token: Optional[Union[bool, str]] = None, token: Optional[Union[bool, str]] = None,
revision: Optional[str] = None, revision: Optional[str] = None,
local_files_only: bool = False, local_files_only: bool = False,
**kwargs, **kwargs,
...@@ -172,7 +173,7 @@ def get_image_processor_config( ...@@ -172,7 +173,7 @@ def get_image_processor_config(
proxies (`Dict[str, str]`, *optional*): proxies (`Dict[str, str]`, *optional*):
A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128', A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
use_auth_token (`str` or *bool*, *optional*): token (`str` or *bool*, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `huggingface-cli login` (stored in `~/.huggingface`). when running `huggingface-cli login` (stored in `~/.huggingface`).
revision (`str`, *optional*, defaults to `"main"`): revision (`str`, *optional*, defaults to `"main"`):
...@@ -184,7 +185,7 @@ def get_image_processor_config( ...@@ -184,7 +185,7 @@ def get_image_processor_config(
<Tip> <Tip>
Passing `use_auth_token=True` is required when you want to use a private model. Passing `token=True` is required when you want to use a private model.
</Tip> </Tip>
...@@ -206,6 +207,15 @@ def get_image_processor_config( ...@@ -206,6 +207,15 @@ def get_image_processor_config(
image_processor.save_pretrained("image-processor-test") image_processor.save_pretrained("image-processor-test")
image_processor_config = get_image_processor_config("image-processor-test") image_processor_config = get_image_processor_config("image-processor-test")
```""" ```"""
use_auth_token = kwargs.pop("use_auth_token", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if token is not None:
raise ValueError("`token` and `use_auth_token` are both specified. Please set only the argument `token`.")
token = use_auth_token
resolved_config_file = get_file_from_repo( resolved_config_file = get_file_from_repo(
pretrained_model_name_or_path, pretrained_model_name_or_path,
IMAGE_PROCESSOR_NAME, IMAGE_PROCESSOR_NAME,
...@@ -213,7 +223,7 @@ def get_image_processor_config( ...@@ -213,7 +223,7 @@ def get_image_processor_config(
force_download=force_download, force_download=force_download,
resume_download=resume_download, resume_download=resume_download,
proxies=proxies, proxies=proxies,
use_auth_token=use_auth_token, token=token,
revision=revision, revision=revision,
local_files_only=local_files_only, local_files_only=local_files_only,
) )
...@@ -277,7 +287,7 @@ class AutoImageProcessor: ...@@ -277,7 +287,7 @@ class AutoImageProcessor:
proxies (`Dict[str, str]`, *optional*): proxies (`Dict[str, str]`, *optional*):
A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128', A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
use_auth_token (`str` or *bool*, *optional*): token (`str` or *bool*, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `huggingface-cli login` (stored in `~/.huggingface`). when running `huggingface-cli login` (stored in `~/.huggingface`).
revision (`str`, *optional*, defaults to `"main"`): revision (`str`, *optional*, defaults to `"main"`):
...@@ -300,7 +310,7 @@ class AutoImageProcessor: ...@@ -300,7 +310,7 @@ class AutoImageProcessor:
<Tip> <Tip>
Passing `use_auth_token=True` is required when you want to use a private model. Passing `token=True` is required when you want to use a private model.
</Tip> </Tip>
...@@ -315,6 +325,17 @@ class AutoImageProcessor: ...@@ -315,6 +325,17 @@ class AutoImageProcessor:
>>> # If image processor files are in a directory (e.g. image processor was saved using *save_pretrained('./test/saved_model/')*) >>> # If image processor files are in a directory (e.g. image processor was saved using *save_pretrained('./test/saved_model/')*)
>>> # image_processor = AutoImageProcessor.from_pretrained("./test/saved_model/") >>> # image_processor = AutoImageProcessor.from_pretrained("./test/saved_model/")
```""" ```"""
use_auth_token = kwargs.pop("use_auth_token", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if kwargs.get("token", None) is not None:
raise ValueError(
"`token` and `use_auth_token` are both specified. Please set only the argument `token`."
)
kwargs["token"] = use_auth_token
config = kwargs.pop("config", None) config = kwargs.pop("config", None)
trust_remote_code = kwargs.pop("trust_remote_code", None) trust_remote_code = kwargs.pop("trust_remote_code", None)
kwargs["_from_auto"] = True kwargs["_from_auto"] = True
......
...@@ -17,6 +17,7 @@ import importlib ...@@ -17,6 +17,7 @@ import importlib
import inspect import inspect
import json import json
import os import os
import warnings
from collections import OrderedDict from collections import OrderedDict
# Build the list of all feature extractors # Build the list of all feature extractors
...@@ -158,7 +159,7 @@ class AutoProcessor: ...@@ -158,7 +159,7 @@ class AutoProcessor:
proxies (`Dict[str, str]`, *optional*): proxies (`Dict[str, str]`, *optional*):
A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128', A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
use_auth_token (`str` or *bool*, *optional*): token (`str` or *bool*, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `huggingface-cli login` (stored in `~/.huggingface`). when running `huggingface-cli login` (stored in `~/.huggingface`).
revision (`str`, *optional*, defaults to `"main"`): revision (`str`, *optional*, defaults to `"main"`):
...@@ -181,7 +182,7 @@ class AutoProcessor: ...@@ -181,7 +182,7 @@ class AutoProcessor:
<Tip> <Tip>
Passing `use_auth_token=True` is required when you want to use a private model. Passing `token=True` is required when you want to use a private model.
</Tip> </Tip>
...@@ -196,6 +197,17 @@ class AutoProcessor: ...@@ -196,6 +197,17 @@ class AutoProcessor:
>>> # If processor files are in a directory (e.g. processor was saved using *save_pretrained('./test/saved_model/')*) >>> # If processor files are in a directory (e.g. processor was saved using *save_pretrained('./test/saved_model/')*)
>>> # processor = AutoProcessor.from_pretrained("./test/saved_model/") >>> # processor = AutoProcessor.from_pretrained("./test/saved_model/")
```""" ```"""
use_auth_token = kwargs.pop("use_auth_token", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if kwargs.get("token", None) is not None:
raise ValueError(
"`token` and `use_auth_token` are both specified. Please set only the argument `token`."
)
kwargs["token"] = use_auth_token
config = kwargs.pop("config", None) config = kwargs.pop("config", None)
trust_remote_code = kwargs.pop("trust_remote_code", None) trust_remote_code = kwargs.pop("trust_remote_code", None)
kwargs["_from_auto"] = True kwargs["_from_auto"] = True
......
...@@ -17,6 +17,7 @@ ...@@ -17,6 +17,7 @@
import importlib import importlib
import json import json
import os import os
import warnings
from collections import OrderedDict from collections import OrderedDict
from typing import TYPE_CHECKING, Dict, Optional, Tuple, Union from typing import TYPE_CHECKING, Dict, Optional, Tuple, Union
...@@ -426,7 +427,7 @@ def get_tokenizer_config( ...@@ -426,7 +427,7 @@ def get_tokenizer_config(
force_download: bool = False, force_download: bool = False,
resume_download: bool = False, resume_download: bool = False,
proxies: Optional[Dict[str, str]] = None, proxies: Optional[Dict[str, str]] = None,
use_auth_token: Optional[Union[bool, str]] = None, token: Optional[Union[bool, str]] = None,
revision: Optional[str] = None, revision: Optional[str] = None,
local_files_only: bool = False, local_files_only: bool = False,
subfolder: str = "", subfolder: str = "",
...@@ -456,7 +457,7 @@ def get_tokenizer_config( ...@@ -456,7 +457,7 @@ def get_tokenizer_config(
proxies (`Dict[str, str]`, *optional*): proxies (`Dict[str, str]`, *optional*):
A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128', A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
use_auth_token (`str` or *bool*, *optional*): token (`str` or *bool*, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `huggingface-cli login` (stored in `~/.huggingface`). when running `huggingface-cli login` (stored in `~/.huggingface`).
revision (`str`, *optional*, defaults to `"main"`): revision (`str`, *optional*, defaults to `"main"`):
...@@ -471,7 +472,7 @@ def get_tokenizer_config( ...@@ -471,7 +472,7 @@ def get_tokenizer_config(
<Tip> <Tip>
Passing `use_auth_token=True` is required when you want to use a private model. Passing `token=True` is required when you want to use a private model.
</Tip> </Tip>
...@@ -493,6 +494,15 @@ def get_tokenizer_config( ...@@ -493,6 +494,15 @@ def get_tokenizer_config(
tokenizer.save_pretrained("tokenizer-test") tokenizer.save_pretrained("tokenizer-test")
tokenizer_config = get_tokenizer_config("tokenizer-test") tokenizer_config = get_tokenizer_config("tokenizer-test")
```""" ```"""
use_auth_token = kwargs.pop("use_auth_token", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if token is not None:
raise ValueError("`token` and `use_auth_token` are both specified. Please set only the argument `token`.")
token = use_auth_token
commit_hash = kwargs.get("_commit_hash", None) commit_hash = kwargs.get("_commit_hash", None)
resolved_config_file = cached_file( resolved_config_file = cached_file(
pretrained_model_name_or_path, pretrained_model_name_or_path,
...@@ -501,7 +511,7 @@ def get_tokenizer_config( ...@@ -501,7 +511,7 @@ def get_tokenizer_config(
force_download=force_download, force_download=force_download,
resume_download=resume_download, resume_download=resume_download,
proxies=proxies, proxies=proxies,
use_auth_token=use_auth_token, token=token,
revision=revision, revision=revision,
local_files_only=local_files_only, local_files_only=local_files_only,
subfolder=subfolder, subfolder=subfolder,
...@@ -613,6 +623,17 @@ class AutoTokenizer: ...@@ -613,6 +623,17 @@ class AutoTokenizer:
>>> # Download vocabulary from huggingface.co and define model-specific arguments >>> # Download vocabulary from huggingface.co and define model-specific arguments
>>> tokenizer = AutoTokenizer.from_pretrained("roberta-base", add_prefix_space=True) >>> tokenizer = AutoTokenizer.from_pretrained("roberta-base", add_prefix_space=True)
```""" ```"""
use_auth_token = kwargs.pop("use_auth_token", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if kwargs.get("token", None) is not None:
raise ValueError(
"`token` and `use_auth_token` are both specified. Please set only the argument `token`."
)
kwargs["token"] = use_auth_token
config = kwargs.pop("config", None) config = kwargs.pop("config", None)
kwargs["_from_auto"] = True kwargs["_from_auto"] = True
......
...@@ -378,8 +378,8 @@ def convert_esm_checkpoint_to_pytorch( ...@@ -378,8 +378,8 @@ def convert_esm_checkpoint_to_pytorch(
hf_tokenizer.save_pretrained(pytorch_dump_folder_path) hf_tokenizer.save_pretrained(pytorch_dump_folder_path)
if push_to_repo: if push_to_repo:
model.push_to_hub(repo_id=push_to_repo, use_auth_token=auth_token) model.push_to_hub(repo_id=push_to_repo, token=auth_token)
hf_tokenizer.push_to_hub(repo_id=push_to_repo, use_auth_token=auth_token) hf_tokenizer.push_to_hub(repo_id=push_to_repo, token=auth_token)
if __name__ == "__main__": if __name__ == "__main__":
......
...@@ -264,7 +264,7 @@ def convert_wav2vec2_checkpoint( ...@@ -264,7 +264,7 @@ def convert_wav2vec2_checkpoint(
add_adapter=True, add_adapter=True,
adapter_stride=adapter_stride, adapter_stride=adapter_stride,
adapter_kernel_size=adapter_kernel_size, adapter_kernel_size=adapter_kernel_size,
use_auth_token=True, token=True,
output_hidden_size=encoder_output_dim, output_hidden_size=encoder_output_dim,
) )
decoder_config = MBartConfig.from_pretrained(decoder_config_path) decoder_config = MBartConfig.from_pretrained(decoder_config_path)
...@@ -282,7 +282,7 @@ def convert_wav2vec2_checkpoint( ...@@ -282,7 +282,7 @@ def convert_wav2vec2_checkpoint(
model = model[0].eval() model = model[0].eval()
# load feature extractor # load feature extractor
feature_extractor = Wav2Vec2FeatureExtractor.from_pretrained(encoder_config_path, use_auth_token=True) feature_extractor = Wav2Vec2FeatureExtractor.from_pretrained(encoder_config_path, token=True)
# set weights for wav2vec2 encoder # set weights for wav2vec2 encoder
hf_encoder = Wav2Vec2Model(encoder_config) hf_encoder = Wav2Vec2Model(encoder_config)
......
...@@ -1230,7 +1230,7 @@ class Wav2Vec2PreTrainedModel(PreTrainedModel): ...@@ -1230,7 +1230,7 @@ class Wav2Vec2PreTrainedModel(PreTrainedModel):
'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request. 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request.
local_files_only(`bool`, *optional*, defaults to `False`): local_files_only(`bool`, *optional*, defaults to `False`):
Whether or not to only look at local files (i.e., do not try to download the model). Whether or not to only look at local files (i.e., do not try to download the model).
use_auth_token (`str` or `bool`, *optional*): token (`str` or `bool`, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, or not specified, will use The token to use as HTTP bearer authorization for remote files. If `True`, or not specified, will use
the token generated when running `huggingface-cli login` (stored in `~/.huggingface`). the token generated when running `huggingface-cli login` (stored in `~/.huggingface`).
revision (`str`, *optional*, defaults to `"main"`): revision (`str`, *optional*, defaults to `"main"`):
...@@ -1281,10 +1281,21 @@ class Wav2Vec2PreTrainedModel(PreTrainedModel): ...@@ -1281,10 +1281,21 @@ class Wav2Vec2PreTrainedModel(PreTrainedModel):
resume_download = kwargs.pop("resume_download", False) resume_download = kwargs.pop("resume_download", False)
proxies = kwargs.pop("proxies", None) proxies = kwargs.pop("proxies", None)
local_files_only = kwargs.pop("local_files_only", False) local_files_only = kwargs.pop("local_files_only", False)
token = kwargs.pop("token", None)
use_auth_token = kwargs.pop("use_auth_token", None) use_auth_token = kwargs.pop("use_auth_token", None)
revision = kwargs.pop("revision", None) revision = kwargs.pop("revision", None)
use_safetensors = kwargs.pop("use_safetensors", None if is_safetensors_available() else False) use_safetensors = kwargs.pop("use_safetensors", None if is_safetensors_available() else False)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if token is not None:
raise ValueError(
"`token` and `use_auth_token` are both specified. Please set only the argument `token`."
)
token = use_auth_token
model_path_or_id = self.config._name_or_path model_path_or_id = self.config._name_or_path
state_dict = None state_dict = None
...@@ -1300,7 +1311,7 @@ class Wav2Vec2PreTrainedModel(PreTrainedModel): ...@@ -1300,7 +1311,7 @@ class Wav2Vec2PreTrainedModel(PreTrainedModel):
resume_download=resume_download, resume_download=resume_download,
proxies=proxies, proxies=proxies,
local_files_only=local_files_only, local_files_only=local_files_only,
use_auth_token=use_auth_token, token=token,
revision=revision, revision=revision,
cache_dir=cache_dir, cache_dir=cache_dir,
) )
...@@ -1335,7 +1346,7 @@ class Wav2Vec2PreTrainedModel(PreTrainedModel): ...@@ -1335,7 +1346,7 @@ class Wav2Vec2PreTrainedModel(PreTrainedModel):
resume_download=resume_download, resume_download=resume_download,
proxies=proxies, proxies=proxies,
local_files_only=local_files_only, local_files_only=local_files_only,
use_auth_token=use_auth_token, token=token,
revision=revision, revision=revision,
cache_dir=cache_dir, cache_dir=cache_dir,
) )
......
...@@ -114,6 +114,18 @@ class ProcessorMixin(PushToHubMixin): ...@@ -114,6 +114,18 @@ class ProcessorMixin(PushToHubMixin):
kwargs (`Dict[str, Any]`, *optional*): kwargs (`Dict[str, Any]`, *optional*):
Additional key word arguments passed along to the [`~utils.PushToHubMixin.push_to_hub`] method. Additional key word arguments passed along to the [`~utils.PushToHubMixin.push_to_hub`] method.
""" """
use_auth_token = kwargs.pop("use_auth_token", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if kwargs.get("token", None) is not None:
raise ValueError(
"`token` and `use_auth_token` are both specified. Please set only the argument `token`."
)
kwargs["token"] = use_auth_token
os.makedirs(save_directory, exist_ok=True) os.makedirs(save_directory, exist_ok=True)
if push_to_hub: if push_to_hub:
...@@ -149,7 +161,7 @@ class ProcessorMixin(PushToHubMixin): ...@@ -149,7 +161,7 @@ class ProcessorMixin(PushToHubMixin):
repo_id, repo_id,
files_timestamps, files_timestamps,
commit_message=commit_message, commit_message=commit_message,
token=kwargs.get("use_auth_token"), token=kwargs.get("token"),
) )
@classmethod @classmethod
...@@ -209,8 +221,7 @@ class ProcessorMixin(PushToHubMixin): ...@@ -209,8 +221,7 @@ class ProcessorMixin(PushToHubMixin):
token = use_auth_token token = use_auth_token
if token is not None: if token is not None:
# change to `token` in a follow-up PR kwargs["token"] = token
kwargs["use_auth_token"] = token
args = cls._get_arguments_from_pretrained(pretrained_model_name_or_path, **kwargs) args = cls._get_arguments_from_pretrained(pretrained_model_name_or_path, **kwargs)
return cls(*args) return cls(*args)
......
...@@ -1681,7 +1681,7 @@ class PreTrainedTokenizerBase(SpecialTokensMixin, PushToHubMixin): ...@@ -1681,7 +1681,7 @@ class PreTrainedTokenizerBase(SpecialTokensMixin, PushToHubMixin):
<Tip> <Tip>
Passing `use_auth_token=True` is required when you want to use a private model. Passing `token=True` is required when you want to use a private model.
</Tip> </Tip>
...@@ -1773,7 +1773,7 @@ class PreTrainedTokenizerBase(SpecialTokensMixin, PushToHubMixin): ...@@ -1773,7 +1773,7 @@ class PreTrainedTokenizerBase(SpecialTokensMixin, PushToHubMixin):
force_download=force_download, force_download=force_download,
resume_download=resume_download, resume_download=resume_download,
proxies=proxies, proxies=proxies,
use_auth_token=token, token=token,
revision=revision, revision=revision,
local_files_only=local_files_only, local_files_only=local_files_only,
subfolder=subfolder, subfolder=subfolder,
...@@ -1810,7 +1810,7 @@ class PreTrainedTokenizerBase(SpecialTokensMixin, PushToHubMixin): ...@@ -1810,7 +1810,7 @@ class PreTrainedTokenizerBase(SpecialTokensMixin, PushToHubMixin):
proxies=proxies, proxies=proxies,
resume_download=resume_download, resume_download=resume_download,
local_files_only=local_files_only, local_files_only=local_files_only,
use_auth_token=token, token=token,
user_agent=user_agent, user_agent=user_agent,
revision=revision, revision=revision,
subfolder=subfolder, subfolder=subfolder,
...@@ -1848,7 +1848,7 @@ class PreTrainedTokenizerBase(SpecialTokensMixin, PushToHubMixin): ...@@ -1848,7 +1848,7 @@ class PreTrainedTokenizerBase(SpecialTokensMixin, PushToHubMixin):
pretrained_model_name_or_path, pretrained_model_name_or_path,
init_configuration, init_configuration,
*init_inputs, *init_inputs,
use_auth_token=token, token=token,
cache_dir=cache_dir, cache_dir=cache_dir,
local_files_only=local_files_only, local_files_only=local_files_only,
_commit_hash=commit_hash, _commit_hash=commit_hash,
...@@ -1863,7 +1863,7 @@ class PreTrainedTokenizerBase(SpecialTokensMixin, PushToHubMixin): ...@@ -1863,7 +1863,7 @@ class PreTrainedTokenizerBase(SpecialTokensMixin, PushToHubMixin):
pretrained_model_name_or_path, pretrained_model_name_or_path,
init_configuration, init_configuration,
*init_inputs, *init_inputs,
use_auth_token=None, token=None,
cache_dir=None, cache_dir=None,
local_files_only=False, local_files_only=False,
_commit_hash=None, _commit_hash=None,
...@@ -1880,7 +1880,7 @@ class PreTrainedTokenizerBase(SpecialTokensMixin, PushToHubMixin): ...@@ -1880,7 +1880,7 @@ class PreTrainedTokenizerBase(SpecialTokensMixin, PushToHubMixin):
pretrained_model_name_or_path, pretrained_model_name_or_path,
copy.deepcopy(init_configuration), copy.deepcopy(init_configuration),
*init_inputs, *init_inputs,
use_auth_token=use_auth_token, token=token,
cache_dir=cache_dir, cache_dir=cache_dir,
local_files_only=local_files_only, local_files_only=local_files_only,
_commit_hash=_commit_hash, _commit_hash=_commit_hash,
...@@ -1920,7 +1920,7 @@ class PreTrainedTokenizerBase(SpecialTokensMixin, PushToHubMixin): ...@@ -1920,7 +1920,7 @@ class PreTrainedTokenizerBase(SpecialTokensMixin, PushToHubMixin):
try: try:
config = AutoConfig.from_pretrained( config = AutoConfig.from_pretrained(
pretrained_model_name_or_path, pretrained_model_name_or_path,
use_auth_token=use_auth_token, token=token,
cache_dir=cache_dir, cache_dir=cache_dir,
local_files_only=local_files_only, local_files_only=local_files_only,
_commit_hash=_commit_hash, _commit_hash=_commit_hash,
...@@ -2139,6 +2139,18 @@ class PreTrainedTokenizerBase(SpecialTokensMixin, PushToHubMixin): ...@@ -2139,6 +2139,18 @@ class PreTrainedTokenizerBase(SpecialTokensMixin, PushToHubMixin):
Returns: Returns:
A tuple of `str`: The files saved. A tuple of `str`: The files saved.
""" """
use_auth_token = kwargs.pop("use_auth_token", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if kwargs.get("token", None) is not None:
raise ValueError(
"`token` and `use_auth_token` are both specified. Please set only the argument `token`."
)
kwargs["token"] = use_auth_token
if os.path.isfile(save_directory): if os.path.isfile(save_directory):
logger.error(f"Provided path ({save_directory}) should be a directory, not a file") logger.error(f"Provided path ({save_directory}) should be a directory, not a file")
return return
...@@ -2236,7 +2248,7 @@ class PreTrainedTokenizerBase(SpecialTokensMixin, PushToHubMixin): ...@@ -2236,7 +2248,7 @@ class PreTrainedTokenizerBase(SpecialTokensMixin, PushToHubMixin):
repo_id, repo_id,
files_timestamps, files_timestamps,
commit_message=commit_message, commit_message=commit_message,
token=kwargs.get("use_auth_token"), token=kwargs.get("token"),
) )
return save_files return save_files
......
...@@ -506,7 +506,7 @@ class PipelineTool(Tool): ...@@ -506,7 +506,7 @@ class PipelineTool(Tool):
if device_map is not None: if device_map is not None:
self.model_kwargs["device_map"] = device_map self.model_kwargs["device_map"] = device_map
self.hub_kwargs = hub_kwargs self.hub_kwargs = hub_kwargs
self.hub_kwargs["use_auth_token"] = token self.hub_kwargs["token"] = token
super().__init__() super().__init__()
......
...@@ -305,7 +305,7 @@ def cached_file( ...@@ -305,7 +305,7 @@ def cached_file(
force_download: bool = False, force_download: bool = False,
resume_download: bool = False, resume_download: bool = False,
proxies: Optional[Dict[str, str]] = None, proxies: Optional[Dict[str, str]] = None,
use_auth_token: Optional[Union[bool, str]] = None, token: Optional[Union[bool, str]] = None,
revision: Optional[str] = None, revision: Optional[str] = None,
local_files_only: bool = False, local_files_only: bool = False,
subfolder: str = "", subfolder: str = "",
...@@ -314,6 +314,7 @@ def cached_file( ...@@ -314,6 +314,7 @@ def cached_file(
_raise_exceptions_for_missing_entries: bool = True, _raise_exceptions_for_missing_entries: bool = True,
_raise_exceptions_for_connection_errors: bool = True, _raise_exceptions_for_connection_errors: bool = True,
_commit_hash: Optional[str] = None, _commit_hash: Optional[str] = None,
**deprecated_kwargs,
): ):
""" """
Tries to locate a file in a local folder and repo, downloads and cache it if necessary. Tries to locate a file in a local folder and repo, downloads and cache it if necessary.
...@@ -337,7 +338,7 @@ def cached_file( ...@@ -337,7 +338,7 @@ def cached_file(
proxies (`Dict[str, str]`, *optional*): proxies (`Dict[str, str]`, *optional*):
A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128', A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
use_auth_token (`str` or *bool*, *optional*): token (`str` or *bool*, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `huggingface-cli login` (stored in `~/.huggingface`). when running `huggingface-cli login` (stored in `~/.huggingface`).
revision (`str`, *optional*, defaults to `"main"`): revision (`str`, *optional*, defaults to `"main"`):
...@@ -354,7 +355,7 @@ def cached_file( ...@@ -354,7 +355,7 @@ def cached_file(
<Tip> <Tip>
Passing `use_auth_token=True` is required when you want to use a private model. Passing `token=True` is required when you want to use a private model.
</Tip> </Tip>
...@@ -367,6 +368,15 @@ def cached_file( ...@@ -367,6 +368,15 @@ def cached_file(
# Download a model weight from the Hub and cache it. # Download a model weight from the Hub and cache it.
model_weights_file = cached_file("bert-base-uncased", "pytorch_model.bin") model_weights_file = cached_file("bert-base-uncased", "pytorch_model.bin")
```""" ```"""
use_auth_token = deprecated_kwargs.pop("use_auth_token", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if token is not None:
raise ValueError("`token` and `use_auth_token` are both specified. Please set only the argument `token`.")
token = use_auth_token
# Private arguments # Private arguments
# _raise_exceptions_for_missing_entries: if False, do not raise an exception for missing entries but return # _raise_exceptions_for_missing_entries: if False, do not raise an exception for missing entries but return
# None. # None.
...@@ -426,7 +436,7 @@ def cached_file( ...@@ -426,7 +436,7 @@ def cached_file(
force_download=force_download, force_download=force_download,
proxies=proxies, proxies=proxies,
resume_download=resume_download, resume_download=resume_download,
use_auth_token=use_auth_token, token=token,
local_files_only=local_files_only, local_files_only=local_files_only,
) )
except GatedRepoError as e: except GatedRepoError as e:
...@@ -490,10 +500,11 @@ def get_file_from_repo( ...@@ -490,10 +500,11 @@ def get_file_from_repo(
force_download: bool = False, force_download: bool = False,
resume_download: bool = False, resume_download: bool = False,
proxies: Optional[Dict[str, str]] = None, proxies: Optional[Dict[str, str]] = None,
use_auth_token: Optional[Union[bool, str]] = None, token: Optional[Union[bool, str]] = None,
revision: Optional[str] = None, revision: Optional[str] = None,
local_files_only: bool = False, local_files_only: bool = False,
subfolder: str = "", subfolder: str = "",
**deprecated_kwargs,
): ):
""" """
Tries to locate a file in a local folder and repo, downloads and cache it if necessary. Tries to locate a file in a local folder and repo, downloads and cache it if necessary.
...@@ -517,7 +528,7 @@ def get_file_from_repo( ...@@ -517,7 +528,7 @@ def get_file_from_repo(
proxies (`Dict[str, str]`, *optional*): proxies (`Dict[str, str]`, *optional*):
A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128', A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
use_auth_token (`str` or *bool*, *optional*): token (`str` or *bool*, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `huggingface-cli login` (stored in `~/.huggingface`). when running `huggingface-cli login` (stored in `~/.huggingface`).
revision (`str`, *optional*, defaults to `"main"`): revision (`str`, *optional*, defaults to `"main"`):
...@@ -532,7 +543,7 @@ def get_file_from_repo( ...@@ -532,7 +543,7 @@ def get_file_from_repo(
<Tip> <Tip>
Passing `use_auth_token=True` is required when you want to use a private model. Passing `token=True` is required when you want to use a private model.
</Tip> </Tip>
...@@ -548,6 +559,15 @@ def get_file_from_repo( ...@@ -548,6 +559,15 @@ def get_file_from_repo(
# This model does not have a tokenizer config so the result will be None. # This model does not have a tokenizer config so the result will be None.
tokenizer_config = get_file_from_repo("xlm-roberta-base", "tokenizer_config.json") tokenizer_config = get_file_from_repo("xlm-roberta-base", "tokenizer_config.json")
```""" ```"""
use_auth_token = deprecated_kwargs.pop("use_auth_token", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if token is not None:
raise ValueError("`token` and `use_auth_token` are both specified. Please set only the argument `token`.")
token = use_auth_token
return cached_file( return cached_file(
path_or_repo_id=path_or_repo, path_or_repo_id=path_or_repo,
filename=filename, filename=filename,
...@@ -555,7 +575,7 @@ def get_file_from_repo( ...@@ -555,7 +575,7 @@ def get_file_from_repo(
force_download=force_download, force_download=force_download,
resume_download=resume_download, resume_download=resume_download,
proxies=proxies, proxies=proxies,
use_auth_token=use_auth_token, token=token,
revision=revision, revision=revision,
local_files_only=local_files_only, local_files_only=local_files_only,
subfolder=subfolder, subfolder=subfolder,
...@@ -595,7 +615,8 @@ def has_file( ...@@ -595,7 +615,8 @@ def has_file(
filename: str, filename: str,
revision: Optional[str] = None, revision: Optional[str] = None,
proxies: Optional[Dict[str, str]] = None, proxies: Optional[Dict[str, str]] = None,
use_auth_token: Optional[Union[bool, str]] = None, token: Optional[Union[bool, str]] = None,
**deprecated_kwargs,
): ):
""" """
Checks if a repo contains a given file without downloading it. Works for remote repos and local folders. Checks if a repo contains a given file without downloading it. Works for remote repos and local folders.
...@@ -607,11 +628,20 @@ def has_file( ...@@ -607,11 +628,20 @@ def has_file(
</Tip> </Tip>
""" """
use_auth_token = deprecated_kwargs.pop("use_auth_token", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if token is not None:
raise ValueError("`token` and `use_auth_token` are both specified. Please set only the argument `token`.")
token = use_auth_token
if os.path.isdir(path_or_repo): if os.path.isdir(path_or_repo):
return os.path.isfile(os.path.join(path_or_repo, filename)) return os.path.isfile(os.path.join(path_or_repo, filename))
url = hf_hub_url(path_or_repo, filename=filename, revision=revision) url = hf_hub_url(path_or_repo, filename=filename, revision=revision)
headers = build_hf_headers(use_auth_token=use_auth_token, user_agent=http_user_agent()) headers = build_hf_headers(token=token, user_agent=http_user_agent())
r = requests.head(url, headers=headers, allow_redirects=False, proxies=proxies, timeout=10) r = requests.head(url, headers=headers, allow_redirects=False, proxies=proxies, timeout=10)
try: try:
...@@ -647,7 +677,7 @@ class PushToHubMixin: ...@@ -647,7 +677,7 @@ class PushToHubMixin:
self, self,
repo_id: str, repo_id: str,
private: Optional[bool] = None, private: Optional[bool] = None,
use_auth_token: Optional[Union[bool, str]] = None, token: Optional[Union[bool, str]] = None,
repo_url: Optional[str] = None, repo_url: Optional[str] = None,
organization: Optional[str] = None, organization: Optional[str] = None,
) -> str: ) -> str:
...@@ -671,11 +701,11 @@ class PushToHubMixin: ...@@ -671,11 +701,11 @@ class PushToHubMixin:
repo_id = repo_id.split("/")[-1] repo_id = repo_id.split("/")[-1]
repo_id = f"{organization}/{repo_id}" repo_id = f"{organization}/{repo_id}"
url = create_repo(repo_id=repo_id, token=use_auth_token, private=private, exist_ok=True) url = create_repo(repo_id=repo_id, token=token, private=private, exist_ok=True)
# If the namespace is not there, add it or `upload_file` will complain # If the namespace is not there, add it or `upload_file` will complain
if "/" not in repo_id and url != f"{HUGGINGFACE_CO_RESOLVE_ENDPOINT}/{repo_id}": if "/" not in repo_id and url != f"{HUGGINGFACE_CO_RESOLVE_ENDPOINT}/{repo_id}":
repo_id = get_full_repo_name(repo_id, token=use_auth_token) repo_id = get_full_repo_name(repo_id, token=token)
return repo_id return repo_id
def _get_files_timestamps(self, working_dir: Union[str, os.PathLike]): def _get_files_timestamps(self, working_dir: Union[str, os.PathLike]):
...@@ -749,7 +779,7 @@ class PushToHubMixin: ...@@ -749,7 +779,7 @@ class PushToHubMixin:
use_temp_dir: Optional[bool] = None, use_temp_dir: Optional[bool] = None,
commit_message: Optional[str] = None, commit_message: Optional[str] = None,
private: Optional[bool] = None, private: Optional[bool] = None,
use_auth_token: Optional[Union[bool, str]] = None, token: Optional[Union[bool, str]] = None,
max_shard_size: Optional[Union[int, str]] = "10GB", max_shard_size: Optional[Union[int, str]] = "10GB",
create_pr: bool = False, create_pr: bool = False,
safe_serialization: bool = False, safe_serialization: bool = False,
...@@ -770,7 +800,7 @@ class PushToHubMixin: ...@@ -770,7 +800,7 @@ class PushToHubMixin:
Message to commit while pushing. Will default to `"Upload {object}"`. Message to commit while pushing. Will default to `"Upload {object}"`.
private (`bool`, *optional*): private (`bool`, *optional*):
Whether or not the repository created should be private. Whether or not the repository created should be private.
use_auth_token (`bool` or `str`, *optional*): token (`bool` or `str`, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `huggingface-cli login` (stored in `~/.huggingface`). Will default to `True` if `repo_url` when running `huggingface-cli login` (stored in `~/.huggingface`). Will default to `True` if `repo_url`
is not specified. is not specified.
...@@ -797,6 +827,17 @@ class PushToHubMixin: ...@@ -797,6 +827,17 @@ class PushToHubMixin:
{object}.push_to_hub("huggingface/my-finetuned-bert") {object}.push_to_hub("huggingface/my-finetuned-bert")
``` ```
""" """
use_auth_token = deprecated_kwargs.pop("use_auth_token", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if token is not None:
raise ValueError(
"`token` and `use_auth_token` are both specified. Please set only the argument `token`."
)
token = use_auth_token
if "repo_path_or_name" in deprecated_kwargs: if "repo_path_or_name" in deprecated_kwargs:
warnings.warn( warnings.warn(
"The `repo_path_or_name` argument is deprecated and will be removed in v5 of Transformers. Use " "The `repo_path_or_name` argument is deprecated and will be removed in v5 of Transformers. Use "
...@@ -814,7 +855,7 @@ class PushToHubMixin: ...@@ -814,7 +855,7 @@ class PushToHubMixin:
working_dir = repo_id.split("/")[-1] working_dir = repo_id.split("/")[-1]
repo_id = self._create_repo( repo_id = self._create_repo(
repo_id, private=private, use_auth_token=use_auth_token, repo_url=repo_url, organization=organization repo_id, private=private, token=token, repo_url=repo_url, organization=organization
) )
if use_temp_dir is None: if use_temp_dir is None:
...@@ -831,7 +872,7 @@ class PushToHubMixin: ...@@ -831,7 +872,7 @@ class PushToHubMixin:
repo_id, repo_id,
files_timestamps, files_timestamps,
commit_message=commit_message, commit_message=commit_message,
token=use_auth_token, token=token,
create_pr=create_pr, create_pr=create_pr,
) )
...@@ -923,11 +964,12 @@ def get_checkpoint_shard_files( ...@@ -923,11 +964,12 @@ def get_checkpoint_shard_files(
proxies=None, proxies=None,
resume_download=False, resume_download=False,
local_files_only=False, local_files_only=False,
use_auth_token=None, token=None,
user_agent=None, user_agent=None,
revision=None, revision=None,
subfolder="", subfolder="",
_commit_hash=None, _commit_hash=None,
**deprecated_kwargs,
): ):
""" """
For a given model: For a given model:
...@@ -941,6 +983,15 @@ def get_checkpoint_shard_files( ...@@ -941,6 +983,15 @@ def get_checkpoint_shard_files(
""" """
import json import json
use_auth_token = deprecated_kwargs.pop("use_auth_token", None)
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
)
if token is not None:
raise ValueError("`token` and `use_auth_token` are both specified. Please set only the argument `token`.")
token = use_auth_token
if not os.path.isfile(index_filename): if not os.path.isfile(index_filename):
raise ValueError(f"Can't find a checkpoint index ({index_filename}) in {pretrained_model_name_or_path}.") raise ValueError(f"Can't find a checkpoint index ({index_filename}) in {pretrained_model_name_or_path}.")
...@@ -976,7 +1027,7 @@ def get_checkpoint_shard_files( ...@@ -976,7 +1027,7 @@ def get_checkpoint_shard_files(
proxies=proxies, proxies=proxies,
resume_download=resume_download, resume_download=resume_download,
local_files_only=local_files_only, local_files_only=local_files_only,
use_auth_token=use_auth_token, token=token,
user_agent=user_agent, user_agent=user_agent,
revision=revision, revision=revision,
subfolder=subfolder, subfolder=subfolder,
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment