Unverified Commit 9129fd03 authored by Julien Chaumond's avatar Julien Chaumond Committed by GitHub
Browse files

`transformers-cli login` => `huggingface-cli login` (#18490)

* zero chance anyone's using that constant no?

* `transformers-cli login` => `huggingface-cli login`

* `transformers-cli repo create` => `huggingface-cli repo create`

* `make style`
parent 8d1f9039
...@@ -115,7 +115,7 @@ class ModelArguments: ...@@ -115,7 +115,7 @@ class ModelArguments:
default=False, default=False,
metadata={ metadata={
"help": ( "help": (
"Will use the token generated when running `transformers-cli login` (necessary to use this script " "Will use the token generated when running `huggingface-cli login` (necessary to use this script "
"with private models)." "with private models)."
) )
}, },
......
...@@ -157,7 +157,7 @@ class ModelArguments: ...@@ -157,7 +157,7 @@ class ModelArguments:
default=False, default=False,
metadata={ metadata={
"help": ( "help": (
"Will use the token generated when running `transformers-cli login` (necessary to use this script " "Will use the token generated when running `huggingface-cli login` (necessary to use this script "
"with private models)." "with private models)."
) )
}, },
......
...@@ -80,7 +80,7 @@ class ModelArguments: ...@@ -80,7 +80,7 @@ class ModelArguments:
default=False, default=False,
metadata={ metadata={
"help": ( "help": (
"Will use the token generated when running `transformers-cli login` (necessary to use this script " "Will use the token generated when running `huggingface-cli login` (necessary to use this script "
"with private models)." "with private models)."
) )
}, },
......
...@@ -101,7 +101,7 @@ class ModelArguments: ...@@ -101,7 +101,7 @@ class ModelArguments:
default=False, default=False,
metadata={ metadata={
"help": ( "help": (
"Will use the token generated when running `transformers-cli login` (necessary to use this script " "Will use the token generated when running `huggingface-cli login` (necessary to use this script "
"with private models)." "with private models)."
) )
}, },
......
...@@ -183,7 +183,7 @@ class ModelArguments: ...@@ -183,7 +183,7 @@ class ModelArguments:
default=False, default=False,
metadata={ metadata={
"help": ( "help": (
"Will use the token generated when running `transformers-cli login` (necessary to use this script " "Will use the token generated when running `huggingface-cli login` (necessary to use this script "
"with private models)." "with private models)."
) )
}, },
......
...@@ -173,7 +173,7 @@ class ModelArguments: ...@@ -173,7 +173,7 @@ class ModelArguments:
default=False, default=False,
metadata={ metadata={
"help": ( "help": (
"Will use the token generated when running `transformers-cli login` (necessary to use this script " "Will use the token generated when running `huggingface-cli login` (necessary to use this script "
"with private models)." "with private models)."
) )
}, },
......
...@@ -83,7 +83,7 @@ class ModelArguments: ...@@ -83,7 +83,7 @@ class ModelArguments:
default=False, default=False,
metadata={ metadata={
"help": ( "help": (
"Will use the token generated when running `transformers-cli login` (necessary to use this script " "Will use the token generated when running `huggingface-cli login` (necessary to use this script "
"with private models)." "with private models)."
) )
}, },
......
...@@ -95,7 +95,7 @@ class ModelArguments: ...@@ -95,7 +95,7 @@ class ModelArguments:
default=False, default=False,
metadata={ metadata={
"help": ( "help": (
"Will use the token generated when running `transformers-cli login` (necessary to use this script " "Will use the token generated when running `huggingface-cli login` (necessary to use this script "
"with private models)." "with private models)."
) )
}, },
......
...@@ -57,7 +57,7 @@ To upload all converted models, ...@@ -57,7 +57,7 @@ To upload all converted models,
2. Login to `transformers-cli` 2. Login to `huggingface-cli`
```bash ```bash
transformers-cli login huggingface-cli login
``` ```
3. Run the `upload_models` script 3. Run the `upload_models` script
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
for FILE in converted/*; do for FILE in converted/*; do
model_name=`basename $FILE` model_name=`basename $FILE`
transformers-cli repo create $model_name -y huggingface-cli repo create $model_name -y
git clone https://huggingface.co/Helsinki-NLP/$model_name git clone https://huggingface.co/Helsinki-NLP/$model_name
mv $FILE/* $model_name/ mv $FILE/* $model_name/
cd $model_name cd $model_name
......
...@@ -22,9 +22,6 @@ from requests.exceptions import HTTPError ...@@ -22,9 +22,6 @@ from requests.exceptions import HTTPError
from . import BaseTransformersCLICommand from . import BaseTransformersCLICommand
UPLOAD_MAX_FILES = 15
class UserCommands(BaseTransformersCLICommand): class UserCommands(BaseTransformersCLICommand):
@staticmethod @staticmethod
def register_subcommand(parser: ArgumentParser): def register_subcommand(parser: ArgumentParser):
...@@ -105,7 +102,7 @@ class LoginCommand(BaseUserCommand): ...@@ -105,7 +102,7 @@ class LoginCommand(BaseUserCommand):
def run(self): def run(self):
print( print(
ANSI.red( ANSI.red(
"ERROR! `transformers-cli login` uses an outdated login mechanism " "ERROR! `transformers-cli login` uses an outdated login mechanism "
"that is not compatible with the Hugging Face Hub backend anymore. " "that is not compatible with the Hugging Face Hub backend anymore. "
"Please use `huggingface-cli login` instead." "Please use `huggingface-cli login` instead."
) )
......
...@@ -463,7 +463,7 @@ class PretrainedConfig(PushToHubMixin): ...@@ -463,7 +463,7 @@ class PretrainedConfig(PushToHubMixin):
'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
use_auth_token (`str` or *bool*, *optional*): use_auth_token (`str` or *bool*, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `transformers-cli login` (stored in `~/.huggingface`). when running `huggingface-cli login` (stored in `~/.huggingface`).
revision (`str`, *optional*, defaults to `"main"`): revision (`str`, *optional*, defaults to `"main"`):
The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any
......
...@@ -195,7 +195,7 @@ def get_cached_module_file( ...@@ -195,7 +195,7 @@ def get_cached_module_file(
'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
use_auth_token (`str` or *bool*, *optional*): use_auth_token (`str` or *bool*, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `transformers-cli login` (stored in `~/.huggingface`). when running `huggingface-cli login` (stored in `~/.huggingface`).
revision (`str`, *optional*, defaults to `"main"`): revision (`str`, *optional*, defaults to `"main"`):
The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any
...@@ -345,7 +345,7 @@ def get_class_from_dynamic_module( ...@@ -345,7 +345,7 @@ def get_class_from_dynamic_module(
'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
use_auth_token (`str` or `bool`, *optional*): use_auth_token (`str` or `bool`, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `transformers-cli login` (stored in `~/.huggingface`). when running `huggingface-cli login` (stored in `~/.huggingface`).
revision (`str`, *optional*, defaults to `"main"`): revision (`str`, *optional*, defaults to `"main"`):
The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any
......
...@@ -251,7 +251,7 @@ class FeatureExtractionMixin(PushToHubMixin): ...@@ -251,7 +251,7 @@ class FeatureExtractionMixin(PushToHubMixin):
'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
use_auth_token (`str` or *bool*, *optional*): use_auth_token (`str` or *bool*, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `transformers-cli login` (stored in `~/.huggingface`). when running `huggingface-cli login` (stored in `~/.huggingface`).
revision (`str`, *optional*, defaults to `"main"`): revision (`str`, *optional*, defaults to `"main"`):
The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any
......
...@@ -2096,7 +2096,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu ...@@ -2096,7 +2096,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu
Whether or not to only look at local files (e.g., not try downloading the model). Whether or not to only look at local files (e.g., not try downloading the model).
use_auth_token (`str` or *bool*, *optional*): use_auth_token (`str` or *bool*, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `transformers-cli login` (stored in `~/.huggingface`). when running `huggingface-cli login` (stored in `~/.huggingface`).
revision (`str`, *optional*, defaults to `"main"`): revision (`str`, *optional*, defaults to `"main"`):
The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any
...@@ -2472,8 +2472,8 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu ...@@ -2472,8 +2472,8 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu
Whether or not the repository created should be private (requires a paying subscription). Whether or not the repository created should be private (requires a paying subscription).
use_auth_token (`bool` or `str`, *optional*): use_auth_token (`bool` or `str`, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `transformers-cli login` (stored in `~/.huggingface`). Will default to `True` if when running `huggingface-cli login` (stored in `~/.huggingface`). Will default to `True` if `repo_url`
`repo_url` is not specified. is not specified.
max_shard_size (`int` or `str`, *optional*, defaults to `"10GB"`): max_shard_size (`int` or `str`, *optional*, defaults to `"10GB"`):
Only applicable for models. The maximum size for a checkpoint before being sharded. Checkpoints shard Only applicable for models. The maximum size for a checkpoint before being sharded. Checkpoints shard
will then be each of size lower than this size. If expressed as a string, needs to be digits followed will then be each of size lower than this size. If expressed as a string, needs to be digits followed
......
...@@ -1659,7 +1659,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix ...@@ -1659,7 +1659,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix
Whether or not to only look at local files (i.e., do not try to download the model). Whether or not to only look at local files (i.e., do not try to download the model).
use_auth_token (`str` or *bool*, *optional*): use_auth_token (`str` or *bool*, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `transformers-cli login` (stored in `~/.huggingface`). when running `huggingface-cli login` (stored in `~/.huggingface`).
revision (`str`, *optional*, defaults to `"main"`): revision (`str`, *optional*, defaults to `"main"`):
The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any
......
...@@ -142,7 +142,7 @@ def get_feature_extractor_config( ...@@ -142,7 +142,7 @@ def get_feature_extractor_config(
'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
use_auth_token (`str` or *bool*, *optional*): use_auth_token (`str` or *bool*, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `transformers-cli login` (stored in `~/.huggingface`). when running `huggingface-cli login` (stored in `~/.huggingface`).
revision (`str`, *optional*, defaults to `"main"`): revision (`str`, *optional*, defaults to `"main"`):
The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any
...@@ -247,7 +247,7 @@ class AutoFeatureExtractor: ...@@ -247,7 +247,7 @@ class AutoFeatureExtractor:
'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
use_auth_token (`str` or *bool*, *optional*): use_auth_token (`str` or *bool*, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `transformers-cli login` (stored in `~/.huggingface`). when running `huggingface-cli login` (stored in `~/.huggingface`).
revision (`str`, *optional*, defaults to `"main"`): revision (`str`, *optional*, defaults to `"main"`):
The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any
......
...@@ -135,7 +135,7 @@ class AutoProcessor: ...@@ -135,7 +135,7 @@ class AutoProcessor:
'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
use_auth_token (`str` or *bool*, *optional*): use_auth_token (`str` or *bool*, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `transformers-cli login` (stored in `~/.huggingface`). when running `huggingface-cli login` (stored in `~/.huggingface`).
revision (`str`, *optional*, defaults to `"main"`): revision (`str`, *optional*, defaults to `"main"`):
The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any
......
...@@ -357,7 +357,7 @@ def get_tokenizer_config( ...@@ -357,7 +357,7 @@ def get_tokenizer_config(
'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
use_auth_token (`str` or *bool*, *optional*): use_auth_token (`str` or *bool*, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `transformers-cli login` (stored in `~/.huggingface`). when running `huggingface-cli login` (stored in `~/.huggingface`).
revision (`str`, *optional*, defaults to `"main"`): revision (`str`, *optional*, defaults to `"main"`):
The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any
......
...@@ -505,7 +505,7 @@ def pipeline( ...@@ -505,7 +505,7 @@ def pipeline(
Whether or not to use a Fast tokenizer if possible (a [`PreTrainedTokenizerFast`]). Whether or not to use a Fast tokenizer if possible (a [`PreTrainedTokenizerFast`]).
use_auth_token (`str` or *bool*, *optional*): use_auth_token (`str` or *bool*, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `transformers-cli login` (stored in `~/.huggingface`). when running `huggingface-cli login` (stored in `~/.huggingface`).
device_map (`str` or `Dict[str, Union[int, str, torch.device]`, *optional*): device_map (`str` or `Dict[str, Union[int, str, torch.device]`, *optional*):
Sent directly as `model_kwargs` (just a simpler shortcut). When `accelerate` library is present, set Sent directly as `model_kwargs` (just a simpler shortcut). When `accelerate` library is present, set
`device_map="auto"` to compute the most optimized `device_map` automatically. [More `device_map="auto"` to compute the most optimized `device_map` automatically. [More
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment