Unverified Commit e841b75d authored by Sylvain Gugger's avatar Sylvain Gugger Committed by GitHub
Browse files

Automate the lists in auto-xxx docs (#7061)

* More readable dict

* More nlp -> datasets

* Revert "More nlp -> datasets"

This reverts commit 3cd1883d226c63c4a686fc1fed35f2cd586ebe45.

* Automate the lists in auto-xxx docs

* More readable dict

* Revert "More nlp -> datasets"

This reverts commit 3cd1883d226c63c4a686fc1fed35f2cd586ebe45.

* Automate the lists in auto-xxx docs

* nlp -> datasets

* Fix new key
parent 0054a48c
Configuration Configuration
---------------------------------------------------- ----------------------------------------------------
The base class ``PretrainedConfig`` implements the common methods for loading/saving a configuration either from a The base class :class:`~transformers.PretrainedConfig` implements the common methods for loading/saving a configuration
local file or directory, or from a pretrained model configuration provided by the library (downloaded from either from a local file or directory, or from a pretrained model configuration provided by the library (downloaded
HuggingFace's AWS S3 repository). from HuggingFace's AWS S3 repository).
``PretrainedConfig``
~~~~~~~~~~~~~~~~~~~~~ PretrainedConfig
~~~~~~~~~~~~~~~~
.. autoclass:: transformers.PretrainedConfig .. autoclass:: transformers.PretrainedConfig
:members: :members:
...@@ -14,7 +14,7 @@ ...@@ -14,7 +14,7 @@
# limitations under the License. # limitations under the License.
""" Auto Config class. """ """ Auto Config class. """
import re
from collections import OrderedDict from collections import OrderedDict
from .configuration_albert import ALBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, AlbertConfig from .configuration_albert import ALBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, AlbertConfig
...@@ -78,122 +78,126 @@ ALL_PRETRAINED_CONFIG_ARCHIVE_MAP = dict( ...@@ -78,122 +78,126 @@ ALL_PRETRAINED_CONFIG_ARCHIVE_MAP = dict(
CONFIG_MAPPING = OrderedDict( CONFIG_MAPPING = OrderedDict(
[ [
( ("retribert", RetriBertConfig),
"retribert", ("t5", T5Config),
RetriBertConfig, ("mobilebert", MobileBertConfig),
), ("distilbert", DistilBertConfig),
( ("albert", AlbertConfig),
"t5", ("bert-generation", BertGenerationConfig),
T5Config, ("camembert", CamembertConfig),
), ("xlm-roberta", XLMRobertaConfig),
(
"mobilebert",
MobileBertConfig,
),
(
"distilbert",
DistilBertConfig,
),
(
"albert",
AlbertConfig,
),
(
"bert-generation",
BertGenerationConfig,
),
(
"camembert",
CamembertConfig,
),
(
"xlm-roberta",
XLMRobertaConfig,
),
("pegasus", PegasusConfig), ("pegasus", PegasusConfig),
( ("marian", MarianConfig),
"marian", ("mbart", MBartConfig),
MarianConfig, ("bart", BartConfig),
), ("reformer", ReformerConfig),
( ("longformer", LongformerConfig),
"mbart", ("roberta", RobertaConfig),
MBartConfig, ("flaubert", FlaubertConfig),
), ("bert", BertConfig),
( ("openai-gpt", OpenAIGPTConfig),
"bart", ("gpt2", GPT2Config),
BartConfig, ("transfo-xl", TransfoXLConfig),
), ("xlnet", XLNetConfig),
( ("xlm", XLMConfig),
"reformer", ("ctrl", CTRLConfig),
ReformerConfig, ("electra", ElectraConfig),
), ("encoder-decoder", EncoderDecoderConfig),
( ("funnel", FunnelConfig),
"longformer", ("lxmert", LxmertConfig),
LongformerConfig, ]
), )
(
"roberta", MODEL_NAMES_MAPPING = OrderedDict(
RobertaConfig, [
), ("retribert", "RetriBERT"),
( ("t5", "T5"),
"flaubert", ("mobilebert", "MobileBERT"),
FlaubertConfig, ("distilbert", "DistilBERT"),
), ("albert", "ALBERT"),
( ("bert-generation", "Bert Generation"),
"bert", ("camembert", "CamemBERT"),
BertConfig, ("xlm-roberta", "XLM-RoBERTa"),
), ("pegasus", "Pegasus"),
( ("marian", "Marian"),
"openai-gpt", ("mbart", "mBART"),
OpenAIGPTConfig, ("bart", "BART"),
), ("reformer", "Reformer"),
( ("longformer", "Longformer"),
"gpt2", ("roberta", "RoBERTa"),
GPT2Config, ("flaubert", "FlauBERT"),
), ("bert", "BERT"),
( ("openai-gpt", "OpenAI GPT"),
"transfo-xl", ("gpt2", "OpenAI GPT-2"),
TransfoXLConfig, ("transfo-xl", "Transformer-XL"),
), ("xlnet", "XLNet"),
( ("xlm", "XLM"),
"xlnet", ("ctrl", "CTRL"),
XLNetConfig, ("electra", "ELECTRA"),
), ("encoder-decoder", "Encoder decoder"),
( ("funnel", "Funnel Transformer"),
"xlm", ("lxmert", "LXMERT"),
XLMConfig,
),
(
"ctrl",
CTRLConfig,
),
(
"electra",
ElectraConfig,
),
(
"encoder-decoder",
EncoderDecoderConfig,
),
(
"funnel",
FunnelConfig,
),
(
"lxmert",
LxmertConfig,
),
] ]
) )
def _list_model_options(indent, config_to_class=None, use_model_types=True):
    """Build the RST bullet list of model options inserted in auto-class docstrings.

    Args:
        indent: Leading whitespace to prepend to every generated bullet line.
        config_to_class: Optional mapping from configuration class to model class;
            when given, the model class name is advertised instead of the config's.
        use_model_types: If ``True``, bullets are keyed by model type (e.g. ``bert``);
            if ``False``, bullets are keyed by configuration class name, and
            ``config_to_class`` is then mandatory.

    Returns:
        A single string with one RST bullet per line.

    Raises:
        ValueError: If ``use_model_types`` is ``False`` but no ``config_to_class``
            mapping was provided.
    """
    if not use_model_types and config_to_class is None:
        raise ValueError("Using `use_model_types=False` requires a `config_to_class` dictionary.")
    bullets = []
    if use_model_types:
        # Resolve, per model type, the class name to advertise (config class by
        # default, or the mapped model class when config_to_class is given).
        name_for_type = {}
        for model_type, config in CONFIG_MAPPING.items():
            if config_to_class is None:
                name_for_type[model_type] = config.__name__
            elif config in config_to_class:
                name_for_type[model_type] = config_to_class[config].__name__
        for model_type, cls_name in name_for_type.items():
            bullets.append(
                f"{indent}- **{model_type}** -- :class:`~transformers.{cls_name}` ({MODEL_NAMES_MAPPING[model_type]} model)"
            )
    else:
        # Pretty model name, keyed by configuration class name.
        model_name_for_config = {}
        for model_type, config in CONFIG_MAPPING.items():
            model_name_for_config[config.__name__] = MODEL_NAMES_MAPPING[model_type]
        # Mapped class name, keyed by configuration class name (kept as a dict so
        # duplicate config names collapse exactly like the original comprehension).
        cls_name_for_config = {}
        for config, mapped_class in config_to_class.items():
            cls_name_for_config[config.__name__] = mapped_class.__name__
        for config_name, cls_name in cls_name_for_config.items():
            bullets.append(
                f"{indent}- :class:`~transformers.{config_name}` configuration class: :class:`~transformers.{cls_name}` ({model_name_for_config[config_name]} model)"
            )
    return "\n".join(bullets)
def replace_list_option_in_docstrings(config_to_class=None, use_model_types=True):
    """Decorator factory that fills the ``List options`` placeholder of a docstring.

    The decorated function must contain a line consisting only of whitespace
    followed by ``List options``; that line is replaced by the bullet list built
    by :func:`_list_model_options`, indented to match the placeholder.

    Args:
        config_to_class: Forwarded to :func:`_list_model_options`.
        use_model_types: Forwarded to :func:`_list_model_options`; also adds one
            extra indentation level to the generated bullets.

    Returns:
        The decorator to apply to the target function.

    Raises:
        ValueError: If the decorated function's docstring has no ``List options``
            placeholder line.
    """

    def docstring_decorator(fn):
        docstrings = fn.__doc__
        lines = docstrings.split("\n")
        # Compile the placeholder pattern once and keep the match object, instead
        # of re-running `re.search` on every line twice (once in the scan loop,
        # once more on the hit to recover the indent).
        placeholder = re.compile(r"^(\s*)List options\s*$")
        match = None
        for i, line in enumerate(lines):
            match = placeholder.search(line)
            if match is not None:
                break
        if match is None:
            raise ValueError(
                f"The function {fn} should have an empty 'List options' in its docstring as placeholder, current docstring is:\n{docstrings}"
            )
        indent = match.groups()[0]
        if use_model_types:
            indent = f"{indent} "
        lines[i] = _list_model_options(indent, config_to_class=config_to_class, use_model_types=use_model_types)
        fn.__doc__ = "\n".join(lines)
        return fn

    return docstring_decorator
class AutoConfig: class AutoConfig:
r""" r"""
:class:`~transformers.AutoConfig` is a generic configuration class This is a generic configuration class that will be instantiated as one of the configuration classes of the library
that will be instantiated as one of the configuration classes of the library when created with the :meth:`~transformers.AutoConfig.from_pretrained` class method.
when created with the :func:`~transformers.AutoConfig.from_pretrained` class method.
The :func:`~transformers.AutoConfig.from_pretrained` method takes care of returning the correct model class instance This method takes care of returning the correct model class instance
based on the `model_type` property of the config object, or when it's missing, based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string. falling back to using pattern matching on the `pretrained_model_name_or_path` string.
""" """
...@@ -216,6 +220,7 @@ class AutoConfig: ...@@ -216,6 +220,7 @@ class AutoConfig:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings()
def from_pretrained(cls, pretrained_model_name_or_path, **kwargs): def from_pretrained(cls, pretrained_model_name_or_path, **kwargs):
r""" Instantiates one of the configuration classes of the library r""" Instantiates one of the configuration classes of the library
from a pre-trained model configuration. from a pre-trained model configuration.
...@@ -224,24 +229,7 @@ class AutoConfig: ...@@ -224,24 +229,7 @@ class AutoConfig:
based on the `model_type` property of the config object, or when it's missing, based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string: falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `t5`: :class:`~transformers.T5Config` (T5 model) List options
- `distilbert`: :class:`~transformers.DistilBertConfig` (DistilBERT model)
- `albert`: :class:`~transformers.AlbertConfig` (ALBERT model)
- `camembert`: :class:`~transformers.CamembertConfig` (CamemBERT model)
- `xlm-roberta`: :class:`~transformers.XLMRobertaConfig` (XLM-RoBERTa model)
- `longformer`: :class:`~transformers.LongformerConfig` (Longformer model)
- `roberta`: :class:`~transformers.RobertaConfig` (RoBERTa model)
- `reformer`: :class:`~transformers.ReformerConfig` (Reformer model)
- `bert`: :class:`~transformers.BertConfig` (Bert model)
- `openai-gpt`: :class:`~transformers.OpenAIGPTConfig` (OpenAI GPT model)
- `gpt2`: :class:`~transformers.GPT2Config` (OpenAI GPT-2 model)
- `transfo-xl`: :class:`~transformers.TransfoXLConfig` (Transformer-XL model)
- `xlnet`: :class:`~transformers.XLNetConfig` (XLNet model)
- `xlm`: :class:`~transformers.XLMConfig` (XLM model)
- `ctrl` : :class:`~transformers.CTRLConfig` (CTRL model)
- `flaubert` : :class:`~transformers.FlaubertConfig` (Flaubert model)
- `electra` : :class:`~transformers.ElectraConfig` (ELECTRA model)
- `funnel`: :class:`~transformers.FunnelConfig` (Funnel Transformer model)
Args: Args:
pretrained_model_name_or_path (:obj:`string`): pretrained_model_name_or_path (:obj:`string`):
......
...@@ -46,6 +46,7 @@ from .configuration_auto import ( ...@@ -46,6 +46,7 @@ from .configuration_auto import (
XLMConfig, XLMConfig,
XLMRobertaConfig, XLMRobertaConfig,
XLNetConfig, XLNetConfig,
replace_list_option_in_docstrings,
) )
from .configuration_marian import MarianConfig from .configuration_marian import MarianConfig
from .configuration_utils import PretrainedConfig from .configuration_utils import PretrainedConfig
...@@ -416,6 +417,7 @@ class AutoModel: ...@@ -416,6 +417,7 @@ class AutoModel:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(MODEL_MAPPING, use_model_types=False)
def from_config(cls, config): def from_config(cls, config):
r"""Instantiates one of the base model classes of the library r"""Instantiates one of the base model classes of the library
from a configuration. from a configuration.
...@@ -429,19 +431,7 @@ class AutoModel: ...@@ -429,19 +431,7 @@ class AutoModel:
config (:class:`~transformers.PretrainedConfig`): config (:class:`~transformers.PretrainedConfig`):
The model class to instantiate is selected based on the configuration class: The model class to instantiate is selected based on the configuration class:
- isInstance of `distilbert` configuration class: :class:`~transformers.DistilBertModel` (DistilBERT model) List options
- isInstance of `longformer` configuration class: :class:`~transformers.LongformerModel` (Longformer model)
- isInstance of `roberta` configuration class: :class:`~transformers.RobertaModel` (RoBERTa model)
- isInstance of `bert` configuration class: :class:`~transformers.BertModel` (Bert model)
- isInstance of `openai-gpt` configuration class: :class:`~transformers.OpenAIGPTModel` (OpenAI GPT model)
- isInstance of `gpt2` configuration class: :class:`~transformers.GPT2Model` (OpenAI GPT-2 model)
- isInstance of `ctrl` configuration class: :class:`~transformers.CTRLModel` (Salesforce CTRL model)
- isInstance of `transfo-xl` configuration class: :class:`~transformers.TransfoXLModel` (Transformer-XL model)
- isInstance of `xlnet` configuration class: :class:`~transformers.XLNetModel` (XLNet model)
- isInstance of `xlm` configuration class: :class:`~transformers.XLMModel` (XLM model)
- isInstance of `flaubert` configuration class: :class:`~transformers.FlaubertModel` (Flaubert model)
- isInstance of `electra` configuration class: :class:`~transformers.ElectraModel` (Electra model)
- isInstance of `funnel` configuration class: :class:`~transformers.FunnelModel` (Funnel Transformer model)
Examples:: Examples::
...@@ -459,6 +449,7 @@ class AutoModel: ...@@ -459,6 +449,7 @@ class AutoModel:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(MODEL_MAPPING)
def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
r"""Instantiates one of the base model classes of the library r"""Instantiates one of the base model classes of the library
from a pre-trained model configuration. from a pre-trained model configuration.
...@@ -467,23 +458,7 @@ class AutoModel: ...@@ -467,23 +458,7 @@ class AutoModel:
based on the `model_type` property of the config object, or when it's missing, based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string: falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `t5`: :class:`~transformers.T5Model` (T5 model) List options
- `distilbert`: :class:`~transformers.DistilBertModel` (DistilBERT model)
- `albert`: :class:`~transformers.AlbertModel` (ALBERT model)
- `camembert`: :class:`~transformers.CamembertModel` (CamemBERT model)
- `xlm-roberta`: :class:`~transformers.XLMRobertaModel` (XLM-RoBERTa model)
- `longformer` :class:`~transformers.LongformerModel` (Longformer model)
- `roberta`: :class:`~transformers.RobertaModel` (RoBERTa model)
- `bert`: :class:`~transformers.BertModel` (Bert model)
- `openai-gpt`: :class:`~transformers.OpenAIGPTModel` (OpenAI GPT model)
- `gpt2`: :class:`~transformers.GPT2Model` (OpenAI GPT-2 model)
- `transfo-xl`: :class:`~transformers.TransfoXLModel` (Transformer-XL model)
- `xlnet`: :class:`~transformers.XLNetModel` (XLNet model)
- `xlm`: :class:`~transformers.XLMModel` (XLM model)
- `ctrl`: :class:`~transformers.CTRLModel` (Salesforce CTRL model)
- `flaubert`: :class:`~transformers.FlaubertModel` (Flaubert model)
- `electra`: :class:`~transformers.ElectraModel` (Electra model)
- `funnel`: :class:`~transformers.FunnelModel` (Funnel Transformer model)
The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated) The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated)
To train the model, you should first set it back in training mode with `model.train()` To train the model, you should first set it back in training mode with `model.train()`
...@@ -575,6 +550,7 @@ class AutoModelForPreTraining: ...@@ -575,6 +550,7 @@ class AutoModelForPreTraining:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(MODEL_FOR_PRETRAINING_MAPPING, use_model_types=False)
def from_config(cls, config): def from_config(cls, config):
r"""Instantiates one of the base model classes of the library r"""Instantiates one of the base model classes of the library
from a configuration. from a configuration.
...@@ -588,18 +564,7 @@ class AutoModelForPreTraining: ...@@ -588,18 +564,7 @@ class AutoModelForPreTraining:
config (:class:`~transformers.PretrainedConfig`): config (:class:`~transformers.PretrainedConfig`):
The model class to instantiate is selected based on the configuration class: The model class to instantiate is selected based on the configuration class:
- isInstance of `distilbert` configuration class: :class:`~transformers.DistilBertForMaskedLM` (DistilBERT model) List options
- isInstance of `longformer` configuration class: :class:`~transformers.LongformerForMaskedLM` (Longformer model)
- isInstance of `roberta` configuration class: :class:`~transformers.RobertaForMaskedLM` (RoBERTa model)
- isInstance of `bert` configuration class: :class:`~transformers.BertForPreTraining` (Bert model)
- isInstance of `openai-gpt` configuration class: :class:`~transformers.OpenAIGPTLMHeadModel` (OpenAI GPT model)
- isInstance of `gpt2` configuration class: :class:`~transformers.GPT2LMHeadModel` (OpenAI GPT-2 model)
- isInstance of `ctrl` configuration class: :class:`~transformers.CTRLLMHeadModel` (Salesforce CTRL model)
- isInstance of `transfo-xl` configuration class: :class:`~transformers.TransfoXLLMHeadModel` (Transformer-XL model)
- isInstance of `xlnet` configuration class: :class:`~transformers.XLNetLMHeadModel` (XLNet model)
- isInstance of `xlm` configuration class: :class:`~transformers.XLMWithLMHeadModel` (XLM model)
- isInstance of `flaubert` configuration class: :class:`~transformers.FlaubertWithLMHeadModel` (Flaubert model)
- isInstance of `electra` configuration class: :class:`~transformers.ElectraForPreTraining` (Electra model)
Examples:: Examples::
...@@ -617,6 +582,7 @@ class AutoModelForPreTraining: ...@@ -617,6 +582,7 @@ class AutoModelForPreTraining:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(MODEL_FOR_PRETRAINING_MAPPING)
def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
r"""Instantiates one of the model classes of the library -with the architecture used for pretraining this model– from a pre-trained model configuration. r"""Instantiates one of the model classes of the library -with the architecture used for pretraining this model– from a pre-trained model configuration.
...@@ -624,22 +590,7 @@ class AutoModelForPreTraining: ...@@ -624,22 +590,7 @@ class AutoModelForPreTraining:
based on the `model_type` property of the config object, or when it's missing, based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string: falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `t5`: :class:`~transformers.T5ModelWithLMHead` (T5 model) List options
- `distilbert`: :class:`~transformers.DistilBertForMaskedLM` (DistilBERT model)
- `albert`: :class:`~transformers.AlbertForMaskedLM` (ALBERT model)
- `camembert`: :class:`~transformers.CamembertForMaskedLM` (CamemBERT model)
- `xlm-roberta`: :class:`~transformers.XLMRobertaForMaskedLM` (XLM-RoBERTa model)
- `longformer`: :class:`~transformers.LongformerForMaskedLM` (Longformer model)
- `roberta`: :class:`~transformers.RobertaForMaskedLM` (RoBERTa model)
- `bert`: :class:`~transformers.BertForPreTraining` (Bert model)
- `openai-gpt`: :class:`~transformers.OpenAIGPTLMHeadModel` (OpenAI GPT model)
- `gpt2`: :class:`~transformers.GPT2LMHeadModel` (OpenAI GPT-2 model)
- `transfo-xl`: :class:`~transformers.TransfoXLLMHeadModel` (Transformer-XL model)
- `xlnet`: :class:`~transformers.XLNetLMHeadModel` (XLNet model)
- `xlm`: :class:`~transformers.XLMWithLMHeadModel` (XLM model)
- `ctrl`: :class:`~transformers.CTRLLMHeadModel` (Salesforce CTRL model)
- `flaubert`: :class:`~transformers.FlaubertWithLMHeadModel` (Flaubert model)
- `electra`: :class:`~transformers.ElectraForPreTraining` (Electra model)
The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated) The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated)
To train the model, you should first set it back in training mode with `model.train()` To train the model, you should first set it back in training mode with `model.train()`
...@@ -726,6 +677,7 @@ class AutoModelWithLMHead: ...@@ -726,6 +677,7 @@ class AutoModelWithLMHead:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(MODEL_WITH_LM_HEAD_MAPPING, use_model_types=False)
def from_config(cls, config): def from_config(cls, config):
r"""Instantiates one of the base model classes of the library r"""Instantiates one of the base model classes of the library
from a configuration. from a configuration.
...@@ -739,19 +691,7 @@ class AutoModelWithLMHead: ...@@ -739,19 +691,7 @@ class AutoModelWithLMHead:
config (:class:`~transformers.PretrainedConfig`): config (:class:`~transformers.PretrainedConfig`):
The model class to instantiate is selected based on the configuration class: The model class to instantiate is selected based on the configuration class:
- isInstance of `distilbert` configuration class: :class:`~transformers.DistilBertForMaskedLM` (DistilBERT model) List options
- isInstance of `longformer` configuration class: :class:`~transformers.LongformerForMaskedLM` (Longformer model)
- isInstance of `roberta` configuration class: :class:`~transformers.RobertaForMaskedLM` (RoBERTa model)
- isInstance of `bert` configuration class: :class:`~transformers.BertForMaskedLM` (Bert model)
- isInstance of `openai-gpt` configuration class: :class:`~transformers.OpenAIGPTLMHeadModel` (OpenAI GPT model)
- isInstance of `gpt2` configuration class: :class:`~transformers.GPT2LMHeadModel` (OpenAI GPT-2 model)
- isInstance of `ctrl` configuration class: :class:`~transformers.CTRLLMHeadModel` (Salesforce CTRL model)
- isInstance of `transfo-xl` configuration class: :class:`~transformers.TransfoXLLMHeadModel` (Transformer-XL model)
- isInstance of `xlnet` configuration class: :class:`~transformers.XLNetLMHeadModel` (XLNet model)
- isInstance of `xlm` configuration class: :class:`~transformers.XLMWithLMHeadModel` (XLM model)
- isInstance of `flaubert` configuration class: :class:`~transformers.FlaubertWithLMHeadModel` (Flaubert model)
- isInstance of `electra` configuration class: :class:`~transformers.ElectraForMaskedLM` (Electra model)
- isInstance of `funnel` configuration class: :class:`~transformers.FunnelForMaskedLM` (Funnel Transformer model)
Examples:: Examples::
...@@ -773,6 +713,7 @@ class AutoModelWithLMHead: ...@@ -773,6 +713,7 @@ class AutoModelWithLMHead:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(MODEL_WITH_LM_HEAD_MAPPING)
def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
r"""Instantiates one of the language modeling model classes of the library r"""Instantiates one of the language modeling model classes of the library
from a pre-trained model configuration. from a pre-trained model configuration.
...@@ -781,23 +722,7 @@ class AutoModelWithLMHead: ...@@ -781,23 +722,7 @@ class AutoModelWithLMHead:
based on the `model_type` property of the config object, or when it's missing, based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string: falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `t5`: :class:`~transformers.T5ForConditionalGeneration` (T5 model) List options
- `distilbert`: :class:`~transformers.DistilBertForMaskedLM` (DistilBERT model)
- `albert`: :class:`~transformers.AlbertForMaskedLM` (ALBERT model)
- `camembert`: :class:`~transformers.CamembertForMaskedLM` (CamemBERT model)
- `xlm-roberta`: :class:`~transformers.XLMRobertaForMaskedLM` (XLM-RoBERTa model)
- `longformer`: :class:`~transformers.LongformerForMaskedLM` (Longformer model)
- `roberta`: :class:`~transformers.RobertaForMaskedLM` (RoBERTa model)
- `bert`: :class:`~transformers.BertForMaskedLM` (Bert model)
- `openai-gpt`: :class:`~transformers.OpenAIGPTLMHeadModel` (OpenAI GPT model)
- `gpt2`: :class:`~transformers.GPT2LMHeadModel` (OpenAI GPT-2 model)
- `transfo-xl`: :class:`~transformers.TransfoXLLMHeadModel` (Transformer-XL model)
- `xlnet`: :class:`~transformers.XLNetLMHeadModel` (XLNet model)
- `xlm`: :class:`~transformers.XLMWithLMHeadModel` (XLM model)
- `ctrl`: :class:`~transformers.CTRLLMHeadModel` (Salesforce CTRL model)
- `flaubert`: :class:`~transformers.FlaubertWithLMHeadModel` (Flaubert model)
- `electra`: :class:`~transformers.ElectraForMaskedLM` (Electra model)
- `funnel`: :class:`~transformers.FunnelForMaskedLM` (Funnel Transformer model)
The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated) The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated)
To train the model, you should first set it back in training mode with `model.train()` To train the model, you should first set it back in training mode with `model.train()`
...@@ -888,6 +813,7 @@ class AutoModelForCausalLM: ...@@ -888,6 +813,7 @@ class AutoModelForCausalLM:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(MODEL_FOR_CAUSAL_LM_MAPPING, use_model_types=False)
def from_config(cls, config): def from_config(cls, config):
r"""Instantiates one of the base model classes of the library r"""Instantiates one of the base model classes of the library
from a configuration. from a configuration.
...@@ -901,13 +827,7 @@ class AutoModelForCausalLM: ...@@ -901,13 +827,7 @@ class AutoModelForCausalLM:
config (:class:`~transformers.PretrainedConfig`): config (:class:`~transformers.PretrainedConfig`):
The model class to instantiate is selected based on the configuration class: The model class to instantiate is selected based on the configuration class:
- isInstance of `bert` configuration class: :class:`~transformers.BertLMHeadModel` (Bert model) List options
- isInstance of `openai-gpt` configuration class: :class:`~transformers.OpenAIGPTLMHeadModel` (OpenAI GPT model)
- isInstance of `gpt2` configuration class: :class:`~transformers.GPT2LMHeadModel` (OpenAI GPT-2 model)
- isInstance of `ctrl` configuration class: :class:`~transformers.CTRLLMHeadModel` (Salesforce CTRL model)
- isInstance of `transfo-xl` configuration class: :class:`~transformers.TransfoXLLMHeadModel` (Transformer-XL model)
- isInstance of `xlnet` configuration class: :class:`~transformers.XLNetLMHeadModel` (XLNet model)
- isInstance of `reformer` configuration class: :class:`~transformers.ReformerModelWithLMHead` (Reformer model)
Examples:: Examples::
...@@ -925,6 +845,7 @@ class AutoModelForCausalLM: ...@@ -925,6 +845,7 @@ class AutoModelForCausalLM:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(MODEL_FOR_CAUSAL_LM_MAPPING)
def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
r"""Instantiates one of the language modeling model classes of the library r"""Instantiates one of the language modeling model classes of the library
from a pre-trained model configuration. from a pre-trained model configuration.
...@@ -933,13 +854,7 @@ class AutoModelForCausalLM: ...@@ -933,13 +854,7 @@ class AutoModelForCausalLM:
based on the `model_type` property of the config object, or when it's missing, based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string: falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `bert`: :class:`~transformers.BertLMHeadModel` (Bert model) List options
- `openai-gpt`: :class:`~transformers.OpenAIGPTLMHeadModel` (OpenAI GPT model)
- `gpt2`: :class:`~transformers.GPT2LMHeadModel` (OpenAI GPT-2 model)
- `transfo-xl`: :class:`~transformers.TransfoXLLMHeadModel` (Transformer-XL model)
- `xlnet`: :class:`~transformers.XLNetLMHeadModel` (XLNet model)
- `ctrl`: :class:`~transformers.CTRLLMHeadModel` (Salesforce CTRL model)
- `reformer`: :class:`~transformers.ReformerModelWithLMHead` (Google Reformer model)
The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated) The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated)
To train the model, you should first set it back in training mode with `model.train()` To train the model, you should first set it back in training mode with `model.train()`
...@@ -1026,6 +941,7 @@ class AutoModelForMaskedLM: ...@@ -1026,6 +941,7 @@ class AutoModelForMaskedLM:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(MODEL_FOR_MASKED_LM_MAPPING, use_model_types=False)
def from_config(cls, config): def from_config(cls, config):
r"""Instantiates one of the base model classes of the library r"""Instantiates one of the base model classes of the library
from a configuration. from a configuration.
...@@ -1038,18 +954,8 @@ class AutoModelForMaskedLM: ...@@ -1038,18 +954,8 @@ class AutoModelForMaskedLM:
Args: Args:
config (:class:`~transformers.PretrainedConfig`): config (:class:`~transformers.PretrainedConfig`):
The model class to instantiate is selected based on the configuration class: The model class to instantiate is selected based on the configuration class:
- isInstance of `distilbert` configuration class: :class:`~transformers.DistilBertForMaskedLM` (DistilBERT model)
- isInstance of `longformer` configuration class: :class:`~transformers.LongformerForMaskedLM` (Longformer model)
- isInstance of `roberta` configuration class: :class:`~transformers.RobertaForMaskedLM` (RoBERTa model)
- isInstance of `bert` configuration class: :class:`~transformers.BertForMaskedLM` (Bert model)
- isInstance of `flaubert` configuration class: :class:`~transformers.FlaubertWithLMHeadModel` (Flaubert model)
- isInstance of `xlm` configuration class: :class:`~transformers.XLMWithLMHeadModel` (XLM model)
- isInstance of `xlm-roberta` configuration class: :class:`~transformers.XLMRobertaForMaskedLM` (XLM-Roberta model)
- isInstance of `electra` configuration class: :class:`~transformers.ElectraForMaskedLM` (Electra model)
- isInstance of `camembert` configuration class: :class:`~transformers.CamembertForMaskedLM` (Camembert model)
- isInstance of `albert` configuration class: :class:`~transformers.AlbertForMaskedLM` (Albert model)
- isInstance of `funnel` configuration class: :class:`~transformers.FunnelForMaskedLM` (Funnel Transformer model)
List options
Examples:: Examples::
...@@ -1067,6 +973,7 @@ class AutoModelForMaskedLM: ...@@ -1067,6 +973,7 @@ class AutoModelForMaskedLM:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(MODEL_FOR_MASKED_LM_MAPPING)
def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
r"""Instantiates one of the language modeling model classes of the library r"""Instantiates one of the language modeling model classes of the library
from a pre-trained model configuration. from a pre-trained model configuration.
...@@ -1075,17 +982,7 @@ class AutoModelForMaskedLM: ...@@ -1075,17 +982,7 @@ class AutoModelForMaskedLM:
based on the `model_type` property of the config object, or when it's missing, based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string: falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `distilbert`: :class:`~transformers.DistilBertForMaskedLM` (DistilBERT model) List options
- `albert`: :class:`~transformers.AlbertForMaskedLM` (ALBERT model)
- `camembert`: :class:`~transformers.CamembertForMaskedLM` (CamemBERT model)
- `xlm-roberta`: :class:`~transformers.XLMRobertaForMaskedLM` (XLM-RoBERTa model)
- `longformer`: :class:`~transformers.LongformerForMaskedLM` (Longformer model)
- `roberta`: :class:`~transformers.RobertaForMaskedLM` (RoBERTa model)
- `xlm`: :class:`~transformers.XLMWithLMHeadModel` (XLM model)
- `flaubert`: :class:`~transformers.FlaubertWithLMHeadModel` (Flaubert model)
- `electra`: :class:`~transformers.ElectraForMaskedLM` (Electra model)
- `bert`: :class:`~transformers.BertLMHeadModel` (Bert model)
- `funnel`: :class:`~transformers.FunnelForMaskedLM` (Funnel Transformer model)
The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated) The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated)
To train the model, you should first set it back in training mode with `model.train()` To train the model, you should first set it back in training mode with `model.train()`
...@@ -1172,6 +1069,7 @@ class AutoModelForSeq2SeqLM: ...@@ -1172,6 +1069,7 @@ class AutoModelForSeq2SeqLM:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING, use_model_types=False)
def from_config(cls, config): def from_config(cls, config):
r"""Instantiates one of the base model classes of the library r"""Instantiates one of the base model classes of the library
from a configuration. from a configuration.
...@@ -1185,10 +1083,7 @@ class AutoModelForSeq2SeqLM: ...@@ -1185,10 +1083,7 @@ class AutoModelForSeq2SeqLM:
config (:class:`~transformers.PretrainedConfig`): config (:class:`~transformers.PretrainedConfig`):
The model class to instantiate is selected based on the configuration class: The model class to instantiate is selected based on the configuration class:
- isInstance of `t5` configuration class: :class:`~transformers.T5ForConditionalGeneration` (T5 model) List options
- isInstance of `bart` configuration class: :class:`~transformers.BartForConditionalGeneration` (Bart model)
- isInstance of `marian` configuration class: :class:`~transformers.MarianMTModel` (Marian model)
- isInstance of `encoder-decoder` configuration class: :class:`~transformers.EncoderDecoderModel` (Encoder Decoder model)
Examples:: Examples::
...@@ -1208,6 +1103,7 @@ class AutoModelForSeq2SeqLM: ...@@ -1208,6 +1103,7 @@ class AutoModelForSeq2SeqLM:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING)
def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
r"""Instantiates one of the language modeling model classes of the library r"""Instantiates one of the language modeling model classes of the library
from a pre-trained model configuration. from a pre-trained model configuration.
...@@ -1216,10 +1112,7 @@ class AutoModelForSeq2SeqLM: ...@@ -1216,10 +1112,7 @@ class AutoModelForSeq2SeqLM:
based on the `model_type` property of the config object, or when it's missing, based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string: falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `t5`: :class:`~transformers.T5ForConditionalGeneration` (T5 model) List options
- `bart`: :class:`~transformers.BartForConditionalGeneration` (Bert model)
- `marian`: :class:`~transformers.MarianMTModel` (Marian model)
- `encoder-decoder`: :class:`~transformers.EncoderDecoderModel` (Encoder Decoder model)
The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated) The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated)
To train the model, you should first set it back in training mode with `model.train()` To train the model, you should first set it back in training mode with `model.train()`
...@@ -1308,6 +1201,7 @@ class AutoModelForSequenceClassification: ...@@ -1308,6 +1201,7 @@ class AutoModelForSequenceClassification:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING, use_model_types=False)
def from_config(cls, config): def from_config(cls, config):
r"""Instantiates one of the base model classes of the library r"""Instantiates one of the base model classes of the library
from a configuration. from a configuration.
...@@ -1321,16 +1215,7 @@ class AutoModelForSequenceClassification: ...@@ -1321,16 +1215,7 @@ class AutoModelForSequenceClassification:
config (:class:`~transformers.PretrainedConfig`): config (:class:`~transformers.PretrainedConfig`):
The model class to instantiate is selected based on the configuration class: The model class to instantiate is selected based on the configuration class:
- isInstance of `distilbert` configuration class: :class:`~transformers.DistilBertForSequenceClassification` (DistilBERT model) List options
- isInstance of `albert` configuration class: :class:`~transformers.AlbertForSequenceClassification` (ALBERT model)
- isInstance of `camembert` configuration class: :class:`~transformers.CamembertForSequenceClassification` (CamemBERT model)
- isInstance of `xlm roberta` configuration class: :class:`~transformers.XLMRobertaForSequenceClassification` (XLM-RoBERTa model)
- isInstance of `roberta` configuration class: :class:`~transformers.RobertaForSequenceClassification` (RoBERTa model)
- isInstance of `bert` configuration class: :class:`~transformers.BertForSequenceClassification` (Bert model)
- isInstance of `xlnet` configuration class: :class:`~transformers.XLNetForSequenceClassification` (XLNet model)
- isInstance of `xlm` configuration class: :class:`~transformers.XLMForSequenceClassification` (XLM model)
- isInstance of `flaubert` configuration class: :class:`~transformers.FlaubertForSequenceClassification` (Flaubert model)
- isInstance of `funnel` configuration class: :class:`~transformers.FunnelModelForSequenceClassification` (Funnel Transformer model)
Examples:: Examples::
...@@ -1350,6 +1235,7 @@ class AutoModelForSequenceClassification: ...@@ -1350,6 +1235,7 @@ class AutoModelForSequenceClassification:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING)
def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
r"""Instantiates one of the sequence classification model classes of the library r"""Instantiates one of the sequence classification model classes of the library
from a pre-trained model configuration. from a pre-trained model configuration.
...@@ -1358,15 +1244,7 @@ class AutoModelForSequenceClassification: ...@@ -1358,15 +1244,7 @@ class AutoModelForSequenceClassification:
based on the `model_type` property of the config object, or when it's missing, based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string: falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `distilbert`: :class:`~transformers.DistilBertForSequenceClassification` (DistilBERT model) List options
- `albert`: :class:`~transformers.AlbertForSequenceClassification` (ALBERT model)
- `camembert`: :class:`~transformers.CamembertForSequenceClassification` (CamemBERT model)
- `xlm-roberta`: :class:`~transformers.XLMRobertaForSequenceClassification` (XLM-RoBERTa model)
- `roberta`: :class:`~transformers.RobertaForSequenceClassification` (RoBERTa model)
- `bert`: :class:`~transformers.BertForSequenceClassification` (Bert model)
- `xlnet`: :class:`~transformers.XLNetForSequenceClassification` (XLNet model)
- `flaubert`: :class:`~transformers.FlaubertForSequenceClassification` (Flaubert model)
- `funnel`: :class:`~transformers.FunnelForSequenceClassification` (Funnel Transformer model)
The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated) The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated)
To train the model, you should first set it back in training mode with `model.train()` To train the model, you should first set it back in training mode with `model.train()`
...@@ -1462,6 +1340,7 @@ class AutoModelForQuestionAnswering: ...@@ -1462,6 +1340,7 @@ class AutoModelForQuestionAnswering:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(MODEL_FOR_QUESTION_ANSWERING_MAPPING, use_model_types=False)
def from_config(cls, config): def from_config(cls, config):
r"""Instantiates one of the base model classes of the library r"""Instantiates one of the base model classes of the library
from a configuration. from a configuration.
...@@ -1475,13 +1354,7 @@ class AutoModelForQuestionAnswering: ...@@ -1475,13 +1354,7 @@ class AutoModelForQuestionAnswering:
config (:class:`~transformers.PretrainedConfig`): config (:class:`~transformers.PretrainedConfig`):
The model class to instantiate is selected based on the configuration class: The model class to instantiate is selected based on the configuration class:
- isInstance of `distilbert` configuration class: :class:`~transformers.DistilBertForQuestionAnswering` (DistilBERT model) List options
- isInstance of `albert` configuration class: :class:`~transformers.AlbertForQuestionAnswering` (ALBERT model)
- isInstance of `bert` configuration class: :class:`~transformers.BertModelForQuestionAnswering` (Bert model)
- isInstance of `xlnet` configuration class: :class:`~transformers.XLNetForQuestionAnswering` (XLNet model)
- isInstance of `xlm` configuration class: :class:`~transformers.XLMForQuestionAnswering` (XLM model)
- isInstance of `flaubert` configuration class: :class:`~transformers.FlaubertForQuestionAnswering` (XLM model)
- isInstance of `funnel` configuration class: :class:`~transformers.FunnelForQuestionAnswering` (Funnel Transformer model)
Examples:: Examples::
...@@ -1502,6 +1375,7 @@ class AutoModelForQuestionAnswering: ...@@ -1502,6 +1375,7 @@ class AutoModelForQuestionAnswering:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(MODEL_FOR_QUESTION_ANSWERING_MAPPING)
def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
r"""Instantiates one of the question answering model classes of the library r"""Instantiates one of the question answering model classes of the library
from a pre-trained model configuration. from a pre-trained model configuration.
...@@ -1510,13 +1384,7 @@ class AutoModelForQuestionAnswering: ...@@ -1510,13 +1384,7 @@ class AutoModelForQuestionAnswering:
based on the `model_type` property of the config object, or when it's missing, based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string: falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `distilbert`: :class:`~transformers.DistilBertForQuestionAnswering` (DistilBERT model) List options
- `albert`: :class:`~transformers.AlbertForQuestionAnswering` (ALBERT model)
- `bert`: :class:`~transformers.BertForQuestionAnswering` (Bert model)
- `xlnet`: :class:`~transformers.XLNetForQuestionAnswering` (XLNet model)
- `xlm`: :class:`~transformers.XLMForQuestionAnswering` (XLM model)
- `flaubert`: :class:`~transformers.FlaubertForQuestionAnswering` (XLM model)
- `funnel`: :class:`~transformers.FunnelForQuestionAnswering` (Funnel Transformer model)
The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated) The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated)
To train the model, you should first set it back in training mode with `model.train()` To train the model, you should first set it back in training mode with `model.train()`
...@@ -1610,6 +1478,7 @@ class AutoModelForTokenClassification: ...@@ -1610,6 +1478,7 @@ class AutoModelForTokenClassification:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING, use_model_types=False)
def from_config(cls, config): def from_config(cls, config):
r"""Instantiates one of the base model classes of the library r"""Instantiates one of the base model classes of the library
from a configuration. from a configuration.
...@@ -1623,17 +1492,7 @@ class AutoModelForTokenClassification: ...@@ -1623,17 +1492,7 @@ class AutoModelForTokenClassification:
config (:class:`~transformers.PretrainedConfig`): config (:class:`~transformers.PretrainedConfig`):
The model class to instantiate is selected based on the configuration class: The model class to instantiate is selected based on the configuration class:
- isInstance of `distilbert` configuration class: :class:`~transformers.DistilBertModelForTokenClassification` (DistilBERT model) List options
- isInstance of `xlm` configuration class: :class:`~transformers.XLMForTokenClassification` (XLM model)
- isInstance of `xlm roberta` configuration class: :class:`~transformers.XLMRobertaModelForTokenClassification` (XLMRoberta model)
- isInstance of `bert` configuration class: :class:`~transformers.BertModelForTokenClassification` (Bert model)
- isInstance of `albert` configuration class: :class:`~transformers.AlbertForTokenClassification` (AlBert model)
- isInstance of `xlnet` configuration class: :class:`~transformers.XLNetModelForTokenClassification` (XLNet model)
- isInstance of `flaubert` configuration class: :class:`~transformers.FlaubertForTokenClassification` (Flaubert model)
- isInstance of `camembert` configuration class: :class:`~transformers.CamembertModelForTokenClassification` (Camembert model)
- isInstance of `roberta` configuration class: :class:`~transformers.RobertaModelForTokenClassification` (Roberta model)
- isInstance of `electra` configuration class: :class:`~transformers.ElectraForTokenClassification` (Electra model)
- isInstance of `funnel` configuration class: :class:`~transformers.FunnelForTokenClassification` (Funnel Transformer model)
Examples:: Examples::
...@@ -1654,6 +1513,7 @@ class AutoModelForTokenClassification: ...@@ -1654,6 +1513,7 @@ class AutoModelForTokenClassification:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING)
def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
r"""Instantiates one of the question answering model classes of the library r"""Instantiates one of the question answering model classes of the library
from a pre-trained model configuration. from a pre-trained model configuration.
...@@ -1662,16 +1522,7 @@ class AutoModelForTokenClassification: ...@@ -1662,16 +1522,7 @@ class AutoModelForTokenClassification:
based on the `model_type` property of the config object, or when it's missing, based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string: falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `distilbert`: :class:`~transformers.DistilBertForTokenClassification` (DistilBERT model) List options
- `xlm`: :class:`~transformers.XLMForTokenClassification` (XLM model)
- `xlm-roberta`: :class:`~transformers.XLMRobertaForTokenClassification` (XLM-RoBERTa?Para model)
- `camembert`: :class:`~transformers.CamembertForTokenClassification` (Camembert model)
- `bert`: :class:`~transformers.BertForTokenClassification` (Bert model)
- `xlnet`: :class:`~transformers.XLNetForTokenClassification` (XLNet model)
- `flaubert`: :class:`~transformers.FlaubertForTokenClassification` (Flaubert model)
- `roberta`: :class:`~transformers.RobertaForTokenClassification` (Roberta model)
- `electra`: :class:`~transformers.ElectraForTokenClassification` (Electra model)
- `funnel`: :class:`~transformers.FunnelForTokenClassification` (Funnel Transformer model)
The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated) The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated)
To train the model, you should first set it back in training mode with `model.train()` To train the model, you should first set it back in training mode with `model.train()`
...@@ -1765,7 +1616,27 @@ class AutoModelForMultipleChoice: ...@@ -1765,7 +1616,27 @@ class AutoModelForMultipleChoice:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(MODEL_FOR_MULTIPLE_CHOICE_MAPPING, use_model_types=False)
def from_config(cls, config): def from_config(cls, config):
r"""Instantiates one of the base model classes of the library
from a configuration.
Note:
Loading a model from its configuration file does **not** load the model weights.
It only affects the model's configuration. Use :func:`~transformers.AutoModel.from_pretrained` to load
the model weights
Args:
config (:class:`~transformers.PretrainedConfig`):
The model class to instantiate is selected based on the configuration class:
List options
Examples::
config = BertConfig.from_pretrained('bert-base-uncased') # Download configuration from S3 and cache.
model = AutoModelForMultipleChoice.from_config(config) # E.g. model was saved using `save_pretrained('./test/saved_model/')`
"""
for config_class, model_class in MODEL_FOR_MULTIPLE_CHOICE_MAPPING.items(): for config_class, model_class in MODEL_FOR_MULTIPLE_CHOICE_MAPPING.items():
if isinstance(config, config_class): if isinstance(config, config_class):
return model_class(config) return model_class(config)
...@@ -1780,7 +1651,71 @@ class AutoModelForMultipleChoice: ...@@ -1780,7 +1651,71 @@ class AutoModelForMultipleChoice:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(MODEL_FOR_MULTIPLE_CHOICE_MAPPING)
def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
r"""Instantiates one of the question answering model classes of the library
from a pre-trained model configuration.
The `from_pretrained()` method takes care of returning the correct model class instance
based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string:
List options
The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated)
To train the model, you should first set it back in training mode with `model.train()`
Args:
pretrained_model_name_or_path:
Either:
- a string with the `shortcut name` of a pre-trained model to load from cache or download, e.g.: ``bert-base-uncased``.
- a path to a `directory` containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/``.
- a path or url to a `tensorflow index checkpoint file` (e.g. `./tf_model/model.ckpt.index`). In this case, ``from_tf`` should be set to True and a configuration object should be provided as ``config`` argument. This loading path is slower than converting the TensorFlow checkpoint in a PyTorch model using the provided conversion scripts and loading the PyTorch model afterwards.
model_args: (`optional`) Sequence of positional arguments:
All remaning positional arguments will be passed to the underlying model's ``__init__`` method
config: (`optional`) instance of a class derived from :class:`~transformers.PretrainedConfig`:
Configuration for the model to use instead of an automatically loaded configuation. Configuration can be automatically loaded when:
- the model is a model provided by the library (loaded with the ``shortcut-name`` string of a pretrained model), or
- the model was saved using :func:`~transformers.PreTrainedModel.save_pretrained` and is reloaded by suppling the save directory.
- the model is loaded by suppling a local directory as ``pretrained_model_name_or_path`` and a configuration JSON file named `config.json` is found in the directory.
state_dict: (`optional`) dict:
an optional state dictionary for the model to use instead of a state dictionary loaded from saved weights file.
This option can be used if you want to create a model from a pretrained configuration but load your own weights.
In this case though, you should check if using :func:`~transformers.PreTrainedModel.save_pretrained` and :func:`~transformers.PreTrainedModel.from_pretrained` is not a simpler option.
cache_dir: (`optional`) string:
Path to a directory in which a downloaded pre-trained model
configuration should be cached if the standard cache should not be used.
force_download: (`optional`) boolean, default False:
Force to (re-)download the model weights and configuration files and override the cached versions if they exists.
proxies: (`optional`) dict, default None:
A dictionary of proxy servers to use by protocol or endpoint, e.g.: {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}.
The proxies are used on each request.
output_loading_info: (`optional`) boolean:
Set to ``True`` to also return a dictionary containing missing keys, unexpected keys and error messages.
kwargs: (`optional`) Remaining dictionary of keyword arguments:
These arguments will be passed to the configuration and the model.
Examples::
model = AutoModelForForMultipleChoice.from_pretrained('bert-base-uncased') # Download model and configuration from S3 and cache.
model = AutoModelForMultipleChoice.from_pretrained('./test/bert_model/') # E.g. model was saved using `save_pretrained('./test/saved_model/')`
model = AutoModelForMultipleChoice.from_pretrained('bert-base-uncased', output_attentions=True) # Update configuration during loading
assert model.config.output_attentions == True
# Loading from a TF checkpoint file instead of a PyTorch model (slower)
config = AutoConfig.from_json_file('./tf_model/bert_tf_model_config.json')
model = AutoModelForMultipleChoice.from_pretrained('./tf_model/bert_tf_checkpoint.ckpt.index', from_tf=True, config=config)
"""
config = kwargs.pop("config", None) config = kwargs.pop("config", None)
if not isinstance(config, PretrainedConfig): if not isinstance(config, PretrainedConfig):
config, kwargs = AutoConfig.from_pretrained( config, kwargs = AutoConfig.from_pretrained(
......
...@@ -38,6 +38,7 @@ from .configuration_auto import ( ...@@ -38,6 +38,7 @@ from .configuration_auto import (
XLMConfig, XLMConfig,
XLMRobertaConfig, XLMRobertaConfig,
XLNetConfig, XLNetConfig,
replace_list_option_in_docstrings,
) )
from .configuration_utils import PretrainedConfig from .configuration_utils import PretrainedConfig
from .modeling_tf_albert import ( from .modeling_tf_albert import (
...@@ -333,21 +334,6 @@ class TFAutoModel(object): ...@@ -333,21 +334,6 @@ class TFAutoModel(object):
when created with the `TFAutoModel.from_pretrained(pretrained_model_name_or_path)` when created with the `TFAutoModel.from_pretrained(pretrained_model_name_or_path)`
class method. class method.
The `from_pretrained()` method takes care of returning the correct model class instance
based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `t5`: TFT5Model (T5 model)
- `distilbert`: TFDistilBertModel (DistilBERT model)
- `roberta`: TFRobertaModel (RoBERTa model)
- `bert`: TFBertModel (Bert model)
- `openai-gpt`: TFOpenAIGPTModel (OpenAI GPT model)
- `gpt2`: TFGPT2Model (OpenAI GPT-2 model)
- `transfo-xl`: TFTransfoXLModel (Transformer-XL model)
- `xlnet`: TFXLNetModel (XLNet model)
- `xlm`: TFXLMModel (XLM model)
- `ctrl`: TFCTRLModel (CTRL model)
This class cannot be instantiated using `__init__()` (throws an error). This class cannot be instantiated using `__init__()` (throws an error).
""" """
...@@ -359,6 +345,7 @@ class TFAutoModel(object): ...@@ -359,6 +345,7 @@ class TFAutoModel(object):
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(TF_MODEL_MAPPING, use_model_types=False)
def from_config(cls, config): def from_config(cls, config):
r"""Instantiates one of the base model classes of the library r"""Instantiates one of the base model classes of the library
from a configuration. from a configuration.
...@@ -372,15 +359,7 @@ class TFAutoModel(object): ...@@ -372,15 +359,7 @@ class TFAutoModel(object):
config: (`optional`) instance of a class derived from :class:`~transformers.TFPretrainedConfig`: config: (`optional`) instance of a class derived from :class:`~transformers.TFPretrainedConfig`:
The model class to instantiate is selected based on the configuration class: The model class to instantiate is selected based on the configuration class:
- isInstance of `distilbert` configuration class: TFDistilBertModel (DistilBERT model) List options
- isInstance of `roberta` configuration class: TFRobertaModel (RoBERTa model)
- isInstance of `bert` configuration class: TFBertModel (Bert model)
- isInstance of `openai-gpt` configuration class: TFOpenAIGPTModel (OpenAI GPT model)
- isInstance of `gpt2` configuration class: TFGPT2Model (OpenAI GPT-2 model)
- isInstance of `ctrl` configuration class: TFCTRLModel (Salesforce CTRL model)
- isInstance of `transfo-xl` configuration class: TFTransfoXLModel (Transformer-XL model)
- isInstance of `xlnet` configuration class: TFXLNetModel (XLNet model)
- isInstance of `xlm` configuration class: TFXLMModel (XLM model)
Examples:: Examples::
...@@ -398,6 +377,7 @@ class TFAutoModel(object): ...@@ -398,6 +377,7 @@ class TFAutoModel(object):
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(TF_MODEL_MAPPING)
def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
r"""Instantiates one of the base model classes of the library r"""Instantiates one of the base model classes of the library
from a pre-trained model configuration. from a pre-trained model configuration.
...@@ -406,15 +386,7 @@ class TFAutoModel(object): ...@@ -406,15 +386,7 @@ class TFAutoModel(object):
based on the `model_type` property of the config object, or when it's missing, based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string: falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `t5`: TFT5Model (T5 model) List options
- `distilbert`: TFDistilBertModel (DistilBERT model)
- `roberta`: TFRobertaModel (RoBERTa model)
- `bert`: TFTFBertModel (Bert model)
- `openai-gpt`: TFOpenAIGPTModel (OpenAI GPT model)
- `gpt2`: TFGPT2Model (OpenAI GPT-2 model)
- `transfo-xl`: TFTransfoXLModel (Transformer-XL model)
- `xlnet`: TFXLNetModel (XLNet model)
- `ctrl`: TFCTRLModel (CTRL model)
Params: Params:
pretrained_model_name_or_path: either: pretrained_model_name_or_path: either:
...@@ -510,6 +482,7 @@ class TFAutoModelForPreTraining(object): ...@@ -510,6 +482,7 @@ class TFAutoModelForPreTraining(object):
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(TF_MODEL_FOR_PRETRAINING_MAPPING, use_model_types=False)
def from_config(cls, config): def from_config(cls, config):
r"""Instantiates one of the base model classes of the library r"""Instantiates one of the base model classes of the library
from a configuration. from a configuration.
...@@ -523,15 +496,7 @@ class TFAutoModelForPreTraining(object): ...@@ -523,15 +496,7 @@ class TFAutoModelForPreTraining(object):
config (:class:`~transformers.TFPretrainedConfig`): config (:class:`~transformers.TFPretrainedConfig`):
The model class to instantiate is selected based on the configuration class: The model class to instantiate is selected based on the configuration class:
- isInstance of `distilbert` configuration class: :class:`~transformers.TFDistilBertModelForMaskedLM` (DistilBERT model) List options
- isInstance of `roberta` configuration class: :class:`~transformers.TFRobertaModelForMaskedLM` (RoBERTa model)
- isInstance of `bert` configuration class: :class:`~transformers.TFBertForPreTraining` (Bert model)
- isInstance of `openai-gpt` configuration class: :class:`~transformers.TFOpenAIGPTLMHeadModel` (OpenAI GPT model)
- isInstance of `gpt2` configuration class: :class:`~transformers.TFGPT2ModelLMHeadModel` (OpenAI GPT-2 model)
- isInstance of `ctrl` configuration class: :class:`~transformers.TFCTRLModelLMHeadModel` (Salesforce CTRL model)
- isInstance of `transfo-xl` configuration class: :class:`~transformers.TFTransfoXLLMHeadModel` (Transformer-XL model)
- isInstance of `xlnet` configuration class: :class:`~transformers.TFXLNetLMHeadModel` (XLNet model)
- isInstance of `xlm` configuration class: :class:`~transformers.TFXLMWithLMHeadModel` (XLM model)
Examples:: Examples::
...@@ -549,6 +514,7 @@ class TFAutoModelForPreTraining(object): ...@@ -549,6 +514,7 @@ class TFAutoModelForPreTraining(object):
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(TF_MODEL_FOR_PRETRAINING_MAPPING)
def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
r"""Instantiates one of the model classes of the library -with the architecture used for pretraining this model– from a pre-trained model configuration. r"""Instantiates one of the model classes of the library -with the architecture used for pretraining this model– from a pre-trained model configuration.
...@@ -556,17 +522,7 @@ class TFAutoModelForPreTraining(object): ...@@ -556,17 +522,7 @@ class TFAutoModelForPreTraining(object):
based on the `model_type` property of the config object, or when it's missing, based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string: falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `t5`: :class:`~transformers.TFT5ModelWithLMHead` (T5 model) List options
- `distilbert`: :class:`~transformers.TFDistilBertForMaskedLM` (DistilBERT model)
- `albert`: :class:`~transformers.TFAlbertForPreTraining` (ALBERT model)
- `roberta`: :class:`~transformers.TFRobertaForMaskedLM` (RoBERTa model)
- `bert`: :class:`~transformers.TFBertForPreTraining` (Bert model)
- `openai-gpt`: :class:`~transformers.TFOpenAIGPTLMHeadModel` (OpenAI GPT model)
- `gpt2`: :class:`~transformers.TFGPT2LMHeadModel` (OpenAI GPT-2 model)
- `transfo-xl`: :class:`~transformers.TFTransfoXLLMHeadModel` (Transformer-XL model)
- `xlnet`: :class:`~transformers.TFXLNetLMHeadModel` (XLNet model)
- `xlm`: :class:`~transformers.TFXLMWithLMHeadModel` (XLM model)
- `ctrl`: :class:`~transformers.TFCTRLLMHeadModel` (Salesforce CTRL model)
The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated) The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated)
To train the model, you should first set it back in training mode with `model.train()` To train the model, you should first set it back in training mode with `model.train()`
...@@ -653,21 +609,6 @@ class TFAutoModelWithLMHead(object): ...@@ -653,21 +609,6 @@ class TFAutoModelWithLMHead(object):
when created with the `TFAutoModelWithLMHead.from_pretrained(pretrained_model_name_or_path)` when created with the `TFAutoModelWithLMHead.from_pretrained(pretrained_model_name_or_path)`
class method. class method.
The `from_pretrained()` method takes care of returning the correct model class instance
based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `t5`: TFT5ForConditionalGeneration (T5 model)
- `distilbert`: TFDistilBertForMaskedLM (DistilBERT model)
- `roberta`: TFRobertaForMaskedLM (RoBERTa model)
- `bert`: TFBertForMaskedLM (Bert model)
- `openai-gpt`: TFOpenAIGPTLMHeadModel (OpenAI GPT model)
- `gpt2`: TFGPT2LMHeadModel (OpenAI GPT-2 model)
- `transfo-xl`: TFTransfoXLLMHeadModel (Transformer-XL model)
- `xlnet`: TFXLNetLMHeadModel (XLNet model)
- `xlm`: TFXLMWithLMHeadModel (XLM model)
- `ctrl`: TFCTRLLMHeadModel (CTRL model)
This class cannot be instantiated using `__init__()` (throws an error). This class cannot be instantiated using `__init__()` (throws an error).
""" """
...@@ -679,6 +620,7 @@ class TFAutoModelWithLMHead(object): ...@@ -679,6 +620,7 @@ class TFAutoModelWithLMHead(object):
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(TF_MODEL_WITH_LM_HEAD_MAPPING, use_model_types=False)
def from_config(cls, config): def from_config(cls, config):
r"""Instantiates one of the base model classes of the library r"""Instantiates one of the base model classes of the library
from a configuration. from a configuration.
...@@ -692,15 +634,7 @@ class TFAutoModelWithLMHead(object): ...@@ -692,15 +634,7 @@ class TFAutoModelWithLMHead(object):
config: (`optional`) instance of a class derived from :class:`~transformers.TFPretrainedConfig`: config: (`optional`) instance of a class derived from :class:`~transformers.TFPretrainedConfig`:
The model class to instantiate is selected based on the configuration class: The model class to instantiate is selected based on the configuration class:
- isInstance of `distilbert` configuration class: TFDistilBertModel (DistilBERT model) List options
- isInstance of `roberta` configuration class: TFRobertaModel (RoBERTa model)
- isInstance of `bert` configuration class: TFBertModel (Bert model)
- isInstance of `openai-gpt` configuration class: OpenAIGPTModel (OpenAI GPT model)
- isInstance of `gpt2` configuration class: TFGPT2Model (OpenAI GPT-2 model)
- isInstance of `ctrl` configuration class: TFCTRLModel (Salesforce CTRL model)
- isInstance of `transfo-xl` configuration class: TransfoXLModel (Transformer-XL model)
- isInstance of `xlnet` configuration class: TFXLNetModel (XLNet model)
- isInstance of `xlm` configuration class: TFXLMModel (XLM model)
Examples:: Examples::
...@@ -722,6 +656,7 @@ class TFAutoModelWithLMHead(object): ...@@ -722,6 +656,7 @@ class TFAutoModelWithLMHead(object):
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(TF_MODEL_WITH_LM_HEAD_MAPPING)
def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
r"""Instantiates one of the language modeling model classes of the library r"""Instantiates one of the language modeling model classes of the library
from a pre-trained model configuration. from a pre-trained model configuration.
...@@ -730,16 +665,7 @@ class TFAutoModelWithLMHead(object): ...@@ -730,16 +665,7 @@ class TFAutoModelWithLMHead(object):
based on the `model_type` property of the config object, or when it's missing, based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string: falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `t5`: TFT5ForConditionalGeneration (T5 model) List options
- `distilbert`: TFDistilBertForMaskedLM (DistilBERT model)
- `roberta`: TFRobertaForMaskedLM (RoBERTa model)
- `bert`: TFBertForMaskedLM (Bert model)
- `openai-gpt`: TFOpenAIGPTLMHeadModel (OpenAI GPT model)
- `gpt2`: TFGPT2LMHeadModel (OpenAI GPT-2 model)
- `transfo-xl`: TFTransfoXLLMHeadModel (Transformer-XL model)
- `xlnet`: TFXLNetLMHeadModel (XLNet model)
- `xlm`: TFXLMWithLMHeadModel (XLM model)
- `ctrl`: TFCTRLLMHeadModel (CTRL model)
Params: Params:
pretrained_model_name_or_path: either: pretrained_model_name_or_path: either:
...@@ -831,12 +757,6 @@ class TFAutoModelForMultipleChoice: ...@@ -831,12 +757,6 @@ class TFAutoModelForMultipleChoice:
when created with the `TFAutoModelForMultipleChoice.from_pretrained(pretrained_model_name_or_path)` when created with the `TFAutoModelForMultipleChoice.from_pretrained(pretrained_model_name_or_path)`
class method. class method.
The `from_pretrained()` method takes care of returning the correct model class instance
based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `albert`: TFAlbertForMultipleChoice (Albert model)
- `bert`: TFBertForMultipleChoice (Bert model)
This class cannot be instantiated using `__init__()` (throws an error). This class cannot be instantiated using `__init__()` (throws an error).
""" """
...@@ -848,6 +768,7 @@ class TFAutoModelForMultipleChoice: ...@@ -848,6 +768,7 @@ class TFAutoModelForMultipleChoice:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(TF_MODEL_FOR_MULTIPLE_CHOICE_MAPPING, use_model_types=False)
def from_config(cls, config): def from_config(cls, config):
r"""Instantiates one of the base model classes of the library r"""Instantiates one of the base model classes of the library
from a configuration. from a configuration.
...@@ -860,8 +781,8 @@ class TFAutoModelForMultipleChoice: ...@@ -860,8 +781,8 @@ class TFAutoModelForMultipleChoice:
Args: Args:
config: (`optional`) instance of a class derived from :class:`~transformers.TFPretrainedConfig`: config: (`optional`) instance of a class derived from :class:`~transformers.TFPretrainedConfig`:
The model class to instantiate is selected based on the configuration class: The model class to instantiate is selected based on the configuration class:
- isInstance of `albert` configuration class: TFAlbertModel (Albert model)
- isInstance of `bert` configuration class: TFBertModel (Bert model) List options
Examples:: Examples::
...@@ -881,6 +802,7 @@ class TFAutoModelForMultipleChoice: ...@@ -881,6 +802,7 @@ class TFAutoModelForMultipleChoice:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(TF_MODEL_FOR_MULTIPLE_CHOICE_MAPPING)
def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
r"""Instantiates one of the multiple choice model classes of the library r"""Instantiates one of the multiple choice model classes of the library
from a pre-trained model configuration. from a pre-trained model configuration.
...@@ -889,8 +811,7 @@ class TFAutoModelForMultipleChoice: ...@@ -889,8 +811,7 @@ class TFAutoModelForMultipleChoice:
based on the `model_type` property of the config object, or when it's missing, based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string: falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `albert`: TFRobertaForMultiple (Albert model) List options
- `bert`: TFBertForMultipleChoice (Bert model)
The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated) The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated)
To train the model, you should first set it back in training mode with `model.train()` To train the model, you should first set it back in training mode with `model.train()`
...@@ -992,6 +913,7 @@ class TFAutoModelForCausalLM: ...@@ -992,6 +913,7 @@ class TFAutoModelForCausalLM:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(TF_MODEL_FOR_CAUSAL_LM_MAPPING, use_model_types=False)
def from_config(cls, config): def from_config(cls, config):
r"""Instantiates one of the base model classes of the library r"""Instantiates one of the base model classes of the library
from a configuration. from a configuration.
...@@ -1005,12 +927,7 @@ class TFAutoModelForCausalLM: ...@@ -1005,12 +927,7 @@ class TFAutoModelForCausalLM:
config (:class:`~transformers.TFPretrainedConfig`): config (:class:`~transformers.TFPretrainedConfig`):
The model class to instantiate is selected based on the configuration class: The model class to instantiate is selected based on the configuration class:
- isInstance of `bert` configuration class: :class:`~transformers.TFBertLMHeadModel` (Bert model) List options
- isInstance of `openai-gpt` configuration class: :class:`~transformers.TFOpenAIGPTLMHeadModel` (OpenAI GPT model)
- isInstance of `gpt2` configuration class: :class:`~transformers.TFGPT2LMHeadModel` (OpenAI GPT-2 model)
- isInstance of `ctrl` configuration class: :class:`~transformers.TFCTRLLMHeadModel` (Salesforce CTRL model)
- isInstance of `transfo-xl` configuration class: :class:`~transformers.TFTransfoXLLMHeadModel` (Transformer-XL model)
- isInstance of `xlnet` configuration class: :class:`~transformers.TFXLNetLMHeadModel` (XLNet model)
Examples:: Examples::
...@@ -1028,6 +945,7 @@ class TFAutoModelForCausalLM: ...@@ -1028,6 +945,7 @@ class TFAutoModelForCausalLM:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(TF_MODEL_FOR_CAUSAL_LM_MAPPING)
def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
r"""Instantiates one of the language modeling model classes of the library r"""Instantiates one of the language modeling model classes of the library
from a pre-trained model configuration. from a pre-trained model configuration.
...@@ -1036,12 +954,7 @@ class TFAutoModelForCausalLM: ...@@ -1036,12 +954,7 @@ class TFAutoModelForCausalLM:
based on the `model_type` property of the config object, or when it's missing, based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string: falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `bert`: :class:`~transformers.TFBertLMHeadModel` (Bert model) List options
- `openai-gpt`: :class:`~transformers.TFOpenAIGPTLMHeadModel` (OpenAI GPT model)
- `gpt2`: :class:`~transformers.TFGPT2LMHeadModel` (OpenAI GPT-2 model)
- `transfo-xl`: :class:`~transformers.TFTransfoXLLMHeadModel` (Transformer-XL model)
- `xlnet`: :class:`~transformers.TFXLNetLMHeadModel` (XLNet model)
- `ctrl`: :class:`~transformers.TFCTRLLMHeadModel` (Salesforce CTRL model)
The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated) The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated)
To train the model, you should first set it back in training mode with `model.train()` To train the model, you should first set it back in training mode with `model.train()`
...@@ -1128,6 +1041,7 @@ class TFAutoModelForMaskedLM: ...@@ -1128,6 +1041,7 @@ class TFAutoModelForMaskedLM:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(TF_MODEL_FOR_MASKED_LM_MAPPING, use_model_types=False)
def from_config(cls, config): def from_config(cls, config):
r"""Instantiates one of the base model classes of the library r"""Instantiates one of the base model classes of the library
from a configuration. from a configuration.
...@@ -1140,16 +1054,8 @@ class TFAutoModelForMaskedLM: ...@@ -1140,16 +1054,8 @@ class TFAutoModelForMaskedLM:
Args: Args:
config (:class:`~transformers.TFPretrainedConfig`): config (:class:`~transformers.TFPretrainedConfig`):
The model class to instantiate is selected based on the configuration class: The model class to instantiate is selected based on the configuration class:
- isInstance of `distilbert` configuration class: :class:`~transformers.TFDistilBertForMaskedLM` (DistilBERT model)
- isInstance of `roberta` configuration class: :class:`~transformers.TFRobertaForMaskedLM` (RoBERTa model)
- isInstance of `bert` configuration class: :class:`~transformers.TFBertForMaskedLM` (Bert model)
- isInstance of `flaubert` configuration class: :class:`~transformers.TFFlaubertWithLMHeadModel` (Flaubert model)
- isInstance of `xlm` configuration class: :class:`~transformers.TFXLMWithLMHeadModel` (XLM model)
- isInstance of `xlm-roberta` configuration class: :class:`~transformers.TFXLMRobertaForMaskedLM` (XLM-Roberta model)
- isInstance of `electra` configuration class: :class:`~transformers.TFElectraForMaskedLM` (Electra model)
- isInstance of `camembert` configuration class: :class:`~transformers.TFCamembertForMaskedLM` (Camembert model)
- isInstance of `albert` configuration class: :class:`~transformers.TFAlbertForMaskedLM` (Albert model)
List options
Examples:: Examples::
...@@ -1167,6 +1073,7 @@ class TFAutoModelForMaskedLM: ...@@ -1167,6 +1073,7 @@ class TFAutoModelForMaskedLM:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(TF_MODEL_FOR_MASKED_LM_MAPPING)
def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
r"""Instantiates one of the language modeling model classes of the library r"""Instantiates one of the language modeling model classes of the library
from a pre-trained model configuration. from a pre-trained model configuration.
...@@ -1175,16 +1082,7 @@ class TFAutoModelForMaskedLM: ...@@ -1175,16 +1082,7 @@ class TFAutoModelForMaskedLM:
based on the `model_type` property of the config object, or when it's missing, based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string: falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `distilbert`: :class:`~transformers.TFDistilBertForMaskedLM` (DistilBERT model) List options
- `albert`: :class:`~transformers.TFAlbertForMaskedLM` (ALBERT model)
- `camembert`: :class:`~transformers.TFCamembertForMaskedLM` (CamemBERT model)
- `xlm-roberta`: :class:`~transformers.TFXLMRobertaForMaskedLM` (XLM-RoBERTa model)
- `longformer`: :class:`~transformers.TFLongformerForMaskedLM` (Longformer model)
- `roberta`: :class:`~transformers.TFRobertaForMaskedLM` (RoBERTa model)
- `xlm`: :class:`~transformers.TFXLMWithLMHeadModel` (XLM model)
- `flaubert`: :class:`~transformers.TFFlaubertWithLMHeadModel` (Flaubert model)
- `electra`: :class:`~transformers.TFElectraForMaskedLM` (Electra model)
- `bert`: :class:`~transformers.TFBertLMHeadModel` (Bert model)
The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated) The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated)
To train the model, you should first set it back in training mode with `model.train()` To train the model, you should first set it back in training mode with `model.train()`
...@@ -1271,6 +1169,7 @@ class TFAutoModelForSeq2SeqLM: ...@@ -1271,6 +1169,7 @@ class TFAutoModelForSeq2SeqLM:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING, use_model_types=False)
def from_config(cls, config): def from_config(cls, config):
r"""Instantiates one of the base model classes of the library r"""Instantiates one of the base model classes of the library
from a configuration. from a configuration.
...@@ -1284,7 +1183,7 @@ class TFAutoModelForSeq2SeqLM: ...@@ -1284,7 +1183,7 @@ class TFAutoModelForSeq2SeqLM:
config (:class:`~transformers.TFPretrainedConfig`): config (:class:`~transformers.TFPretrainedConfig`):
The model class to instantiate is selected based on the configuration class: The model class to instantiate is selected based on the configuration class:
- isInstance of `t5` configuration class: :class:`~transformers.TFT5ForConditionalGeneration` (T5 model) List options
Examples:: Examples::
...@@ -1304,6 +1203,7 @@ class TFAutoModelForSeq2SeqLM: ...@@ -1304,6 +1203,7 @@ class TFAutoModelForSeq2SeqLM:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING, use_model_types=False)
def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
r"""Instantiates one of the language modeling model classes of the library r"""Instantiates one of the language modeling model classes of the library
from a pre-trained model configuration. from a pre-trained model configuration.
...@@ -1312,7 +1212,7 @@ class TFAutoModelForSeq2SeqLM: ...@@ -1312,7 +1212,7 @@ class TFAutoModelForSeq2SeqLM:
based on the `model_type` property of the config object, or when it's missing, based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string: falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `t5`: :class:`~transformers.TFT5ForConditionalGeneration` (T5 model) List options
The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated) The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated)
To train the model, you should first set it back in training mode with `model.train()` To train the model, you should first set it back in training mode with `model.train()`
...@@ -1390,16 +1290,6 @@ class TFAutoModelForSequenceClassification(object): ...@@ -1390,16 +1290,6 @@ class TFAutoModelForSequenceClassification(object):
when created with the `TFAutoModelForSequenceClassification.from_pretrained(pretrained_model_name_or_path)` when created with the `TFAutoModelForSequenceClassification.from_pretrained(pretrained_model_name_or_path)`
class method. class method.
The `from_pretrained()` method takes care of returning the correct model class instance
based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `distilbert`: TFDistilBertForSequenceClassification (DistilBERT model)
- `roberta`: TFRobertaForSequenceClassification (RoBERTa model)
- `bert`: TFBertForSequenceClassification (Bert model)
- `xlnet`: TFXLNetForSequenceClassification (XLNet model)
- `xlm`: TFXLMForSequenceClassification (XLM model)
This class cannot be instantiated using `__init__()` (throws an error). This class cannot be instantiated using `__init__()` (throws an error).
""" """
...@@ -1411,6 +1301,7 @@ class TFAutoModelForSequenceClassification(object): ...@@ -1411,6 +1301,7 @@ class TFAutoModelForSequenceClassification(object):
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING, use_model_types=False)
def from_config(cls, config): def from_config(cls, config):
r"""Instantiates one of the base model classes of the library r"""Instantiates one of the base model classes of the library
from a configuration. from a configuration.
...@@ -1424,11 +1315,7 @@ class TFAutoModelForSequenceClassification(object): ...@@ -1424,11 +1315,7 @@ class TFAutoModelForSequenceClassification(object):
config: (`optional`) instance of a class derived from :class:`~transformers.TFPretrainedConfig`: config: (`optional`) instance of a class derived from :class:`~transformers.TFPretrainedConfig`:
The model class to instantiate is selected based on the configuration class: The model class to instantiate is selected based on the configuration class:
- isInstance of `distilbert` configuration class: DistilBertModel (DistilBERT model) List options
- isInstance of `roberta` configuration class: RobertaModel (RoBERTa model)
- isInstance of `bert` configuration class: BertModel (Bert model)
- isInstance of `xlnet` configuration class: XLNetModel (XLNet model)
- isInstance of `xlm` configuration class: XLMModel (XLM model)
Examples:: Examples::
...@@ -1448,6 +1335,7 @@ class TFAutoModelForSequenceClassification(object): ...@@ -1448,6 +1335,7 @@ class TFAutoModelForSequenceClassification(object):
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING)
def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
r"""Instantiates one of the sequence classification model classes of the library r"""Instantiates one of the sequence classification model classes of the library
from a pre-trained model configuration. from a pre-trained model configuration.
...@@ -1456,11 +1344,7 @@ class TFAutoModelForSequenceClassification(object): ...@@ -1456,11 +1344,7 @@ class TFAutoModelForSequenceClassification(object):
based on the `model_type` property of the config object, or when it's missing, based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string: falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `distilbert`: TFDistilBertForSequenceClassification (DistilBERT model) List options
- `roberta`: TFRobertaForSequenceClassification (RoBERTa model)
- `bert`: TFBertForSequenceClassification (Bert model)
- `xlnet`: TFXLNetForSequenceClassification (XLNet model)
- `xlm`: TFXLMForSequenceClassification (XLM model)
The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated) The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated)
To train the model, you should first set it back in training mode with `model.train()` To train the model, you should first set it back in training mode with `model.train()`
...@@ -1551,17 +1435,6 @@ class TFAutoModelForQuestionAnswering(object): ...@@ -1551,17 +1435,6 @@ class TFAutoModelForQuestionAnswering(object):
when created with the `TFAutoModelForQuestionAnswering.from_pretrained(pretrained_model_name_or_path)` when created with the `TFAutoModelForQuestionAnswering.from_pretrained(pretrained_model_name_or_path)`
class method. class method.
The `from_pretrained()` method takes care of returning the correct model class instance
based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `distilbert`: TFDistilBertForQuestionAnswering (DistilBERT model)
- `albert`: TFAlbertForQuestionAnswering (ALBERT model)
- `roberta`: TFRobertaForQuestionAnswering (RoBERTa model)
- `bert`: TFBertForQuestionAnswering (Bert model)
- `xlnet`: TFXLNetForQuestionAnswering (XLNet model)
- `xlm`: TFXLMForQuestionAnswering (XLM model)
This class cannot be instantiated using `__init__()` (throws an error). This class cannot be instantiated using `__init__()` (throws an error).
""" """
...@@ -1573,6 +1446,7 @@ class TFAutoModelForQuestionAnswering(object): ...@@ -1573,6 +1446,7 @@ class TFAutoModelForQuestionAnswering(object):
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(TF_MODEL_FOR_QUESTION_ANSWERING_MAPPING, use_model_types=False)
def from_config(cls, config): def from_config(cls, config):
r"""Instantiates one of the base model classes of the library r"""Instantiates one of the base model classes of the library
from a configuration. from a configuration.
...@@ -1586,12 +1460,7 @@ class TFAutoModelForQuestionAnswering(object): ...@@ -1586,12 +1460,7 @@ class TFAutoModelForQuestionAnswering(object):
config: (`optional`) instance of a class derived from :class:`~transformers.TFPretrainedConfig`: config: (`optional`) instance of a class derived from :class:`~transformers.TFPretrainedConfig`:
The model class to instantiate is selected based on the configuration class: The model class to instantiate is selected based on the configuration class:
- isInstance of `distilbert` configuration class: DistilBertModel (DistilBERT model) List options
- isInstance of `albert` configuration class: AlbertModel (ALBERT model)
- isInstance of `roberta` configuration class: RobertaModel (RoBERTa model)
- isInstance of `bert` configuration class: BertModel (Bert model)
- isInstance of `xlnet` configuration class: XLNetModel (XLNet model)
- isInstance of `xlm` configuration class: XLMModel (XLM model)
Examples:: Examples::
...@@ -1611,6 +1480,7 @@ class TFAutoModelForQuestionAnswering(object): ...@@ -1611,6 +1480,7 @@ class TFAutoModelForQuestionAnswering(object):
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(TF_MODEL_FOR_QUESTION_ANSWERING_MAPPING)
def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
r"""Instantiates one of the question answering model classes of the library r"""Instantiates one of the question answering model classes of the library
from a pre-trained model configuration. from a pre-trained model configuration.
...@@ -1619,12 +1489,7 @@ class TFAutoModelForQuestionAnswering(object): ...@@ -1619,12 +1489,7 @@ class TFAutoModelForQuestionAnswering(object):
based on the `model_type` property of the config object, or when it's missing, based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string: falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `distilbert`: TFDistilBertForQuestionAnswering (DistilBERT model) List options
- `albert`: TFAlbertForQuestionAnswering (ALBERT model)
- `roberta`: TFRobertaForQuestionAnswering (RoBERTa model)
- `bert`: TFBertForQuestionAnswering (Bert model)
- `xlnet`: TFXLNetForQuestionAnswering (XLNet model)
- `xlm`: TFXLMForQuestionAnswering (XLM model)
The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated) The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated)
To train the model, you should first set it back in training mode with `model.train()` To train the model, you should first set it back in training mode with `model.train()`
...@@ -1717,6 +1582,7 @@ class TFAutoModelForTokenClassification: ...@@ -1717,6 +1582,7 @@ class TFAutoModelForTokenClassification:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(TF_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING, use_model_types=False)
def from_config(cls, config): def from_config(cls, config):
r"""Instantiates one of the base model classes of the library r"""Instantiates one of the base model classes of the library
from a configuration. from a configuration.
...@@ -1730,10 +1596,7 @@ class TFAutoModelForTokenClassification: ...@@ -1730,10 +1596,7 @@ class TFAutoModelForTokenClassification:
config: (`optional`) instance of a class derived from :class:`~transformers.TFPretrainedConfig`: config: (`optional`) instance of a class derived from :class:`~transformers.TFPretrainedConfig`:
The model class to instantiate is selected based on the configuration class: The model class to instantiate is selected based on the configuration class:
- isInstance of `bert` configuration class: BertModel (Bert model) List options
- isInstance of `xlnet` configuration class: XLNetModel (XLNet model)
- isInstance of `distilbert` configuration class: DistilBertModel (DistilBert model)
- isInstance of `roberta` configuration class: RobteraModel (Roberta model)
Examples:: Examples::
...@@ -1753,6 +1616,7 @@ class TFAutoModelForTokenClassification: ...@@ -1753,6 +1616,7 @@ class TFAutoModelForTokenClassification:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(TF_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING)
def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
r"""Instantiates one of the question answering model classes of the library r"""Instantiates one of the question answering model classes of the library
from a pre-trained model configuration. from a pre-trained model configuration.
...@@ -1761,10 +1625,7 @@ class TFAutoModelForTokenClassification: ...@@ -1761,10 +1625,7 @@ class TFAutoModelForTokenClassification:
based on the `model_type` property of the config object, or when it's missing, based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string: falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `bert`: BertForTokenClassification (Bert model) List options
- `xlnet`: XLNetForTokenClassification (XLNet model)
- `distilbert`: DistilBertForTokenClassification (DistilBert model)
- `roberta`: RobertaForTokenClassification (Roberta model)
The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated) The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated)
To train the model, you should first set it back in training mode with `model.train()` To train the model, you should first set it back in training mode with `model.train()`
......
...@@ -46,6 +46,7 @@ from .configuration_auto import ( ...@@ -46,6 +46,7 @@ from .configuration_auto import (
XLMConfig, XLMConfig,
XLMRobertaConfig, XLMRobertaConfig,
XLNetConfig, XLNetConfig,
replace_list_option_in_docstrings,
) )
from .configuration_utils import PretrainedConfig from .configuration_utils import PretrainedConfig
from .tokenization_albert import AlbertTokenizer from .tokenization_albert import AlbertTokenizer
...@@ -112,6 +113,8 @@ TOKENIZER_MAPPING = OrderedDict( ...@@ -112,6 +113,8 @@ TOKENIZER_MAPPING = OrderedDict(
] ]
) )
SLOW_TOKENIZER_MAPPING = {k: v[0] for k, v in TOKENIZER_MAPPING.items()}
class AutoTokenizer: class AutoTokenizer:
r""":class:`~transformers.AutoTokenizer` is a generic tokenizer class r""":class:`~transformers.AutoTokenizer` is a generic tokenizer class
...@@ -119,28 +122,6 @@ class AutoTokenizer: ...@@ -119,28 +122,6 @@ class AutoTokenizer:
when created with the `AutoTokenizer.from_pretrained(pretrained_model_name_or_path)` when created with the `AutoTokenizer.from_pretrained(pretrained_model_name_or_path)`
class method. class method.
The `from_pretrained()` method takes care of returning the correct tokenizer class instance
based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `t5`: T5Tokenizer (T5 model)
- `distilbert`: DistilBertTokenizer (DistilBert model)
- `albert`: AlbertTokenizer (ALBERT model)
- `camembert`: CamembertTokenizer (CamemBERT model)
- `xlm-roberta`: XLMRobertaTokenizer (XLM-RoBERTa model)
- `longformer`: LongformerTokenizer (AllenAI Longformer model)
- `roberta`: RobertaTokenizer (RoBERTa model)
- `bert`: BertTokenizer (Bert model)
- `openai-gpt`: OpenAIGPTTokenizer (OpenAI GPT model)
- `gpt2`: GPT2Tokenizer (OpenAI GPT-2 model)
- `transfo-xl`: TransfoXLTokenizer (Transformer-XL model)
- `xlnet`: XLNetTokenizer (XLNet model)
- `xlm`: XLMTokenizer (XLM model)
- `ctrl`: CTRLTokenizer (Salesforce CTRL model)
- `electra`: ElectraTokenizer (Google ELECTRA model)
- `funnel`: FunnelTokenizer (Funnel Transformer model)
- `lxmert`: LxmertTokenizer (Lxmert model)
This class cannot be instantiated using `__init__()` (throw an error). This class cannot be instantiated using `__init__()` (throw an error).
""" """
...@@ -151,6 +132,7 @@ class AutoTokenizer: ...@@ -151,6 +132,7 @@ class AutoTokenizer:
) )
@classmethod @classmethod
@replace_list_option_in_docstrings(SLOW_TOKENIZER_MAPPING)
def from_pretrained(cls, pretrained_model_name_or_path, *inputs, **kwargs): def from_pretrained(cls, pretrained_model_name_or_path, *inputs, **kwargs):
r"""Instantiate one of the tokenizer classes of the library r"""Instantiate one of the tokenizer classes of the library
from a pre-trained model vocabulary. from a pre-trained model vocabulary.
...@@ -159,24 +141,7 @@ class AutoTokenizer: ...@@ -159,24 +141,7 @@ class AutoTokenizer:
based on the `model_type` property of the config object, or when it's missing, based on the `model_type` property of the config object, or when it's missing,
falling back to using pattern matching on the `pretrained_model_name_or_path` string: falling back to using pattern matching on the `pretrained_model_name_or_path` string:
- `t5`: T5Tokenizer (T5 model) List options
- `distilbert`: DistilBertTokenizer (DistilBert model)
- `albert`: AlbertTokenizer (ALBERT model)
- `camembert`: CamembertTokenizer (CamemBERT model)
- `xlm-roberta`: XLMRobertaTokenizer (XLM-RoBERTa model)
- `longformer`: LongformerTokenizer (AllenAI Longformer model)
- `roberta`: RobertaTokenizer (RoBERTa model)
- `bert-base-japanese`: BertJapaneseTokenizer (Bert model)
- `bert`: BertTokenizer (Bert model)
- `openai-gpt`: OpenAIGPTTokenizer (OpenAI GPT model)
- `gpt2`: GPT2Tokenizer (OpenAI GPT-2 model)
- `transfo-xl`: TransfoXLTokenizer (Transformer-XL model)
- `xlnet`: XLNetTokenizer (XLNet model)
- `xlm`: XLMTokenizer (XLM model)
- `ctrl`: CTRLTokenizer (Salesforce CTRL model)
- `electra`: ElectraTokenizer (Google ELECTRA model)
- `funnel`: FunnelTokenizer (Funnel Transformer model)
- `lxmert`: LxmertTokenizer (Lxmert model)
Params: Params:
pretrained_model_name_or_path: either: pretrained_model_name_or_path: either:
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment