"...git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "e566adc09c443af843e83b239c3a18b8e7bd422d"
Commit f9abf73e authored by Julien Chaumond

[camembert] realign w/ recent changes

parent 26858f27
@@ -37,7 +37,7 @@ CAMEMBERT_START_DOCSTRING = r""" The CamemBERT model was proposed in
     It is a model trained on 138GB of French text.
-    This implementation is the same RoBERTa.
+    This implementation is the same as RoBERTa.
     This model is a PyTorch `torch.nn.Module`_ sub-class. Use it as a regular PyTorch Module and
     refer to the PyTorch documentation for all matter related to general usage and behavior.
@@ -94,6 +94,10 @@ CAMEMBERT_INPUTS_DOCSTRING = r"""
             Mask to nullify selected heads of the self-attention modules.
             Mask values selected in ``[0, 1]``:
             ``1`` indicates the head is **not masked**, ``0`` indicates the head is **masked**.
+        **inputs_embeds**: (`optional`) ``torch.FloatTensor`` of shape ``(batch_size, sequence_length, embedding_dim)``:
+            Optionally, instead of passing ``input_ids`` you can choose to directly pass an embedded representation.
+            This is useful if you want more control over how to convert `input_ids` indices into associated vectors
+            than the model's internal embedding lookup matrix.
 """
 @add_start_docstrings("The bare CamemBERT Model transformer outputting raw hidden-states without any specific head on top.",
@@ -143,7 +147,6 @@ class CamembertModel(RobertaModel):
     """
     config_class = CamembertConfig
     pretrained_model_archive_map = CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_MAP
-    base_model_prefix = "camembert"
 @add_start_docstrings("""CamemBERT Model with a `language modeling` head on top. """,
@@ -180,7 +183,6 @@ class CamembertForMaskedLM(RobertaForMaskedLM):
     """
     config_class = CamembertConfig
     pretrained_model_archive_map = CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_MAP
-    base_model_prefix = "camembert"
 @add_start_docstrings("""CamemBERT Model transformer with a sequence classification/regression head on top (a linear layer
@@ -219,7 +221,6 @@ class CamembertForSequenceClassification(RobertaForSequenceClassification):
     """
     config_class = CamembertConfig
     pretrained_model_archive_map = CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_MAP
-    base_model_prefix = "camembert"
 @add_start_docstrings("""CamemBERT Model with a multiple choice classification head on top (a linear layer on top of
@@ -254,4 +255,3 @@ class CamembertForMultipleChoice(RobertaForMultipleChoice):
     """
     config_class = CamembertConfig
     pretrained_model_archive_map = CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_MAP
-    base_model_prefix = "camembert"
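
Each of the four hunks above drops the `base_model_prefix = "camembert"` override, so the CamemBERT classes now inherit the prefix defined by their RoBERTa parents. A quick sketch of checking that inherited class attribute; the expected "roberta" value comes from the parent classes, which are not shown in this diff:

```python
from transformers import CamembertModel, RobertaModel

# No weights needed: base_model_prefix is a plain class attribute. With the
# override removed, CamembertModel simply reuses the value defined on the
# RoBERTa side (expected to be "roberta", an assumption not confirmed here).
print(RobertaModel.base_model_prefix)
print(CamembertModel.base_model_prefix)
assert CamembertModel.base_model_prefix == RobertaModel.base_model_prefix
```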
@@ -87,7 +87,7 @@ class CamembertTokenizer(PreTrainedTokenizer):
             special tokens for the model
         Returns:
-            A list of integers in the range [0, 1]: 0 for a special token, 1 for a sequence token.
+            A list of integers in the range [0, 1]: 1 for a special token, 0 for a sequence token.
         """
         if already_has_special_tokens:
             if token_ids_1 is not None:
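
The corrected line matches what `get_special_tokens_mask` actually returns: `1` for a special token and `0` for a regular sequence token. A small usage sketch, again assuming the `camembert-base` checkpoint rather than anything defined in this commit:

```python
from transformers import CamembertTokenizer

tokenizer = CamembertTokenizer.from_pretrained("camembert-base")

# encode() wraps the sentence in the <s> ... </s> special tokens.
ids = tokenizer.encode("J'aime le camembert !")

# With already_has_special_tokens=True the mask is computed over `ids` as-is:
# 1 marks a special token, 0 marks a regular sequence token.
mask = tokenizer.get_special_tokens_mask(ids, already_has_special_tokens=True)
print(mask)  # e.g. [1, 0, 0, ..., 0, 1]

assert mask[0] == 1 and mask[-1] == 1
assert all(m == 0 for m in mask[1:-1])
```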