Unverified Commit 221d4c63 authored by Patrick von Platen, committed by GitHub

clean naming (#7068)

parent 8fcbe486
@@ -99,7 +99,7 @@ CONFIG_MAPPING = OrderedDict(
             AlbertConfig,
         ),
         (
-            "bert-for-seq-generation",
+            "bert-generation",
             BertGenerationConfig,
         ),
         (
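For context, the key renamed here is the string AutoConfig uses to look up a configuration class. A minimal sketch of the effect (assuming the AutoConfig.for_model helper exposed by transformers at this point):

from transformers import AutoConfig, BertGenerationConfig

# After this commit the CONFIG_MAPPING key is "bert-generation", so the
# lookup below resolves to BertGenerationConfig; the old
# "bert-for-seq-generation" string no longer matches.
config = AutoConfig.for_model("bert-generation")
assert isinstance(config, BertGenerationConfig)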
...
@@ -12,7 +12,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-""" BertForSeqGeneration model configuration """
+""" BertGeneration model configuration """


 from .configuration_utils import PretrainedConfig
@@ -29,8 +29,8 @@ class BertGenerationConfig(PretrainedConfig):

     Args:
         vocab_size (:obj:`int`, `optional`, defaults to 50358):
-            Vocabulary size of the BertForSeqGeneration model. Defines the different tokens that
-            can be represented by the `inputs_ids` passed to the forward method of :class:`~transformers.BertForSeqGeneration`.
+            Vocabulary size of the BertGeneration model. Defines the different tokens that
+            can be represented by the `inputs_ids` passed to the forward method of :class:`~transformers.BertGeneration`.
         hidden_size (:obj:`int`, `optional`, defaults to 1024):
             Dimensionality of the encoder layers and the pooler layer.
         num_hidden_layers (:obj:`int`, `optional`, defaults to 24):
@@ -60,7 +60,7 @@ class BertGenerationConfig(PretrainedConfig):

         >>> from transformers import BertGenerationConfig, BertGenerationEncoder

-        >>> # Initializing a BertForSeqGeneration config
+        >>> # Initializing a BertGeneration config
         >>> configuration = BertGenerationConfig()

         >>> # Initializing a model from the config
@@ -69,7 +69,7 @@ class BertGenerationConfig(PretrainedConfig):
         >>> # Accessing the model configuration
         >>> configuration = model.config
     """

-    model_type = "bert-for-seq-generation"
+    model_type = "bert-generation"

     def __init__(
         self,
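Spelled out, the docstring example above runs as follows after the rename (a sketch using only the classes the docstring itself imports):

from transformers import BertGenerationConfig, BertGenerationEncoder

# Initializing a BertGeneration config; model_type is now "bert-generation"
configuration = BertGenerationConfig()

# Initializing a model from the config
model = BertGenerationEncoder(configuration)

# Accessing the model configuration
configuration = model.config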
...
@@ -238,7 +238,7 @@ BERT_GENERATION_INPUTS_DOCSTRING = r"""

 @add_start_docstrings(
-    "The bare BertForSeqGeneration model transformer outputting raw hidden-states without any specific head on top.",
+    "The bare BertGeneration model transformer outputting raw hidden-states without any specific head on top.",
     BERT_GENERATION_START_DOCSTRING,
 )
 class BertGenerationEncoder(BertGenerationPreTrainedModel):
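As the updated docstring says, the bare encoder returns raw hidden-states with no head on top. A minimal forward-pass sketch (the tiny config values are arbitrary, chosen only to keep the example cheap; real checkpoints default to hidden_size=1024 and 24 layers):

import torch

from transformers import BertGenerationConfig, BertGenerationEncoder

# A deliberately small config so the example runs instantly.
config = BertGenerationConfig(
    vocab_size=100,
    hidden_size=32,
    num_hidden_layers=2,
    num_attention_heads=2,
    intermediate_size=64,
)
model = BertGenerationEncoder(config)

input_ids = torch.tensor([[2, 17, 35, 3]])  # a dummy token sequence
outputs = model(input_ids)
last_hidden_state = outputs[0]  # raw hidden-states, shape (1, 4, 32)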
...
@@ -12,7 +12,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-""" Tokenization class for model BertForSeqGeneration."""
+""" Tokenization class for model BertGeneration."""


 import os
...
@@ -493,7 +493,7 @@ class BertEncoderDecoderModelTest(EncoderDecoderMixin, unittest.TestCase):
         self.assertEqual(summary, EXPECTED_SUMMARY)


-class BertForSeqGenerationEncoderDecoderModelTest(EncoderDecoderMixin, unittest.TestCase):
+class BertGenerationEncoderDecoderModelTest(EncoderDecoderMixin, unittest.TestCase):
     def get_pretrained_model(self):
         return EncoderDecoderModel.from_encoder_decoder_pretrained(
             "google/bert_for_seq_generation_L-24_bbc_encoder", "google/bert_for_seq_generation_L-24_bbc_encoder"
...
@@ -29,7 +29,7 @@ SPIECE_UNDERLINE = "▁"
 SAMPLE_VOCAB = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures/test_sentencepiece.model")


-class BertForSeqGenerationTokenizationTest(TokenizerTesterMixin, unittest.TestCase):
+class BertGenerationTokenizationTest(TokenizerTesterMixin, unittest.TestCase):
     tokenizer_class = BertGenerationTokenizer
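And the tokenizer side, mirroring the fixture setup shown above (a sketch; the sample sentence is illustrative):

import os

from transformers import BertGenerationTokenizer

# The test builds the tokenizer from a local SentencePiece fixture file.
SAMPLE_VOCAB = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), "fixtures/test_sentencepiece.model"
)
tokenizer = BertGenerationTokenizer(SAMPLE_VOCAB)
token_ids = tokenizer.encode("This is a test")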
...