@@ -190,6 +190,11 @@ class PreTrainedTokenizer(object):
...
@@ -190,6 +190,11 @@ class PreTrainedTokenizer(object):
""" Id of the padding token in the vocabulary. Log an error if used while not having been set. """
""" Id of the padding token in the vocabulary. Log an error if used while not having been set. """
returnself.convert_tokens_to_ids(self.pad_token)
returnself.convert_tokens_to_ids(self.pad_token)
@property
def pad_token_type_id(self):
    """Id of the padding token *type* in the vocabulary.

    Returns:
        The value stored in ``self._pad_token_type_id`` (the token type id
        used for padding, distinct from ``pad_token_id``).
    """
    # Simple attribute read; unlike the token-id properties above, this does
    # not go through convert_tokens_to_ids.
    return self._pad_token_type_id
@property
@property
defcls_token_id(self):
defcls_token_id(self):
""" Id of the classification token in the vocabulary. E.g. to extract a summary of an input sequence leveraging self-attention along the full depth of the model. Log an error if used while not having been set. """
""" Id of the classification token in the vocabulary. E.g. to extract a summary of an input sequence leveraging self-attention along the full depth of the model. Log an error if used while not having been set. """
...
@@ -213,6 +218,7 @@ class PreTrainedTokenizer(object):
...
@@ -213,6 +218,7 @@ class PreTrainedTokenizer(object):