chenpangpang/transformers, commit 44286b94
authored Oct 22, 2019 by Lysandre
parent 1cfd9748

RoBERTa doesn't print a warning when no special tokens are passed.
Showing 2 changed files with 0 additions and 28 deletions.
transformers/modeling_roberta.py     +0 -12
transformers/modeling_tf_roberta.py  +0 -16
transformers/modeling_roberta.py

@@ -169,18 +169,6 @@ class RobertaModel(BertModel):
         self.embeddings = RobertaEmbeddings(config)
         self.init_weights()
 
-    def forward(self, input_ids, attention_mask=None, token_type_ids=None, position_ids=None,
-                head_mask=None):
-        if input_ids[:, 0].sum().item() != 0:
-            logger.warning("A sequence with no special tokens has been passed to the RoBERTa model. "
-                           "This model requires special tokens in order to work. "
-                           "Please specify add_special_tokens=True in your tokenize.encode()"
-                           "or tokenizer.convert_tokens_to_ids().")
-        return super(RobertaModel, self).forward(input_ids,
-                                                 attention_mask=attention_mask,
-                                                 token_type_ids=token_type_ids,
-                                                 position_ids=position_ids,
-                                                 head_mask=head_mask)
 
 @add_start_docstrings("""RoBERTa Model with a `language modeling` head on top. """,
                       ROBERTA_START_DOCSTRING, ROBERTA_INPUTS_DOCSTRING)
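For reference, the deleted PyTorch check relied on RoBERTa's <s> token having id 0: if the first column of input_ids did not sum to 0, the batch was assumed to lack special tokens and a warning was logged. A minimal sketch of how a caller satisfies that expectation (model name and example text are illustrative, not part of this commit):

import torch
from transformers import RobertaModel, RobertaTokenizer

tokenizer = RobertaTokenizer.from_pretrained("roberta-base")
model = RobertaModel.from_pretrained("roberta-base")

# add_special_tokens=True wraps the text as <s> ... </s>; RoBERTa's <s> token
# has id 0, which is exactly what the deleted warning tested for.
ids = tokenizer.encode("Hello world", add_special_tokens=True)
input_ids = torch.tensor([ids])

assert input_ids[:, 0].sum().item() == 0  # the condition the removed warning guarded
outputs = model(input_ids)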
transformers/modeling_tf_roberta.py

@@ -65,22 +65,6 @@ class TFRobertaMainLayer(TFBertMainLayer):
         super(TFRobertaMainLayer, self).__init__(config, **kwargs)
         self.embeddings = TFRobertaEmbeddings(config, name='embeddings')
 
-    def call(self, inputs, **kwargs):
-        # Check that input_ids starts with control token
-        if isinstance(inputs, (tuple, list)):
-            input_ids = inputs[0]
-        elif isinstance(inputs, dict):
-            input_ids = inputs.get('input_ids')
-        else:
-            input_ids = inputs
-
-        if tf.not_equal(tf.reduce_sum(input_ids[:, 0]), 0):
-            tf.print("A sequence with no special tokens has been passed to the RoBERTa model. "
-                     "This model requires special tokens in order to work. "
-                     "Please specify add_special_tokens=True in your encoding.")
-
-        return super(TFRobertaMainLayer, self).call(inputs, **kwargs)
 
 class TFRobertaPreTrainedModel(TFPreTrainedModel):
     """ An abstract class to handle weights initialization and
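The TensorFlow deletion removed the same first-token check, adapted to the fact that call() may receive a tensor, a tuple/list, or a dict of inputs. A standalone sketch of that test, assuming TF 2.x eager mode and RoBERTa's <s> id of 0 (function name and example ids are illustrative):

import tensorflow as tf

def starts_with_bos(input_ids, bos_token_id=0):
    # Mirrors the deleted check: the sum of the first column is 0 only when
    # every sequence in the batch starts with the <s> token (id 0).
    return bool(tf.equal(tf.reduce_sum(input_ids[:, 0]), bos_token_id))

input_ids = tf.constant([[0, 31414, 232, 2]])  # <s> Hello world </s> (illustrative ids)
print(starts_with_bos(input_ids))  # True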