chenpangpang / transformers · commit 3e52915f

[RoBERTa] Embeddings: fix dimensionality bug

Authored Dec 20, 2019 by Julien Chaumond
Parent 228f5286
Showing 2 changed files with 23 additions and 13 deletions:

transformers/modeling_roberta.py (+1, -2)
transformers/tests/modeling_roberta_test.py (+22, -11)
transformers/modeling_roberta.py

@@ -52,7 +52,6 @@ class RobertaEmbeddings(BertEmbeddings):
     def forward(self, input_ids=None, token_type_ids=None, position_ids=None, inputs_embeds=None):
         if position_ids is None:
             if input_ids is not None:
                 # Create the position ids from the input token ids. Any padded tokens remain padded.
                 position_ids = self.create_position_ids_from_input_ids(input_ids).to(input_ids.device)
...

@@ -88,7 +87,7 @@ class RobertaEmbeddings(BertEmbeddings):
         position_ids = torch.arange(self.padding_idx + 1, sequence_length + self.padding_idx + 1, dtype=torch.long,
                                     device=inputs_embeds.device)
-        return position_ids.unsqueeze(0)
+        return position_ids.unsqueeze(0).expand(input_shape)

 ROBERTA_START_DOCSTRING = r""" The RoBERTa model was proposed in
...
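The substantive change is the trailing .expand(input_shape) in create_position_ids_from_inputs_embeds: before the fix the method returned position ids of shape (1, sequence_length) regardless of batch size, which is the dimensionality bug of the commit title. A minimal standalone sketch of the fixed behavior follows; it is a free function rather than the class method, it assumes RoBERTa's padding_idx of 1, and the tensor shapes are purely illustrative:

    import torch

    padding_idx = 1  # RoBERTa reserves index 1 for padding

    def create_position_ids_from_inputs_embeds(inputs_embeds):
        # inputs_embeds: (batch_size, sequence_length, hidden_size)
        input_shape = inputs_embeds.size()[:-1]   # (batch_size, sequence_length)
        sequence_length = input_shape[1]
        position_ids = torch.arange(padding_idx + 1, sequence_length + padding_idx + 1,
                                    dtype=torch.long, device=inputs_embeds.device)
        # Before the fix this returned position_ids.unsqueeze(0), shape (1, sequence_length).
        # Expanding makes the result match the batch dimension of inputs_embeds.
        return position_ids.unsqueeze(0).expand(input_shape)

    embeds = torch.zeros(2, 4, 30)  # batch of 2, sequence length 4
    print(create_position_ids_from_inputs_embeds(embeds))
    # tensor([[2, 3, 4, 5],
    #         [2, 3, 4, 5]])

The updated test below exercises exactly this case: a batch of 2 embeddings, with the same row of expected positions duplicated once per batch element.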
transformers/tests/modeling_roberta_test.py

@@ -225,6 +225,10 @@ class RobertaModelTest(CommonTestCases.CommonModelTester):
         ]])
         position_ids = model.create_position_ids_from_input_ids(input_ids)
+        self.assertEqual(position_ids.shape, expected_positions.shape)
         self.assertTrue(torch.all(torch.eq(position_ids, expected_positions)))

     def test_create_position_ids_from_inputs_embeds(self):
...

@@ -235,17 +239,24 @@ class RobertaModelTest(CommonTestCases.CommonModelTester):
         first available non-padding position index is RobertaEmbeddings.padding_idx + 1
         """
         config = self.model_tester.prepare_config_and_inputs()[0]
-        model = RobertaEmbeddings(config=config)
-        input_ids = torch.Tensor(1, 4, 30)
-        expected_positions = torch.as_tensor([[
-            0 + model.padding_idx + 1,
-            1 + model.padding_idx + 1,
-            2 + model.padding_idx + 1,
-            3 + model.padding_idx + 1,
-        ]])
-        position_ids = model.create_position_ids_from_inputs_embeds(input_ids)
-        self.assertTrue(torch.all(torch.eq(position_ids, expected_positions)))
+        embeddings = RobertaEmbeddings(config=config)
+        inputs_embeds = torch.Tensor(2, 4, 30)
+        expected_single_positions = [
+            0 + embeddings.padding_idx + 1,
+            1 + embeddings.padding_idx + 1,
+            2 + embeddings.padding_idx + 1,
+            3 + embeddings.padding_idx + 1,
+        ]
+        expected_positions = torch.as_tensor([expected_single_positions, expected_single_positions])
+        position_ids = embeddings.create_position_ids_from_inputs_embeds(inputs_embeds)
+        self.assertEqual(position_ids.shape, expected_positions.shape)
+        self.assertTrue(torch.all(torch.eq(position_ids, expected_positions)))

 class RobertaModelIntegrationTest(unittest.TestCase):
...
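The first test hunk only adds a shape assertion around create_position_ids_from_input_ids. For readers following along, the semantics that helper is expected to have (per the surrounding comments: padded tokens remain padded, and the first non-padding position index is padding_idx + 1) can be sketched as below. This is a paraphrase of the mask-and-cumsum trick from fairseq's make_positions, not the verbatim library code:

    import torch

    def create_position_ids_from_input_ids(input_ids, padding_idx=1):
        # Mask out padding, then cumulative-sum so each real token gets its
        # 1-based position among real tokens; offset everything by padding_idx,
        # so padding tokens end up at exactly padding_idx.
        mask = input_ids.ne(padding_idx).long()
        incremental_indices = torch.cumsum(mask, dim=1) * mask
        return incremental_indices + padding_idx

    ids = torch.tensor([[0, 5, 7, 1, 1]])  # 1 is RoBERTa's padding index
    print(create_position_ids_from_input_ids(ids))
    # tensor([[2, 3, 4, 1, 1]])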