chenpangpang / transformers
"vscode:/vscode.git/clone" did not exist on "4a790c40b1817fd457043f9933266b4d5e20b3b7"
Commit cd6a59d5 authored Oct 08, 2019 by Rémi Louf

add a decoder layer for Bert

parent a0dcefa3
Showing 1 changed file with 21 additions and 3 deletions.
transformers/modeling_bert.py

@@ -341,10 +341,28 @@ class BertEncoderLayer(nn.Module):
 class BertDecoderLayer(nn.Module):
     def __init__(self, config):
         super(BertDecoderLayer, self).__init__()
-        raise NotImplementedError
+        self.self_attention = BertAttention(config)
+        self.attention = BertAttention(config)
+        self.intermediate = BertIntermediate(config)
+        self.output = BertOutput(config)
 
-    def forward(self, hidden_state, encoder_output):
-        raise NotImplementedError
+    def forward(self, hidden_states, encoder_outputs, attention_mask=None, head_mask=None):
+        self_attention_outputs = self.self_attention(query_tensor=hidden_states,
+                                                     key_tensor=hidden_states,
+                                                     value_tensor=hidden_states,
+                                                     attention_mask=attention_mask,
+                                                     head_mask=head_mask)
+        self_attention_output = self_attention_outputs[0]
+        attention_outputs = self.attention(query_tensor=self_attention_output,
+                                           key_tensor=encoder_outputs,
+                                           value_tensor=encoder_outputs,
+                                           attention_mask=attention_mask,
+                                           head_mask=head_mask)
+        attention_output = attention_outputs[0]
+        intermediate_output = self.intermediate(attention_output)
+        layer_output = self.output(intermediate_output, attention_output)
+        outputs = (layer_output,) + attention_outputs[1:]
+        return outputs
 
 
 class BertEncoder(nn.Module):
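For readers skimming the diff: the new layer follows the standard transformer decoder pattern, self-attention over the decoder's own hidden states, cross-attention from those states to the encoder outputs, then a feed-forward block. The sketch below illustrates that pattern as a standalone module. It is illustrative only: it substitutes torch.nn.MultiheadAttention and explicit residual/LayerNorm steps for the branch's BertAttention / BertIntermediate / BertOutput modules (the query_tensor/key_tensor/value_tensor keywords in the diff exist only on this work-in-progress branch), and every name and size in it is an assumption, not part of the transformers API.

import torch
import torch.nn as nn

class DecoderLayerSketch(nn.Module):
    """Illustrative stand-in for BertDecoderLayer; not the transformers API."""

    def __init__(self, hidden_size=768, num_heads=12, intermediate_size=3072):
        super().__init__()
        # Self-attention: queries, keys, and values all come from the decoder.
        self.self_attention = nn.MultiheadAttention(hidden_size, num_heads, batch_first=True)
        # Cross-attention: queries from the decoder, keys/values from the encoder.
        self.cross_attention = nn.MultiheadAttention(hidden_size, num_heads, batch_first=True)
        # Feed-forward block, playing the role of BertIntermediate + BertOutput.
        self.feed_forward = nn.Sequential(
            nn.Linear(hidden_size, intermediate_size),
            nn.GELU(),
            nn.Linear(intermediate_size, hidden_size),
        )
        self.norm1 = nn.LayerNorm(hidden_size)
        self.norm2 = nn.LayerNorm(hidden_size)
        self.norm3 = nn.LayerNorm(hidden_size)

    def forward(self, hidden_states, encoder_outputs):
        # 1) Attend over the decoder's own hidden states.
        attn_out, _ = self.self_attention(hidden_states, hidden_states, hidden_states)
        hidden_states = self.norm1(hidden_states + attn_out)
        # 2) Attend from the decoder states to the encoder outputs.
        attn_out, _ = self.cross_attention(hidden_states, encoder_outputs, encoder_outputs)
        hidden_states = self.norm2(hidden_states + attn_out)
        # 3) Position-wise feed-forward with a residual connection.
        return self.norm3(hidden_states + self.feed_forward(hidden_states))

# Usage with dummy tensors: batch of 2, decoder length 5, encoder length 7.
layer = DecoderLayerSketch()
out = layer(torch.randn(2, 5, 768), torch.randn(2, 7, 768))
print(out.shape)  # torch.Size([2, 5, 768])

One difference worth noting: in modeling_bert.py the residual connections and LayerNorms live inside BertSelfOutput and BertOutput, which is why the commit's forward does not spell them out; the sketch makes them explicit instead.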