chenpangpang/transformers · Commit 7251a473 (Unverified)
Authored Jan 20, 2021 by Julien Plu; committed by GitHub on Jan 20, 2021
Parent: 14042d56

Fix template (#9697)
Showing 2 changed files, with 5 additions and 5 deletions:
src/transformers/models/roberta/modeling_tf_roberta.py (+3, -3)
templates/adding_a_new_model/cookiecutter-template-{{cookiecutter.modelname}}/modeling_tf_{{cookiecutter.lowercase_modelname}}.py (+2, -2)
src/transformers/models/roberta/modeling_tf_roberta.py
@@ -307,7 +307,7 @@ class TFRobertaPooler(tf.keras.layers.Layer):
         return pooled_output
 
 
-# Copied from transformers.models.bert.modeling_tf_bert.TFBertSelfAttention
+# Copied from transformers.models.bert.modeling_tf_bert.TFBertSelfAttention with Bert->Roberta
 class TFRobertaSelfAttention(tf.keras.layers.Layer):
     def __init__(self, config, **kwargs):
         super().__init__(**kwargs)
@@ -355,7 +355,7 @@ class TFRobertaSelfAttention(tf.keras.layers.Layer):
         attention_scores = tf.einsum("aecd,abcd->acbe", key_layer, query_layer)
 
         if attention_mask is not None:
-            # Apply the attention mask is (precomputed for all layers in TFBertModel call() function)
+            # Apply the attention mask is (precomputed for all layers in TFRobertaModel call() function)
             attention_scores = attention_scores + attention_mask
 
         # Normalize the attention scores to probabilities.
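As an aside on the unchanged context line above: the tf.einsum contraction "aecd,abcd->acbe" forms the raw per-head attention scores (queries dotted with keys over the head-size dimension). A minimal standalone sketch with made-up shapes, just to show what the contraction produces:

import tensorflow as tf

# Made-up sizes; the index letters follow the diff's einsum string:
# a=batch, b=query length, e=key length, c=attention heads, d=head size.
batch, q_len, k_len, heads, head_size = 2, 5, 7, 4, 8
query_layer = tf.random.normal((batch, q_len, heads, head_size))  # "abcd"
key_layer = tf.random.normal((batch, k_len, heads, head_size))    # "aecd"

# Summing over d gives a query/key dot product per head, with shape
# (batch, heads, query length, key length).
attention_scores = tf.einsum("aecd,abcd->acbe", key_layer, query_layer)
print(attention_scores.shape)  # (2, 4, 5, 7)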
@@ -375,7 +375,7 @@ class TFRobertaSelfAttention(tf.keras.layers.Layer):
         return outputs
 
 
-# Copied from transformers.models.bert.modeling_tf_bert.TFBertSelfOutput with Bert->Roberta
+# Copied from transformers.models.bert.modeling_tf_bert.TFBertSelfOutput
 class TFRobertaSelfOutput(tf.keras.layers.Layer):
     def __init__(self, config, **kwargs):
         super().__init__(**kwargs)
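The `# Copied from ... with Bert->Roberta` annotations above are not just documentation: they drive the transformers copy-consistency check (utils/check_copies.py), which re-reads the referenced Bert code, applies the stated replacement, and requires the result to match the annotated code exactly. That is presumably why this commit adds the `with Bert->Roberta` clause on TFRobertaSelfAttention (its mask comment now says TFRobertaModel rather than TFBertModel) and drops it from TFRobertaSelfOutput, whose copied body needs no substitution. A rough sketch of the mechanism, with hypothetical helper names rather than the real implementation:

from typing import Optional

def apply_pattern(source: str, pattern: Optional[str]) -> str:
    # A pattern like "Bert->Roberta" means: rewrite every occurrence of
    # the old name. (The real check also handles casing variants; this
    # sketch applies only the pattern exactly as written.)
    if pattern is None:
        return source
    old, new = pattern.split("->")
    return source.replace(old, new)

def is_copy_consistent(reference_code: str, copied_code: str,
                       pattern: Optional[str] = None) -> bool:
    # Without a pattern, the copy must match the reference verbatim;
    # with one, it must match after the replacement is applied.
    return apply_pattern(reference_code, pattern) == copied_code

# The comment line fixed by this commit, checked both ways:
bert_line = "# precomputed for all layers in TFBertModel call() function"
roberta_line = "# precomputed for all layers in TFRobertaModel call() function"
assert not is_copy_consistent(bert_line, roberta_line)               # plain copy fails
assert is_copy_consistent(bert_line, roberta_line, "Bert->Roberta")  # annotated copy passes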
templates/adding_a_new_model/cookiecutter-template-{{cookiecutter.modelname}}/modeling_tf_{{cookiecutter.lowercase_modelname}}.py
@@ -241,7 +241,7 @@ class TF{{cookiecutter.camelcase_modelname}}Embeddings(tf.keras.layers.Layer):
 
 
-# Copied from transformers.models.bert.modeling_tf_bert.TFBertSelfAttention
+# Copied from transformers.models.bert.modeling_tf_bert.TFBertSelfAttention with Bert->{{cookiecutter.camelcase_modelname}}
 class TF{{cookiecutter.camelcase_modelname}}SelfAttention(tf.keras.layers.Layer):
     def __init__(self, config, **kwargs):
         super().__init__(**kwargs)
@@ -309,7 +309,7 @@ class TF{{cookiecutter.camelcase_modelname}}SelfAttention(tf.keras.layers.Layer)
         return outputs
 
 
-# Copied from transformers.models.bert.modeling_tf_bert.TFBertSelfOutput with Bert->{{cookiecutter.camelcase_modelname}}
+# Copied from transformers.models.bert.modeling_tf_bert.TFBertSelfOutput
 class TF{{cookiecutter.camelcase_modelname}}SelfOutput(tf.keras.layers.Layer):
     def __init__(self, config, **kwargs):
         super().__init__(**kwargs)
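For the template file, the `{{cookiecutter.*}}` placeholders are Jinja-style variables that cookiecutter fills in when someone generates a new model skeleton, so the corrected comment expands to a model-specific `with Bert->...` clause. A quick illustration of the substitution using jinja2 directly, with made-up context values:

from jinja2 import Template

line = (
    "# Copied from transformers.models.bert.modeling_tf_bert.TFBertSelfAttention "
    "with Bert->{{cookiecutter.camelcase_modelname}}"
)
# Hypothetical answers a user might give when running the cookiecutter.
context = {"cookiecutter": {"camelcase_modelname": "NewModel"}}
print(Template(line).render(context))
# -> # Copied from transformers.models.bert.modeling_tf_bert.TFBertSelfAttention with Bert->NewModel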