Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
chenpangpang
transformers
Commits
161a6461
"vscode:/vscode.git/clone" did not exist on "a25037beb9f039270b30a94c34ead72ea80ae8a5"
Unverified
Commit
161a6461
authored
Dec 21, 2020
by
Julien Plu
Committed by
GitHub
Dec 21, 2020
Browse files
Fix TF template (#9234)
parent
5a8a4eb1
Changes
1
Show whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
19 additions
and
11 deletions
+19
-11
templates/adding_a_new_model/cookiecutter-template-{{cookiecutter.modelname}}/modeling_tf_{{cookiecutter.lowercase_modelname}}.py
...ame}}/modeling_tf_{{cookiecutter.lowercase_modelname}}.py
+19
-11
No files found.
templates/adding_a_new_model/cookiecutter-template-{{cookiecutter.modelname}}/modeling_tf_{{cookiecutter.lowercase_modelname}}.py
View file @
161a6461
...
...
@@ -310,18 +310,22 @@ class TF{{cookiecutter.camelcase_modelname}}Intermediate(tf.keras.layers.Layer):
def
__init__
(
self
,
config
,
**
kwargs
):
super
().
__init__
(
**
kwargs
)
self
.
dense
=
tf
.
keras
.
layers
.
Dense
(
config
.
intermediate_size
,
kernel_initializer
=
get_initializer
(
config
.
initializer_range
),
name
=
"dense"
self
.
dense
=
tf
.
keras
.
layers
.
experimental
.
EinsumDense
(
equation
=
"abc,cd->abd"
,
output_shape
=
(
None
,
config
.
intermediate_size
),
bias_axes
=
"d"
,
kernel_initializer
=
get_initializer
(
initializer_range
=
config
.
initializer_range
),
name
=
"dense"
,
)
if
isinstance
(
config
.
hidden_act
,
str
):
self
.
intermediate_act_fn
=
get_tf_activation
(
config
.
hidden_act
)
self
.
intermediate_act_fn
=
get_tf_activation
(
activation_string
=
config
.
hidden_act
)
else
:
self
.
intermediate_act_fn
=
config
.
hidden_act
def call(self, hidden_states):
    """Apply the intermediate projection followed by the activation.

    Args:
        hidden_states: Input tensor — presumably (batch, seq, hidden);
            TODO confirm against the attention output shape.

    Returns:
        The activated, expanded hidden states.
    """
    # Post-commit form kept: both sub-layers are invoked with the explicit
    # `inputs=` keyword (the duplicated positional pre-commit lines from the
    # mangled diff are dropped).
    hidden_states = self.dense(inputs=hidden_states)
    hidden_states = self.intermediate_act_fn(inputs=hidden_states)

    return hidden_states
...
...
@@ -331,16 +335,20 @@ class TF{{cookiecutter.camelcase_modelname}}Output(tf.keras.layers.Layer):
def __init__(self, config, **kwargs):
    """Build the output sub-layer: projection, dropout and residual LayerNorm.

    Args:
        config: Model configuration; this method reads `hidden_size`,
            `initializer_range`, `layer_norm_eps` and `hidden_dropout_prob`.
        **kwargs: Forwarded to `tf.keras.layers.Layer.__init__`.
    """
    super().__init__(**kwargs)

    # NOTE(review): the mangled diff carried both the removed `Dense` and the
    # added `EinsumDense` definitions; only the final (post-commit) version
    # is kept. This projects intermediate activations back to `hidden_size`.
    self.dense = tf.keras.layers.experimental.EinsumDense(
        equation="abc,cd->abd",
        bias_axes="d",
        output_shape=(None, config.hidden_size),
        kernel_initializer=get_initializer(config.initializer_range),
        name="dense",
    )
    self.LayerNorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="LayerNorm")
    # Post-commit form: dropout probability passed via the explicit `rate=`
    # keyword (duplicated positional pre-commit line dropped).
    self.dropout = tf.keras.layers.Dropout(rate=config.hidden_dropout_prob)
def call(self, hidden_states, input_tensor, training=False):
    """Project, drop out, then layer-normalize with a residual connection.

    Args:
        hidden_states: Output of the intermediate sub-layer.
        input_tensor: Residual input added before LayerNorm.
        training: Whether dropout is active (training mode).

    Returns:
        The normalized residual sum.
    """
    # Post-commit form kept: every sub-layer is invoked with the explicit
    # `inputs=` keyword (the duplicated positional pre-commit lines from the
    # mangled diff are dropped).
    hidden_states = self.dense(inputs=hidden_states)
    hidden_states = self.dropout(inputs=hidden_states, training=training)
    hidden_states = self.LayerNorm(inputs=hidden_states + input_tensor)

    return hidden_states
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment