"git@developer.sourcefind.cn:modelzoo/resnet50_tensorflow.git" did not exist on "12f279d6f4cb33574bc20109b41eb8a59f40cfd1"
Commit 6f7a682d authored by Chen Chen's avatar Chen Chen Committed by A. Unique TensorFlower
Browse files

Internal Change

PiperOrigin-RevId: 312116965
parent 19e60dc3
...@@ -133,17 +133,6 @@ class TransformerScaffold(tf.keras.layers.Layer): ...@@ -133,17 +133,6 @@ class TransformerScaffold(tf.keras.layers.Layer):
attention_cfg = self._attention_cfg attention_cfg = self._attention_cfg
self._attention_layer = self._attention_cls(**attention_cfg) self._attention_layer = self._attention_cls(**attention_cfg)
self._attention_output_dense = dense_einsum.DenseEinsum(
output_shape=hidden_size,
num_summed_dimensions=2,
kernel_initializer=self._kernel_initializer,
bias_initializer=self._bias_initializer,
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer,
activity_regularizer=self._activity_regularizer,
kernel_constraint=self._kernel_constraint,
bias_constraint=self._bias_constraint,
name="self_attention_output")
self._attention_dropout = tf.keras.layers.Dropout(rate=self._dropout_rate) self._attention_dropout = tf.keras.layers.Dropout(rate=self._dropout_rate)
# Use float32 in layernorm for numeric stability. # Use float32 in layernorm for numeric stability.
# It is probably safe in mixed_float16, but we haven't validated this yet. # It is probably safe in mixed_float16, but we haven't validated this yet.
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment