"vscode:/vscode.git/clone" did not exist on "1b3c2e403585bf1884b195289b7e863d2924379d"
Commit 9575381a authored by xinliupitt

initializers.get

parent 2deb09ae
@@ -100,7 +100,8 @@ class Transformer(tf.keras.layers.Layer):
     self._norm_epsilon = norm_epsilon
     self._intermediate_dropout = intermediate_dropout
     if attention_initializer:
-      self._attention_initializer = attention_initializer
+      self._attention_initializer = tf.keras.initializers.get(
+          attention_initializer)
     else:
       self._attention_initializer = self._kernel_initializer
@@ -222,7 +223,7 @@ class Transformer(tf.keras.layers.Layer):
         "intermediate_dropout":
             self._intermediate_dropout,
         "attention_initializer":
-            self._attention_initializer
+            tf.keras.initializers.serialize(self._attention_initializer)
     }
     base_config = super(Transformer, self).get_config()
     return dict(list(base_config.items()) + list(config.items()))
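With this change, `attention_initializer` no longer has to be a live `Initializer` instance: `tf.keras.initializers.get` also resolves the string and config-dict forms Keras accepts elsewhere, and passes instances through unchanged. A minimal sketch of the three accepted specs (the `stddev` value is illustrative):

```python
import tensorflow as tf

# String identifier, config dict, or instance all resolve to an Initializer;
# instances are returned as-is.
by_string = tf.keras.initializers.get("glorot_uniform")
by_config = tf.keras.initializers.get(
    {"class_name": "TruncatedNormal", "config": {"stddev": 0.02}})
instance = tf.keras.initializers.TruncatedNormal(stddev=0.02)
by_instance = tf.keras.initializers.get(instance)

print(type(by_string).__name__)          # GlorotUniform
print(by_config.get_config()["stddev"])  # 0.02
print(by_instance is instance)           # True (pass-through)
```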
@@ -355,7 +356,8 @@ class TransformerDecoderLayer(tf.keras.layers.Layer):
     self._norm_epsilon = norm_epsilon
     self._intermediate_dropout = intermediate_dropout
     if attention_initializer:
-      self._attention_initializer = attention_initializer
+      self._attention_initializer = tf.keras.initializers.get(
+          attention_initializer)
     else:
       self._attention_initializer = self._kernel_initializer
     if self.multi_channel_cross_attention:
@@ -484,7 +486,7 @@ class TransformerDecoderLayer(tf.keras.layers.Layer):
         "intermediate_dropout":
             self._intermediate_dropout,
         "attention_initializer":
-            self._attention_initializer
+            tf.keras.initializers.serialize(self._attention_initializer)
     }
     base_config = super(TransformerDecoderLayer, self).get_config()
     return dict(list(base_config.items()) + list(config.items()))
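Serializing the initializer in `get_config` keeps the returned config JSON-friendly, so the layer can round-trip through `get_config`/`from_config`; storing the live object would break that. A minimal sketch of the round trip, independent of the layers above:

```python
import tensorflow as tf

init = tf.keras.initializers.get("glorot_uniform")

# get_config should carry a plain dict rather than the live object.
cfg = tf.keras.initializers.serialize(init)
# e.g. {"class_name": "GlorotUniform", "config": {"seed": None}}

# On the from_config path, the dict rebuilds an equivalent initializer
# (tf.keras.initializers.get accepts this dict form as well).
restored = tf.keras.initializers.deserialize(cfg)
assert isinstance(restored, tf.keras.initializers.GlorotUniform)
```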