"web/git@developer.sourcefind.cn:chenpangpang/ComfyUI.git" did not exist on "79d08997f163ff36e3772ceac96937ebde3bb56f"
Commit 6fe1cc08 authored by Gunnlaugur Thor Briem's avatar Gunnlaugur Thor Briem
Browse files

fix: clean up inadvertent change in tf_t5

This was the beginnings of an attempt to address the test failure on
this layer, and instead I backed out of making this layer
keras-serializable at all ... so it was a mistake to commit this.
parent 18f4b927
...@@ -383,21 +383,14 @@ class TFT5MainLayer(tf.keras.layers.Layer): ...@@ -383,21 +383,14 @@ class TFT5MainLayer(tf.keras.layers.Layer):
def call( def call(
self, self,
inputs, hidden_states,
attention_mask=None, attention_mask=None,
encoder_hidden_states=None, encoder_hidden_states=None,
encoder_attention_mask=None, encoder_attention_mask=None,
head_mask=None, head_mask=None,
training=False, training=False,
): ):
if isinstance(inputs, (tuple, list)):
hidden_states = inputs[0]
assert len(inputs) <= 1, "Too many inputs."
elif isinstance(inputs, dict):
hidden_states = inputs["hidden_states"]
assert len(inputs) <= 1, "Too many inputs."
else:
hidden_states = inputs
batch_size, seq_length = shape_list(hidden_states)[:2] batch_size, seq_length = shape_list(hidden_states)[:2]
if attention_mask is None: if attention_mask is None:
attention_mask = tf.fill((batch_size, seq_length), 1) attention_mask = tf.fill((batch_size, seq_length), 1)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment