Unverified Commit bbe9c698 authored by Yih-Dar's avatar Yih-Dar Committed by GitHub
Browse files

Fix TFRemBertEncoder all_hidden_states (#15510)



* fix

* fix test

* remove expected_num_hidden_layers
Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
parent 854a0d52
...@@ -477,7 +477,7 @@ class TFRemBertEncoder(tf.keras.layers.Layer): ...@@ -477,7 +477,7 @@ class TFRemBertEncoder(tf.keras.layers.Layer):
training: bool = False, training: bool = False,
) -> Union[TFBaseModelOutputWithPastAndCrossAttentions, Tuple[tf.Tensor]]: ) -> Union[TFBaseModelOutputWithPastAndCrossAttentions, Tuple[tf.Tensor]]:
hidden_states = self.embedding_hidden_mapping_in(inputs=hidden_states) hidden_states = self.embedding_hidden_mapping_in(inputs=hidden_states)
all_hidden_states = (hidden_states,) if output_hidden_states else None all_hidden_states = () if output_hidden_states else None
all_attentions = () if output_attentions else None all_attentions = () if output_attentions else None
all_cross_attentions = () if output_attentions and self.config.add_cross_attention else None all_cross_attentions = () if output_attentions and self.config.add_cross_attention else None
......
...@@ -90,9 +90,6 @@ class TFRemBertModelTester: ...@@ -90,9 +90,6 @@ class TFRemBertModelTester:
self.num_choices = 4 self.num_choices = 4
self.scope = None self.scope = None
# RemBERT also returns the upprojected word embeddings as an hidden layers
self.expected_num_hidden_layers = self.num_hidden_layers + 2
def prepare_config_and_inputs(self): def prepare_config_and_inputs(self):
input_ids = ids_tensor([self.batch_size, self.seq_length], self.vocab_size) input_ids = ids_tensor([self.batch_size, self.seq_length], self.vocab_size)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment