Unverified commit 3a134f7c authored by Julien Plu, committed by GitHub
Browse files

Fix TF savedmodel in Roberta (#7795)

* Remove wrong parameter.

* Same in Longformer
parent 3032de93
......@@ -125,7 +125,7 @@ class TFLongformerEmbeddings(tf.keras.layers.Layer):
"""
def __init__(self, config, **kwargs):
super().__init__(config, **kwargs)
super().__init__(**kwargs)
self.padding_idx = 1
self.vocab_size = config.vocab_size
......
......@@ -70,7 +70,7 @@ class TFRobertaEmbeddings(tf.keras.layers.Layer):
"""
def __init__(self, config, **kwargs):
super().__init__(config, **kwargs)
super().__init__(**kwargs)
self.padding_idx = 1
self.vocab_size = config.vocab_size
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment