Unverified commit 88cc26dc authored by Lysandre Debut, committed by GitHub

Ignore unexpected weights from PT conversion (#10397)

parent 63645b3b
@@ -919,7 +919,11 @@ Bert Model with two heads on top as done during the pretraining:
 )
 class TFBertForPreTraining(TFBertPreTrainedModel, TFBertPreTrainingLoss):
     # names with a '.' represents the authorized unexpected/missing layers when a TF model is loaded from a PT model
-    _keys_to_ignore_on_load_unexpected = [r"cls.predictions.decoder.weight"]
+    _keys_to_ignore_on_load_unexpected = [
+        r"position_ids",
+        r"cls.predictions.decoder.weight",
+        r"cls.predictions.decoder.bias",
+    ]
 
     def __init__(self, config: BertConfig, *inputs, **kwargs):
         super().__init__(config, *inputs, **kwargs)
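For context, a minimal sketch of how patterns like these could be applied when reconciling a PyTorch checkpoint with a TF model: the names of unexpected weights are matched against the regexes and silently dropped rather than warned about. `filter_unexpected_keys` is a hypothetical helper for illustration only; the actual filtering happens inside the library's PT-to-TF loading code.

```python
import re

# Illustrative copy of the patterns added in this commit: weight names that
# may legitimately be extra or missing when a TF model is loaded from a
# PyTorch checkpoint.
_keys_to_ignore_on_load_unexpected = [
    r"position_ids",
    r"cls.predictions.decoder.weight",
    r"cls.predictions.decoder.bias",
]

def filter_unexpected_keys(unexpected_keys):
    """Drop weight names that match any authorized-ignore pattern.

    Hypothetical helper, not transformers API; shown only to demonstrate
    how the regex list is interpreted.
    """
    patterns = [re.compile(p) for p in _keys_to_ignore_on_load_unexpected]
    return [k for k in unexpected_keys if not any(p.search(k) for p in patterns)]

# Only truly unexpected weights remain and would be reported to the user.
print(filter_unexpected_keys([
    "bert.embeddings.position_ids",    # ignored via r"position_ids"
    "cls.predictions.decoder.weight",  # ignored
    "some.totally.unknown.weight",     # kept -> would trigger a warning
]))
# -> ['some.totally.unknown.weight']
```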