Unverified Commit 26ba56cc authored by Lysandre Debut, committed by GitHub

Fix FSMT weight sharing (#26292)

parent da971b22
@@ -1056,6 +1056,7 @@ class FSMTModel(PretrainedFSMTModel):
         return self.decoder
 
     def _tie_weights(self):
-        self._tie_or_clone_weights(self.decoder.embed_tokens, self.get_input_embeddings())
-        self._tie_or_clone_weights(self.decoder.output_projection, self.get_input_embeddings())
+        if self.config.tie_word_embeddings:
+            self._tie_or_clone_weights(self.decoder.embed_tokens, self.get_input_embeddings())
+            self._tie_or_clone_weights(self.decoder.output_projection, self.get_input_embeddings())
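In effect, the patch makes the tie between the decoder's input embeddings and its output projection conditional on config.tie_word_embeddings. Below is a minimal, self-contained sketch of what weight tying and the data_ptr() check used by the test amount to; the module names mirror FSMT's for clarity, but this is an illustration, not the transformers implementation.

# A minimal sketch of weight tying, assuming FSMT-like module names.
import torch.nn as nn

vocab_size, hidden_size = 100, 16
tie_word_embeddings = True  # plays the role of config.tie_word_embeddings

embed_tokens = nn.Embedding(vocab_size, hidden_size)
output_projection = nn.Linear(hidden_size, vocab_size, bias=False)

if tie_word_embeddings:
    # Tying means both modules reference the same Parameter object,
    # so they train together and occupy a single storage buffer.
    output_projection.weight = embed_tokens.weight

# data_ptr() returns the underlying storage address, so equal pointers
# mean the weight really is shared -- exactly what the test below asserts.
print(embed_tokens.weight.data_ptr() == output_projection.weight.data_ptr())

Counting distinct data_ptr() values, as the test does with a set, is a cheap way to assert sharing without comparing tensor contents.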
@@ -273,6 +273,8 @@ class FSMTModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin
     def test_ensure_weights_are_shared(self):
         config, inputs_dict = self.model_tester.prepare_config_and_inputs()
+        config.tie_word_embeddings = True
         model = FSMTForConditionalGeneration(config)
 
         # FSMT shares three weights.
@@ -288,6 +290,22 @@ class FSMTModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin
             1,
         )
 
+        config.tie_word_embeddings = False
+        model = FSMTForConditionalGeneration(config)
+
+        # FSMT shares three weights.
+        # Not an issue to not have these correctly tied for torch.load, but it is an issue for safetensors.
+        self.assertEqual(
+            len(
+                {
+                    model.get_output_embeddings().weight.data_ptr(),
+                    model.get_input_embeddings().weight.data_ptr(),
+                    model.base_model.decoder.output_projection.weight.data_ptr(),
+                }
+            ),
+            2,
+        )
+
     @unittest.skip("can't be implemented for FSMT due to dual vocab.")
     def test_resize_tokens_embeddings(self):
         pass
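The comment in the new test is the crux of the fix: torch.save/torch.load round-trip aliased tensors without complaint, but safetensors has no notion of shared storage and refuses to write tensors that share memory, so transformers must know exactly which weights are tied in order to drop the duplicates at save time. A hedged illustration of that difference follows; the file names are arbitrary, and the error type raised by safetensors is assumed to be RuntimeError.

# Sketch: why untracked weight sharing is fine for torch.load but not safetensors.
import torch
from safetensors.torch import save_file

shared = torch.zeros(4, 4)
state_dict = {"embed_tokens.weight": shared, "output_projection.weight": shared}

# torch.save serializes the aliasing happily; torch.load restores both entries.
torch.save(state_dict, "tied.pt")

# safetensors rejects tensors that share memory; callers must deduplicate
# them first, which requires the tie bookkeeping this commit corrects.
try:
    save_file(state_dict, "tied.safetensors")
except RuntimeError as err:
    print(f"safetensors refused shared tensors: {err}")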