"...composable_kernel_rocm.git" did not exist on "922e42a039a42770446c42fabc62fe1e7b050625"
Commit 624a5644 authored by Rémi Louf

revert black formatting to conform with lib style

parent 9b71fc9a
@@ -43,13 +43,7 @@ class PreTrainedSeq2seq(PreTrainedModel):
         self.decoder = decoder
 
     @classmethod
-    def from_pretrained(
-        cls,
-        encoder_pretrained_model_name_or_path,
-        decoder_pretrained_model_name_or_path,
-        *model_args,
-        **kwargs
-    ):
+    def from_pretrained(cls, encoder_pretrained_model_name_or_path, decoder_pretrained_model_name_or_path, *model_args, **kwargs):
         r""" Instantiates an encoder and a decoder from one or two base classes
         of the library from pre-trained model checkpoints.
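As context for the hunk above: a minimal usage sketch of the signature being reverted, assuming the surrounding `transformers` encoder-decoder code; the checkpoint names are illustrative placeholders, not part of this commit.

    # Hypothetical usage of PreTrainedSeq2seq.from_pretrained as declared above.
    # The checkpoint names are placeholders, not taken from this commit.
    model = PreTrainedSeq2seq.from_pretrained(
        "bert-base-uncased",  # encoder_pretrained_model_name_or_path
        "bert-base-uncased",  # decoder_pretrained_model_name_or_path
    )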
@@ -190,7 +184,7 @@ class Model2Model(PreTrainedSeq2seq):
     def tie_weights(self):
         """ Tying the encoder and decoders' embeddings together.
         We need for each to get down to the embedding weights. However the
         different model classes are inconsistent to that respect:
         - BertModel: embeddings.word_embeddings
         - RoBERTa: embeddings.word_embeddings
...
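The `tie_weights` docstring in the hunk above notes that the model classes are inconsistent about where the word embeddings live. A minimal sketch, assuming plain attribute access on PyTorch-style modules, of how the lookup could be made layout-tolerant; the helper `_get_word_embeddings` is hypothetical and not part of this commit.

    def _get_word_embeddings(model):
        # Hypothetical helper: walk down to the word-embedding module,
        # trying the layouts named in the docstring. BertModel and RoBERTa
        # both use `embeddings.word_embeddings`; other classes would need
        # their own path added here.
        for path in ("embeddings.word_embeddings",):
            obj = model
            try:
                for attr in path.split("."):
                    obj = getattr(obj, attr)
                return obj
            except AttributeError:
                continue
        raise AttributeError("cannot locate word embeddings on %s" % type(model).__name__)

    def tie_weights(encoder, decoder):
        # Point the decoder's embedding module at the encoder's Parameter
        # so the weights stay shared during training.
        _get_word_embeddings(decoder).weight = _get_word_embeddings(encoder).weight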