Commit 50e62a4c authored by LysandreJik's avatar LysandreJik
Browse files

fix gpt/gpt-2 from pretrained

parent 273617b8
...@@ -423,7 +423,7 @@ class GPT2PreTrainedModel(PreTrainedModel): ...@@ -423,7 +423,7 @@ class GPT2PreTrainedModel(PreTrainedModel):
""" """
num_special_tokens = kwargs.pop('num_special_tokens', None) num_special_tokens = kwargs.pop('num_special_tokens', None)
model = super(PreTrainedModel, cls).from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs) model = super().from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
# Add additional embeddings for special tokens if needed # Add additional embeddings for special tokens if needed
# This step also make sure we are still sharing the output and input embeddings after loading weights # This step also make sure we are still sharing the output and input embeddings after loading weights
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment