"git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "2d8fbf012a12c4ad6b4a8d4e08671f068732628c"
Commit 273617b8 authored by thomwolf

update config - fix gpt/gpt-2 from pretrained

parent 6b13f4cb
@@ -24,7 +24,7 @@ jobs:
       - checkout
       - run: sudo pip install --progress-bar off .
       - run: sudo pip install pytest codecov pytest-cov
-      - run: sudo pip install tensorboardX scikit-learn
+      - run: sudo pip install tensorboardX scikit-learn mock
       - run: python -m pytest -sv ./pytorch_transformers/tests/ --cov
       - run: python -m pytest -sv ./examples/
       - run: codecov
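The CI change adds mock to the test dependencies, presumably because the updated tests patch objects out during testing (the exact usage is not shown in this commit). A minimal sketch of the kind of test mock enables, with hypothetical function and file names:

from mock import patch  # on Python 3 the same API lives in unittest.mock

def download_weights(url):
    # Stand-in for a network-bound helper; hypothetical, not from the repo.
    raise RuntimeError('no network access in CI')

def test_download_is_stubbed():
    # patch() swaps the module-level function for a stub inside the block,
    # so the test never touches the network.
    with patch(__name__ + '.download_weights', return_value=b'fake-weights'):
        assert download_weights('http://example.com/model.bin') == b'fake-weights'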
@@ -423,7 +423,7 @@ class GPT2PreTrainedModel(PreTrainedModel):
         """
         num_special_tokens = kwargs.pop('num_special_tokens', None)
-        model = PreTrainedModel.from_pretrained(cls, pretrained_model_name_or_path, *inputs, **kwargs)
+        model = super(GPT2PreTrainedModel, cls).from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
         # Add additional embeddings for special tokens if needed
         # This step also make sure we are still sharing the output and input embeddings after loading weights
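Why the old line was wrong: from_pretrained is a classmethod, so PreTrainedModel.from_pretrained is already bound to PreTrainedModel; passing cls explicitly shifts every argument one position, and the class object itself was being treated as pretrained_model_name_or_path. Delegating through super() keeps cls bound to the subclass. Note the two-argument super must name the class whose method is doing the delegating (here GPT2PreTrainedModel / OpenAIGPTPreTrainedModel), not the base class, or the lookup skips the base implementation. A minimal sketch of the failure mode, with toy class names rather than the library's:

class Base(object):
    @classmethod
    def from_pretrained(cls, name, *inputs, **kwargs):
        # `cls` is filled in automatically by the classmethod machinery.
        print("loading %r into %s" % (name, cls.__name__))
        return cls()

class Child(Base):
    @classmethod
    def from_pretrained(cls, name, *inputs, **kwargs):
        # Buggy: Base.from_pretrained is already bound to Base, so the explicit
        # `cls` argument lands in the `name` slot:
        #   Base.from_pretrained(cls, name)  # prints "loading <class 'Child'> into Base"
        # Fixed: super() leaves `cls` bound to Child and passes args unshifted.
        return super(Child, cls).from_pretrained(name, *inputs, **kwargs)

Child.from_pretrained('gpt2')  # prints "loading 'gpt2' into Child"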
@@ -431,7 +431,7 @@ class OpenAIGPTPreTrainedModel(PreTrainedModel):
         num_special_tokens = kwargs.get('num_special_tokens', None)
         kwargs.pop('num_special_tokens', None)
-        model = PreTrainedModel.from_pretrained(cls, pretrained_model_name_or_path, *inputs, **kwargs)
+        model = super(OpenAIGPTPreTrainedModel, cls).from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
         # Add additional embeddings for special tokens if needed
         # This step also make sure we are still sharing the output and input embeddings after loading weights
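A small idiom note on the OpenAI GPT hunk: the kwargs.get followed by kwargs.pop pair can be collapsed into the single kwargs.pop('num_special_tokens', None) call that the GPT-2 hunk already uses, since dict.pop with a default both reads and removes the key in one step:

kwargs = {'num_special_tokens': 2, 'cache_dir': '/tmp/models'}  # hypothetical kwargs
num_special_tokens = kwargs.pop('num_special_tokens', None)     # returns 2 and removes the key
assert num_special_tokens == 2 and 'num_special_tokens' not in kwargs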