chenpangpang/transformers · commit 81ee29ee
"git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "42d57549b82014834706ca86515eb6cc6431b3cb"
Commit 81ee29ee authored Oct 10, 2019 by Rémi Louf

remove the staticmethod used to load the config

parent d7092d59
Showing 1 changed file with 16 additions and 17 deletions.

transformers/modeling_bert.py (+16, -17)
transformers/modeling_bert.py (view file @ 81ee29ee)

```diff
@@ -715,7 +715,7 @@ class BertDecoderModel(BertPreTrainedModel):
     """
     def __init__(self, config):
-        super(BertModel, self).__init__(config)
+        super(BertDecoderModel, self).__init__(config)
         self.embeddings = BertEmbeddings(config)
         self.decoder = BertDecoder(config)
```
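The first hunk fixes the class passed to the legacy two-argument form of `super()`: the first argument must be a class that appears in the instance's MRO. `BertDecoderModel` subclasses `BertPreTrainedModel` (per the hunk header), not `BertModel`, so the old `super(BertModel, self).__init__(config)` would raise a `TypeError` as soon as the model is instantiated. A minimal sketch of the pitfall, using hypothetical stand-in classes rather than the real ones:

```python
class Base:
    def __init__(self):
        print("Base.__init__ ran")

class Sibling(Base):
    pass

class Child(Base):  # Child is NOT a subclass of Sibling
    def __init__(self):
        # Wrong class: Sibling is not in Child's MRO, so this raises
        # TypeError: super(type, obj): obj must be an instance or subtype of type
        super(Sibling, self).__init__()

Child()  # raises the TypeError described above
```

Passing the defining class itself (`super(Child, self)`, or simply `super()` on Python 3) dispatches to the next class in the MRO as intended, which is what the corrected line does with `BertDecoderModel`.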
```diff
@@ -1357,28 +1357,27 @@ class Bert2Rnd(BertPreTrainedModel):
         pretrained weights we need to override the `from_pretrained` method of the base `PreTrainedModel`
         class.
         """
-        pretrained_encoder = BertModel.from_pretrained(pretrained_model_or_path, *model_args, **model_kwargs)
-        config = cls._load_config(pretrained_model_or_path, *model_args, **model_kwargs)
-        model = cls(config)
-        model.encoder = pretrained_encoder
-        return model
-
-    def _load_config(self, pretrained_model_name_or_path, *args, **kwargs):
-        config = kwargs.pop('config', None)
+        # Load the configuration
+        config = model_kwargs.pop('config', None)
         if config is None:
-            cache_dir = kwargs.pop('cache_dir', None)
-            force_download = kwargs.pop('force_download', False)
-            config, _ = self.config_class.from_pretrained(
-                pretrained_model_name_or_path,
-                *args,
+            cache_dir = model_kwargs.pop('cache_dir', None)
+            force_download = model_kwargs.pop('force_download', False)
+            config, _ = cls.config_class.from_pretrained(
+                pretrained_model_or_path,
+                *model_args,
                 cache_dir=cache_dir,
                 return_unused_kwargs=True,
                 force_download=force_download,
-                **kwargs
+                **model_kwargs
             )
-        return config
+
+        model = cls(config)
+
+        # The encoder is loaded with pretrained weights
+        pretrained_encoder = BertModel.from_pretrained(pretrained_model_or_path, *model_args, **model_kwargs)
+        model.encoder = pretrained_encoder
+        return model
 
     def forward(self, input_ids, attention_mask=None, token_type_ids=None, position_ids=None, head_mask=None):
         encoder_outputs = self.encoder(input_ids,
```
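The second hunk folds the logic of the removed `_load_config` helper (the staticmethod named in the commit title) directly into `from_pretrained`. Besides removing an indirection, this sidesteps a latent bug visible in the deleted lines: the helper declared `self` as its first parameter yet was invoked on the class as `cls._load_config(pretrained_model_or_path, ...)`, which would have bound the path string to `self` and broken the `self.config_class` lookup. The resulting flow is: pop the config-related kwargs, load (or accept) a config, build the model, then overwrite its encoder with pretrained weights. A minimal, self-contained sketch of that flow; the `Fake*` classes below are hypothetical stand-ins, not the real transformers API:

```python
class FakeConfig:
    """Hypothetical stand-in for the model's config_class."""
    @classmethod
    def from_pretrained(cls, name, *args, cache_dir=None,
                        return_unused_kwargs=False, force_download=False,
                        **kwargs):
        # The real method would fetch and parse a JSON config file; here we
        # just build an instance and hand back the kwargs we did not consume.
        config = cls()
        return (config, kwargs) if return_unused_kwargs else config


class FakeBertModel:
    """Hypothetical stand-in for BertModel; pretend this loads weights."""
    @classmethod
    def from_pretrained(cls, name, *args, **kwargs):
        return cls()


class Seq2SeqModel:
    config_class = FakeConfig

    def __init__(self, config):
        self.config = config
        self.encoder = None

    @classmethod
    def from_pretrained(cls, pretrained_model_or_path, *model_args, **model_kwargs):
        # 1. Load the configuration, popping config-only kwargs first so they
        #    are not forwarded to the weight-loading call below.
        config = model_kwargs.pop('config', None)
        if config is None:
            cache_dir = model_kwargs.pop('cache_dir', None)
            force_download = model_kwargs.pop('force_download', False)
            config, _ = cls.config_class.from_pretrained(
                pretrained_model_or_path,
                *model_args,
                cache_dir=cache_dir,
                return_unused_kwargs=True,
                force_download=force_download,
                **model_kwargs
            )

        # 2. Build the model, then swap in an encoder loaded with
        #    pretrained weights.
        model = cls(config)
        model.encoder = FakeBertModel.from_pretrained(
            pretrained_model_or_path, *model_args, **model_kwargs)
        return model


model = Seq2SeqModel.from_pretrained('bert-base-uncased', cache_dir='/tmp/models')
print(type(model.encoder).__name__)  # FakeBertModel
```

Note that the diff requests `return_unused_kwargs=True` but discards the returned extras (`config, _ = ...`); only the config object is kept.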