Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
chenpangpang
transformers
Commits
33adab2b
Commit
33adab2b
authored
Dec 19, 2019
by
Lysandre
Browse files
Fix albert example
parent
a1f1dce0
Changes
2
Show whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
2 additions
and
7 deletions
+2
-7
transformers/modeling_tf_albert.py
transformers/modeling_tf_albert.py
+2
-2
transformers/modeling_utils.py
transformers/modeling_utils.py
+0
-5
No files found.
transformers/modeling_tf_albert.py
View file @
33adab2b
...
@@ -587,8 +587,8 @@ class TFAlbertModel(TFAlbertPreTrainedModel):
...
@@ -587,8 +587,8 @@ class TFAlbertModel(TFAlbertPreTrainedModel):
import tensorflow as tf
import tensorflow as tf
from transformers import AlbertTokenizer, TFAlbertModel
from transformers import AlbertTokenizer, TFAlbertModel
- tokenizer = AlbertTokenizer.from_pretrained('bert-base-uncased')
+ tokenizer = AlbertTokenizer.from_pretrained('albert-base-v1')
- model = TFAlbertModel.from_pretrained('bert-base-uncased')
+ model = TFAlbertModel.from_pretrained('albert-base-v1')
input_ids = tf.constant(tokenizer.encode("Hello, my dog is cute"))[None, :] # Batch size 1
input_ids = tf.constant(tokenizer.encode("Hello, my dog is cute"))[None, :] # Batch size 1
outputs = model(input_ids)
outputs = model(input_ids)
last_hidden_states = outputs[0] # The last hidden-state is the first element of the output tuple
last_hidden_states = outputs[0] # The last hidden-state is the first element of the output tuple
...
...
transformers/modeling_utils.py
View file @
33adab2b
...
@@ -327,11 +327,6 @@ class PreTrainedModel(nn.Module):
...
@@ -327,11 +327,6 @@ class PreTrainedModel(nn.Module):
model = BertModel.from_pretrained('./tf_model/my_tf_checkpoint.ckpt.index', from_tf=True, config=config)
model = BertModel.from_pretrained('./tf_model/my_tf_checkpoint.ckpt.index', from_tf=True, config=config)
"""
"""
- if pretrained_model_name_or_path is not None and ("albert" in pretrained_model_name_or_path and "v2" in pretrained_model_name_or_path):
-     logger.warning("There is currently an upstream reproducibility issue with ALBERT v2 models. Please see " +
-                    "https://github.com/google-research/google-research/issues/119 for more information.")
  config = kwargs.pop('config', None)
  state_dict = kwargs.pop('state_dict', None)
  cache_dir = kwargs.pop('cache_dir', None)
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment