Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
chenpangpang
transformers
Commits
2c668423
"...resnet50_tensorflow.git" did not exist on "4452fcecee11d32b1db74f6e2c1bd53de7a624d8"
Unverified
Commit
2c668423
authored
Mar 22, 2021
by
Sebastian Olsson
Committed by
GitHub
Mar 22, 2021
Browse files
Correct AutoConfig call docstrings (#10822)
parent
8fb46718
Changes
3
Hide whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
27 additions
and
27 deletions
+27
-27
hubconf.py
hubconf.py
+4
-4
src/transformers/models/auto/modeling_auto.py
src/transformers/models/auto/modeling_auto.py
+12
-12
src/transformers/models/auto/modeling_tf_auto.py
src/transformers/models/auto/modeling_tf_auto.py
+11
-11
No files found.
hubconf.py
View file @
2c668423
...
...
@@ -78,7 +78,7 @@ def model(*args, **kwargs):
model = torch.hub.load('huggingface/transformers', 'model', 'bert-base-uncased', output_attentions=True) # Update configuration during loading
assert model.config.output_attentions == True
# Loading from a TF checkpoint file instead of a PyTorch model (slower)
config = AutoConfig.from_json_file('./tf_model/bert_tf_model_config.json')
config = AutoConfig.from_pretrained('./tf_model/bert_tf_model_config.json')
model = torch.hub.load('huggingface/transformers', 'model', './tf_model/bert_tf_checkpoint.ckpt.index', from_tf=True, config=config)
"""
...
...
@@ -97,7 +97,7 @@ def modelWithLMHead(*args, **kwargs):
model = torch.hub.load('huggingface/transformers', 'modelWithLMHead', 'bert-base-uncased', output_attentions=True) # Update configuration during loading
assert model.config.output_attentions == True
# Loading from a TF checkpoint file instead of a PyTorch model (slower)
config = AutoConfig.from_json_file('./tf_model/bert_tf_model_config.json')
config = AutoConfig.from_pretrained('./tf_model/bert_tf_model_config.json')
model = torch.hub.load('huggingface/transformers', 'modelWithLMHead', './tf_model/bert_tf_checkpoint.ckpt.index', from_tf=True, config=config)
"""
...
...
@@ -115,7 +115,7 @@ def modelForSequenceClassification(*args, **kwargs):
model = torch.hub.load('huggingface/transformers', 'modelForSequenceClassification', 'bert-base-uncased', output_attentions=True) # Update configuration during loading
assert model.config.output_attentions == True
# Loading from a TF checkpoint file instead of a PyTorch model (slower)
config = AutoConfig.from_json_file('./tf_model/bert_tf_model_config.json')
config = AutoConfig.from_pretrained('./tf_model/bert_tf_model_config.json')
model = torch.hub.load('huggingface/transformers', 'modelForSequenceClassification', './tf_model/bert_tf_checkpoint.ckpt.index', from_tf=True, config=config)
"""
...
...
@@ -134,7 +134,7 @@ def modelForQuestionAnswering(*args, **kwargs):
model = torch.hub.load('huggingface/transformers', 'modelForQuestionAnswering', 'bert-base-uncased', output_attentions=True) # Update configuration during loading
assert model.config.output_attentions == True
# Loading from a TF checkpoint file instead of a PyTorch model (slower)
config = AutoConfig.from_json_file('./tf_model/bert_tf_model_config.json')
config = AutoConfig.from_pretrained('./tf_model/bert_tf_model_config.json')
model = torch.hub.load('huggingface/transformers', 'modelForQuestionAnswering', './tf_model/bert_tf_checkpoint.ckpt.index', from_tf=True, config=config)
"""
...
...
src/transformers/models/auto/modeling_auto.py
View file @
2c668423
...
...
@@ -801,7 +801,7 @@ class AutoModel:
True
>>> # Loading from a TF checkpoint file instead of a PyTorch model (slower)
>>> config = AutoConfig.from_json_file('./tf_model/bert_tf_model_config.json')
>>> config = AutoConfig.from_pretrained('./tf_model/bert_tf_model_config.json')
>>> model = AutoModel.from_pretrained('./tf_model/bert_tf_checkpoint.ckpt.index', from_tf=True, config=config)
"""
config = kwargs.pop("config", None)
...
...
@@ -895,7 +895,7 @@ class AutoModelForPreTraining:
True
>>> # Loading from a TF checkpoint file instead of a PyTorch model (slower)
>>> config = AutoConfig.from_json_file('./tf_model/bert_tf_model_config.json')
>>> config = AutoConfig.from_pretrained('./tf_model/bert_tf_model_config.json')
>>> model = AutoModelForPreTraining.from_pretrained('./tf_model/bert_tf_checkpoint.ckpt.index', from_tf=True, config=config)
"""
config = kwargs.pop("config", None)
...
...
@@ -1000,7 +1000,7 @@ class AutoModelWithLMHead:
True
>>> # Loading from a TF checkpoint file instead of a PyTorch model (slower)
>>> config = AutoConfig.from_json_file('./tf_model/bert_tf_model_config.json')
>>> config = AutoConfig.from_pretrained('./tf_model/bert_tf_model_config.json')
>>> model = AutoModelWithLMHead.from_pretrained('./tf_model/bert_tf_checkpoint.ckpt.index', from_tf=True, config=config)
"""
warnings.warn(
...
...
@@ -1099,7 +1099,7 @@ class AutoModelForCausalLM:
True
>>> # Loading from a TF checkpoint file instead of a PyTorch model (slower)
>>> config = AutoConfig.from_json_file('./tf_model/gpt2_tf_model_config.json')
>>> config = AutoConfig.from_pretrained('./tf_model/gpt2_tf_model_config.json')
>>> model = AutoModelForCausalLM.from_pretrained('./tf_model/gpt2_tf_checkpoint.ckpt.index', from_tf=True, config=config)
"""
config = kwargs.pop("config", None)
...
...
@@ -1192,7 +1192,7 @@ class AutoModelForMaskedLM:
True
>>> # Loading from a TF checkpoint file instead of a PyTorch model (slower)
>>> config = AutoConfig.from_json_file('./tf_model/bert_tf_model_config.json')
>>> config = AutoConfig.from_pretrained('./tf_model/bert_tf_model_config.json')
>>> model = AutoModelForMaskedLM.from_pretrained('./tf_model/bert_tf_checkpoint.ckpt.index', from_tf=True, config=config)
"""
config = kwargs.pop("config", None)
...
...
@@ -1288,7 +1288,7 @@ class AutoModelForSeq2SeqLM:
True
>>> # Loading from a TF checkpoint file instead of a PyTorch model (slower)
>>> config = AutoConfig.from_json_file('./tf_model/t5_tf_model_config.json')
>>> config = AutoConfig.from_pretrained('./tf_model/t5_tf_model_config.json')
>>> model = AutoModelForSeq2SeqLM.from_pretrained('./tf_model/t5_tf_checkpoint.ckpt.index', from_tf=True, config=config)
"""
config = kwargs.pop("config", None)
...
...
@@ -1386,7 +1386,7 @@ class AutoModelForSequenceClassification:
True
>>> # Loading from a TF checkpoint file instead of a PyTorch model (slower)
>>> config = AutoConfig.from_json_file('./tf_model/bert_tf_model_config.json')
>>> config = AutoConfig.from_pretrained('./tf_model/bert_tf_model_config.json')
>>> model = AutoModelForSequenceClassification.from_pretrained('./tf_model/bert_tf_checkpoint.ckpt.index', from_tf=True, config=config)
"""
config = kwargs.pop("config", None)
...
...
@@ -1483,7 +1483,7 @@ class AutoModelForQuestionAnswering:
True
>>> # Loading from a TF checkpoint file instead of a PyTorch model (slower)
>>> config = AutoConfig.from_json_file('./tf_model/bert_tf_model_config.json')
>>> config = AutoConfig.from_pretrained('./tf_model/bert_tf_model_config.json')
>>> model = AutoModelForQuestionAnswering.from_pretrained('./tf_model/bert_tf_checkpoint.ckpt.index', from_tf=True, config=config)
"""
config = kwargs.pop("config", None)
...
...
@@ -1583,7 +1583,7 @@ class AutoModelForTableQuestionAnswering:
True
>>> # Loading from a TF checkpoint file instead of a PyTorch model (slower)
>>> config = AutoConfig.from_json_file('./tf_model/tapas_tf_checkpoint.json')
>>> config = AutoConfig.from_pretrained('./tf_model/tapas_tf_checkpoint.json')
>>> model = AutoModelForQuestionAnswering.from_pretrained('./tf_model/tapas_tf_checkpoint.ckpt.index', from_tf=True, config=config)
"""
config = kwargs.pop("config", None)
...
...
@@ -1681,7 +1681,7 @@ class AutoModelForTokenClassification:
True
>>> # Loading from a TF checkpoint file instead of a PyTorch model (slower)
>>> config = AutoConfig.from_json_file('./tf_model/bert_tf_model_config.json')
>>> config = AutoConfig.from_pretrained('./tf_model/bert_tf_model_config.json')
>>> model = AutoModelForTokenClassification.from_pretrained('./tf_model/bert_tf_checkpoint.ckpt.index', from_tf=True, config=config)
"""
config = kwargs.pop("config", None)
...
...
@@ -1781,7 +1781,7 @@ class AutoModelForMultipleChoice:
True
>>> # Loading from a TF checkpoint file instead of a PyTorch model (slower)
>>> config = AutoConfig.from_json_file('./tf_model/bert_tf_model_config.json')
>>> config = AutoConfig.from_pretrained('./tf_model/bert_tf_model_config.json')
>>> model = AutoModelForMultipleChoice.from_pretrained('./tf_model/bert_tf_checkpoint.ckpt.index', from_tf=True, config=config)
"""
config = kwargs.pop("config", None)
...
...
@@ -1881,7 +1881,7 @@ class AutoModelForNextSentencePrediction:
True
>>> # Loading from a TF checkpoint file instead of a PyTorch model (slower)
>>> config = AutoConfig.from_json_file('./tf_model/bert_tf_model_config.json')
>>> config = AutoConfig.from_pretrained('./tf_model/bert_tf_model_config.json')
>>> model = AutoModelForNextSentencePrediction.from_pretrained('./tf_model/bert_tf_checkpoint.ckpt.index', from_tf=True, config=config)
"""
config = kwargs.pop("config", None)
...
...
src/transformers/models/auto/modeling_tf_auto.py
View file @
2c668423
...
...
@@ -605,7 +605,7 @@ class TFAutoModel(object):
True
>>> # Loading from a PyTorch checkpoint file instead of a TensorFlow model (slower)
>>> config = AutoConfig.from_json_file('./pt_model/bert_pt_model_config.json')
>>> config = AutoConfig.from_pretrained('./pt_model/bert_pt_model_config.json')
>>> model = TFAutoModel.from_pretrained('./pt_model/bert_pytorch_model.bin', from_pt=True, config=config)
"""
config = kwargs.pop("config", None)
...
...
@@ -699,7 +699,7 @@ class TFAutoModelForPreTraining(object):
True
>>> # Loading from a PyTorch checkpoint file instead of a TensorFlow model (slower)
>>> config = AutoConfig.from_json_file('./pt_model/bert_pt_model_config.json')
>>> config = AutoConfig.from_pretrained('./pt_model/bert_pt_model_config.json')
>>> model = TFAutoModelForPreTraining.from_pretrained('./pt_model/bert_pytorch_model.bin', from_pt=True, config=config)
"""
config = kwargs.pop("config", None)
...
...
@@ -804,7 +804,7 @@ class TFAutoModelWithLMHead(object):
True
>>> # Loading from a PyTorch checkpoint file instead of a TensorFlow model (slower)
>>> config = AutoConfig.from_json_file('./pt_model/bert_pt_model_config.json')
>>> config = AutoConfig.from_pretrained('./pt_model/bert_pt_model_config.json')
>>> model = TFAutoModelWithLMHead.from_pretrained('./pt_model/bert_pytorch_model.bin', from_pt=True, config=config)
"""
warnings.warn(
...
...
@@ -904,7 +904,7 @@ class TFAutoModelForCausalLM:
True
>>> # Loading from a PyTorch checkpoint file instead of a TensorFlow model (slower)
>>> config = AutoConfig.from_json_file('./pt_model/gpt2_pt_model_config.json')
>>> config = AutoConfig.from_pretrained('./pt_model/gpt2_pt_model_config.json')
>>> model = TFAutoModelForCausalLM.from_pretrained('./pt_model/gpt2_pytorch_model.bin', from_pt=True, config=config)
"""
config = kwargs.pop("config", None)
...
...
@@ -997,7 +997,7 @@ class TFAutoModelForMaskedLM:
True
>>> # Loading from a PyTorch checkpoint file instead of a TensorFlow model (slower)
>>> config = AutoConfig.from_json_file('./pt_model/bert_pt_model_config.json')
>>> config = AutoConfig.from_pretrained('./pt_model/bert_pt_model_config.json')
>>> model = TFAutoModelForMaskedLM.from_pretrained('./pt_model/bert_pytorch_model.bin', from_pt=True, config=config)
"""
config = kwargs.pop("config", None)
...
...
@@ -1093,7 +1093,7 @@ class TFAutoModelForSeq2SeqLM:
True
>>> # Loading from a PyTorch checkpoint file instead of a TensorFlow model (slower)
>>> config = AutoConfig.from_json_file('./pt_model/t5_pt_model_config.json')
>>> config = AutoConfig.from_pretrained('./pt_model/t5_pt_model_config.json')
>>> model = TFAutoModelForSeq2SeqLM.from_pretrained('./pt_model/t5_pytorch_model.bin', from_pt=True, config=config)
"""
config = kwargs.pop("config", None)
...
...
@@ -1191,7 +1191,7 @@ class TFAutoModelForSequenceClassification(object):
True
>>> # Loading from a PyTorch checkpoint file instead of a TensorFlow model (slower)
>>> config = AutoConfig.from_json_file('./pt_model/bert_pt_model_config.json')
>>> config = AutoConfig.from_pretrained('./pt_model/bert_pt_model_config.json')
>>> model = TFAutoModelForSequenceClassification.from_pretrained('./pt_model/bert_pytorch_model.bin', from_pt=True, config=config)
"""
config = kwargs.pop("config", None)
...
...
@@ -1288,7 +1288,7 @@ class TFAutoModelForQuestionAnswering(object):
True
>>> # Loading from a PyTorch checkpoint file instead of a TensorFlow model (slower)
>>> config = AutoConfig.from_json_file('./pt_model/bert_pt_model_config.json')
>>> config = AutoConfig.from_pretrained('./pt_model/bert_pt_model_config.json')
>>> model = TFAutoModelForQuestionAnswering.from_pretrained('./pt_model/bert_pytorch_model.bin', from_pt=True, config=config)
"""
config = kwargs.pop("config", None)
...
...
@@ -1384,7 +1384,7 @@ class TFAutoModelForTokenClassification:
True
>>> # Loading from a PyTorch checkpoint file instead of a TensorFlow model (slower)
>>> config = AutoConfig.from_json_file('./pt_model/bert_pt_model_config.json')
>>> config = AutoConfig.from_pretrained('./pt_model/bert_pt_model_config.json')
>>> model = TFAutoModelForTokenClassification.from_pretrained('./pt_model/bert_pytorch_model.bin', from_pt=True, config=config)
"""
config = kwargs.pop("config", None)
...
...
@@ -1482,7 +1482,7 @@ class TFAutoModelForMultipleChoice:
True
>>> # Loading from a PyTorch checkpoint file instead of a TensorFlow model (slower)
>>> config = AutoConfig.from_json_file('./pt_model/bert_pt_model_config.json')
>>> config = AutoConfig.from_pretrained('./pt_model/bert_pt_model_config.json')
>>> model = TFAutoModelForMultipleChoice.from_pretrained('./pt_model/bert_pytorch_model.bin', from_pt=True, config=config)
"""
config = kwargs.pop("config", None)
...
...
@@ -1580,7 +1580,7 @@ class TFAutoModelForNextSentencePrediction:
True
>>> # Loading from a PyTorch checkpoint file instead of a TensorFlow model (slower)
>>> config = AutoConfig.from_json_file('./pt_model/bert_pt_model_config.json')
>>> config = AutoConfig.from_pretrained('./pt_model/bert_pt_model_config.json')
>>> model = TFAutoModelForNextSentencePrediction.from_pretrained('./pt_model/bert_pytorch_model.bin', from_pt=True, config=config)
"""
config = kwargs.pop("config", None)
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment