"...git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "86a630702d8d613c5aaea0bda9bf7b519c012486"
Unverified Commit ca0b82bb authored by Yih-Dar, committed by GitHub

Fix doc examples: cannot import name (#14698)



* Fix doc examples: cannot import name

* Remove the 'Copied from' marker because of some necessary minor changes (maybe add the marker to the individual methods instead)

* Keep the 'Copied from' marker, with some modifications
Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
parent fc74c845
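For context on the 'copy' bullets above: transformers marks duplicated code with '# Copied from' comments whose 'Old->New' clauses are textual substitutions, and a repository script regenerates each marked block from the referenced source and flags any difference; a diverging copy therefore needs the marker removed or its substitution list extended, which is what the second hunk below does. A tiny illustrative sketch of that substitution step (a hypothetical helper, not the repo's actual checker):

def apply_substitutions(source: str, clauses: str) -> str:
    """Apply each 'Old->New' clause from a '# Copied from ... with ...' comment."""
    for clause in clauses.split(","):
        old, new = (part.strip().strip("'\"") for part in clause.split("->"))
        source = source.replace(old, new)
    return source

# Deriving the BigBirdPegasus signature from the Bart one, as the hunk's clauses describe.
bart_line = "class BartForCausalLM(BartPreTrainedModel):"
clauses = "BartForCausalLM->BigBirdPegasusForCausalLM, BartPreTrainedModel->BigBirdPegasusPreTrainedModel"
print(apply_substitutions(bart_line, clauses))
# class BigBirdPegasusForCausalLM(BigBirdPegasusPreTrainedModel):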
@@ -2117,7 +2117,7 @@ class BigBirdPegasusDecoder(BigBirdPegasusPreTrainedModel):
                 Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you
                 provide it.
-                Indices can be obtained using :class:`~transformers.BigBirdPegasusTokenizer`. See
+                Indices can be obtained using :class:`~transformers.PegasusTokenizer`. See
                 :meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__`
                 for details.
@@ -2862,7 +2862,7 @@ class BigBirdPegasusDecoderWrapper(BigBirdPegasusPreTrainedModel):
         return self.decoder(*args, **kwargs)
 
-# Copied from transformers.models.bart.modeling_bart.BartForCausalLM with Bart->BigBirdPegasus, 'facebook/bart-large'->"google/bigbird-pegasus-large-arxiv"
+# Copied from transformers.models.bart.modeling_bart.BartForCausalLM with BartDecoderWrapper->BigBirdPegasusDecoderWrapper, BartForCausalLM->BigBirdPegasusForCausalLM, BartPreTrainedModel->BigBirdPegasusPreTrainedModel, BartTokenizer->PegasusTokenizer, 'facebook/bart-large'->"google/bigbird-pegasus-large-arxiv"
 class BigBirdPegasusForCausalLM(BigBirdPegasusPreTrainedModel):
     def __init__(self, config):
         config = copy.deepcopy(config)
@@ -2917,7 +2917,7 @@ class BigBirdPegasusForCausalLM(BigBirdPegasusPreTrainedModel):
                 Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you
                 provide it.
-                Indices can be obtained using :class:`~transformers.BigBirdPegasusTokenizer`. See
+                Indices can be obtained using :class:`~transformers.PegasusTokenizer`. See
                 :meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__`
                 for details.
@@ -2985,9 +2985,9 @@ class BigBirdPegasusForCausalLM(BigBirdPegasusPreTrainedModel):
         Example::
 
-            >>> from transformers import BigBirdPegasusTokenizer, BigBirdPegasusForCausalLM
+            >>> from transformers import PegasusTokenizer, BigBirdPegasusForCausalLM
 
-            >>> tokenizer = BigBirdPegasusTokenizer.from_pretrained("google/bigbird-pegasus-large-arxiv")
+            >>> tokenizer = PegasusTokenizer.from_pretrained("google/bigbird-pegasus-large-arxiv")
             >>> model = BigBirdPegasusForCausalLM.from_pretrained("google/bigbird-pegasus-large-arxiv", add_cross_attention=False)
             >>> assert model.config.is_decoder, f"{model.__class__} has to be configured as a decoder."
             >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
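For reference, the corrected example runs end to end once PegasusTokenizer is used: BigBirdPegasus reuses the Pegasus tokenizer rather than shipping a BigBirdPegasusTokenizer of its own, which is what made the original import fail with "cannot import name". A minimal runnable sketch of the fixed doctest with an added forward pass; the lines after inputs are illustrative additions, not part of the diff, and network access to the Hugging Face Hub is assumed:

from transformers import PegasusTokenizer, BigBirdPegasusForCausalLM

# PegasusTokenizer is the correct import; no BigBirdPegasusTokenizer exists.
tokenizer = PegasusTokenizer.from_pretrained("google/bigbird-pegasus-large-arxiv")
model = BigBirdPegasusForCausalLM.from_pretrained(
    "google/bigbird-pegasus-large-arxiv", add_cross_attention=False
)
assert model.config.is_decoder, f"{model.__class__} has to be configured as a decoder."

inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
outputs = model(**inputs)   # decoder-only causal-LM forward pass
logits = outputs.logits     # shape: (batch_size, sequence_length, vocab_size)
print(logits.shape)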