Unverified Commit 59d684fa authored by Yih-Dar, committed by GitHub

Fix examples: 'CausalLMOutputWithCrossAttentions' object has no attribute 'last_hidden_state' (#14678)
Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
parent 8395f14d
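
The removed doctest lines read `outputs.last_hidden_state` from a head model's output, but `BartForCausalLM` and the other `*ForCausalLM` heads return a `CausalLMOutputWithCrossAttentions`, which only exposes `loss`, `logits`, `past_key_values`, `hidden_states`, `attentions`, and `cross_attentions`, so the attribute access raised `AttributeError`. A minimal sketch of the corrected usage, assuming the facebook/bart-base checkpoint (the checkpoint line sits outside these hunks):

# Minimal sketch, assuming the facebook/bart-base checkpoint; any BART
# checkpoint behaves the same way here.
from transformers import BartForCausalLM, BartTokenizer

tokenizer = BartTokenizer.from_pretrained("facebook/bart-base")
model = BartForCausalLM.from_pretrained("facebook/bart-base")

inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
outputs = model(**inputs)  # a CausalLMOutputWithCrossAttentions

# The output carries `logits`, not `last_hidden_state`; hidden states are
# only populated when output_hidden_states=True is passed.
print(outputs.logits.shape)  # (batch_size, sequence_length, vocab_size)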
@@ -1790,7 +1790,7 @@ class BartForCausalLM(BartPretrainedModel):
         >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
         >>> outputs = model(**inputs)
-        >>> last_hidden_states = outputs.last_hidden_state
+        >>> logits = outputs.logits
         """
         output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
@@ -2993,7 +2993,7 @@ class BigBirdPegasusForCausalLM(BigBirdPegasusPreTrainedModel):
         >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
         >>> outputs = model(**inputs)
-        >>> last_hidden_states = outputs.last_hidden_state
+        >>> logits = outputs.logits
         """
         output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
@@ -1528,7 +1528,7 @@ class BlenderbotForCausalLM(BlenderbotPreTrainedModel):
         >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
         >>> outputs = model(**inputs)
-        >>> last_hidden_states = outputs.last_hidden_state
+        >>> logits = outputs.logits
         """
         output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
@@ -1502,7 +1502,7 @@ class BlenderbotSmallForCausalLM(BlenderbotSmallPreTrainedModel):
         >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
         >>> outputs = model(**inputs)
-        >>> last_hidden_states = outputs.last_hidden_state
+        >>> logits = outputs.logits
         """
         output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
@@ -1525,7 +1525,7 @@ class MarianForCausalLM(MarianPreTrainedModel):
         >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
         >>> outputs = model(**inputs)
-        >>> last_hidden_states = outputs.last_hidden_state
+        >>> logits = outputs.logits
         """
         output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
@@ -1793,7 +1793,7 @@ class MBartForCausalLM(MBartPreTrainedModel):
         >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
         >>> outputs = model(**inputs)
-        >>> last_hidden_states = outputs.last_hidden_state
+        >>> logits = outputs.logits
         """
         output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
@@ -1637,7 +1637,7 @@ class PegasusForCausalLM(PegasusPreTrainedModel):
         >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
         >>> outputs = model(**inputs)
-        >>> last_hidden_states = outputs.last_hidden_state
+        >>> logits = outputs.logits
         """
         output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
@@ -1070,7 +1070,7 @@ class TapasForMaskedLM(TapasPreTrainedModel):
         >>> labels = tokenizer(table=table, queries="How many movies has George Clooney played in?", return_tensors="pt")["input_ids"]
         >>> outputs = model(**inputs, labels=labels)
-        >>> last_hidden_states = outputs.last_hidden_state
+        >>> logits = outputs.logits
         """
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
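
The two Tapas hunks fix the same attribute mistake for the masked-LM head: `TapasForMaskedLM` returns a `MaskedLMOutput` (its TF counterpart a `TFMaskedLMOutput`), which likewise carries `logits` rather than `last_hidden_state`. A hedged sketch of the corrected PyTorch usage; the checkpoint name, the [MASK] query, and the table contents are assumptions modeled on the typical Tapas docstring, not part of this diff:

# Sketch only: google/tapas-base and the example table are assumptions.
import pandas as pd
from transformers import TapasForMaskedLM, TapasTokenizer

tokenizer = TapasTokenizer.from_pretrained("google/tapas-base")
model = TapasForMaskedLM.from_pretrained("google/tapas-base")

table = pd.DataFrame.from_dict(
    {"Actors": ["Brad Pitt", "George Clooney"], "Number of movies": ["87", "69"]}
)
inputs = tokenizer(table=table, queries="How many movies has [MASK] Clooney played in?", return_tensors="pt")
labels = tokenizer(table=table, queries="How many movies has George Clooney played in?", return_tensors="pt")["input_ids"]

outputs = model(**inputs, labels=labels)
print(outputs.logits.shape)  # masked-LM scores over the vocabulary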
@@ -1130,7 +1130,7 @@ class TFTapasForMaskedLM(TFTapasPreTrainedModel, TFMaskedLanguageModelingLoss):
         >>> labels = tokenizer(table=table, queries="How many movies has George Clooney played in?", return_tensors="tf")["input_ids"]
         >>> outputs = model(**inputs, labels=labels)
-        >>> last_hidden_states = outputs.last_hidden_state
+        >>> logits = outputs.logits
         """
         inputs = input_processing(
             func=self.call,
@@ -3294,7 +3294,7 @@ class {{cookiecutter.camelcase_modelname}}ForCausalLM({{cookiecutter.camelcase_modelname}}PreTrainedModel):
         >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
         >>> outputs = model(**inputs)
-        >>> last_hidden_states = outputs.last_hidden_state
+        >>> logits = outputs.logits
         """
         output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions