Unverified commit 065729a6 authored by Matt, committed by GitHub

Remove ConversationalPipeline and Conversation object (#31165)

* Remove ConversationalPipeline and Conversation object, as they have been deprecated for some time and are due for removal

* Update not-doctested.txt

* Fix JA and ZH docs

* Fix JA and ZH docs some more

* Fix JA and ZH docs some more
parent 3a100582
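For anyone migrating off the removed classes: the replacement the deprecation warnings pointed to is the chat-aware `text-generation` pipeline, which accepts a list of role/content message dicts and applies the tokenizer's chat template. A minimal before/after sketch; the checkpoint name is an illustrative placeholder, not something this PR prescribes:

```python
from transformers import pipeline

# Before (removed by this PR):
#   from transformers import Conversation
#   chatbot = pipeline("conversational", model=...)
#   conversation = Conversation("Hi, how are you?")
#   conversation = chatbot(conversation)

# After: pass chat messages straight to the text-generation pipeline.
chatbot = pipeline("text-generation", model="HuggingFaceH4/zephyr-7b-beta")  # placeholder checkpoint
messages = [{"role": "user", "content": "Hi, how are you?"}]
result = chatbot(messages, max_new_tokens=64)
print(result[0]["generated_text"])
```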
@@ -284,7 +284,6 @@ class LEDModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin,
    all_generative_model_classes = (LEDForConditionalGeneration,) if is_torch_available() else ()
    pipeline_model_mapping = (
        {
-           "conversational": LEDForConditionalGeneration,
            "feature-extraction": LEDModel,
            "question-answering": LEDForQuestionAnswering,
            "summarization": LEDForConditionalGeneration,
...
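The same one-line deletion recurs in every hunk below. `pipeline_model_mapping` maps pipeline task names to the model class that `PipelineTesterMixin` should exercise for that task, so dropping the `"conversational"` key stops the corresponding test from being generated at all. A hedged sketch of the idea, with hypothetical names rather than the mixin's real internals:

```python
import unittest


def make_pipeline_test(task, model_class):
    # A real test would build the pipeline and run a smoke input;
    # this illustrative stand-in only checks the mapping entry.
    def test(self):
        self.assertIsInstance(task, str)
        self.assertIsNotNone(model_class)

    return test


class ExamplePipelineTests(unittest.TestCase):
    pipeline_model_mapping = {
        "feature-extraction": object,  # stand-in for e.g. LEDModel
        "summarization": object,  # stand-in for e.g. LEDForConditionalGeneration
        # No "conversational" key -> no test_pipeline_conversational is generated.
    }


# Attach one generated test method per mapping entry.
for _task, _cls in ExamplePipelineTests.pipeline_model_mapping.items():
    setattr(
        ExamplePipelineTests,
        f"test_pipeline_{_task.replace('-', '_')}",
        make_pipeline_test(_task, _cls),
    )
```

This is also why the per-model `test_pipeline_conversational` skip overrides further down can simply be deleted: once the mapping key is gone, there is no generated test left to skip.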
@@ -197,7 +197,6 @@ class TFLEDModelTest(TFModelTesterMixin, PipelineTesterMixin, unittest.TestCase)
    all_generative_model_classes = (TFLEDForConditionalGeneration,) if is_tf_available() else ()
    pipeline_model_mapping = (
        {
-           "conversational": TFLEDForConditionalGeneration,
            "feature-extraction": TFLEDModel,
            "summarization": TFLEDForConditionalGeneration,
            "text2text-generation": TFLEDForConditionalGeneration,
...
@@ -504,7 +504,6 @@ class LongT5ModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMix
    all_generative_model_classes = (LongT5ForConditionalGeneration,) if is_torch_available() else ()
    pipeline_model_mapping = (
        {
-           "conversational": LongT5ForConditionalGeneration,
            "feature-extraction": LongT5Model,
            "summarization": LongT5ForConditionalGeneration,
            "text2text-generation": LongT5ForConditionalGeneration,
...
@@ -243,7 +243,6 @@ class M2M100ModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMix
    all_generative_model_classes = (M2M100ForConditionalGeneration,) if is_torch_available() else ()
    pipeline_model_mapping = (
        {
-           "conversational": M2M100ForConditionalGeneration,
            "feature-extraction": M2M100Model,
            "summarization": M2M100ForConditionalGeneration,
            "text2text-generation": M2M100ForConditionalGeneration,
...
@@ -311,10 +311,6 @@ class FlaxMarianModelTest(FlaxModelTesterMixin, unittest.TestCase, FlaxGeneratio
            outputs = model(input_ids)
            self.assertIsNotNone(outputs)

-   @unittest.skip("Skipping for now, to fix @ArthurZ or @ydshieh")
-   def test_pipeline_conversational(self):
-       pass
-

@require_flax
@require_sentencepiece
...
@@ -248,7 +248,6 @@ class MarianModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMix
    all_generative_model_classes = (MarianMTModel,) if is_torch_available() else ()
    pipeline_model_mapping = (
        {
-           "conversational": MarianMTModel,
            "feature-extraction": MarianModel,
            "summarization": MarianMTModel,
            "text-generation": MarianForCausalLM,
@@ -350,10 +349,6 @@ class MarianModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMix
    def test_tie_word_embeddings_decoder(self):
        pass

-   @unittest.skip("Skipping for now, to fix @ArthurZ or @ydshieh")
-   def test_pipeline_conversational(self):
-       pass
-
    @unittest.skip(
        reason="This architecure seem to not compute gradients properly when using GC, check: https://github.com/huggingface/transformers/pull/27124"
    )
...
@@ -184,7 +184,6 @@ class TFMarianModelTest(TFModelTesterMixin, PipelineTesterMixin, unittest.TestCa
    all_generative_model_classes = (TFMarianMTModel,) if is_tf_available() else ()
    pipeline_model_mapping = (
        {
-           "conversational": TFMarianMTModel,
            "feature-extraction": TFMarianModel,
            "summarization": TFMarianMTModel,
            "text2text-generation": TFMarianMTModel,
@@ -208,10 +207,6 @@ class TFMarianModelTest(TFModelTesterMixin, PipelineTesterMixin, unittest.TestCa
        config_and_inputs = self.model_tester.prepare_config_and_inputs_for_common()
        self.model_tester.check_decoder_model_past_large_inputs(*config_and_inputs)

-   @unittest.skip("Skipping for now, to fix @ArthurZ or @ydshieh")
-   def test_pipeline_conversational(self):
-       pass
-

@require_tf
class AbstractMarianIntegrationTest(unittest.TestCase):
...
@@ -240,7 +240,6 @@ class MBartModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixi
    all_generative_model_classes = (MBartForConditionalGeneration,) if is_torch_available() else ()
    pipeline_model_mapping = (
        {
-           "conversational": MBartForConditionalGeneration,
            "feature-extraction": MBartModel,
            "fill-mask": MBartForConditionalGeneration,
            "question-answering": MBartForQuestionAnswering,
...
@@ -161,7 +161,6 @@ class TFMBartModelTest(TFModelTesterMixin, PipelineTesterMixin, unittest.TestCas
    all_generative_model_classes = (TFMBartForConditionalGeneration,) if is_tf_available() else ()
    pipeline_model_mapping = (
        {
-           "conversational": TFMBartForConditionalGeneration,
            "feature-extraction": TFMBartModel,
            "summarization": TFMBartForConditionalGeneration,
            "text2text-generation": TFMBartForConditionalGeneration,
...
@@ -555,7 +555,6 @@ class MT5ModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin,
    all_generative_model_classes = (MT5ForConditionalGeneration,) if is_torch_available() else ()
    pipeline_model_mapping = (
        {
-           "conversational": MT5ForConditionalGeneration,
            "feature-extraction": MT5Model,
            "question-answering": MT5ForQuestionAnswering,
            "summarization": MT5ForConditionalGeneration,
@@ -886,10 +885,6 @@ class MT5ModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin,
            attn_weights = out[attn_name] if attn_name == attention_names[0] else out[attn_name][-1]
            self.assertEqual(sum([w.sum().item() for w in attn_weights]), 0.0)

-   @unittest.skip("Does not support conversations.")
-   def test_pipeline_conversational(self):
-       pass
-

# Copied from tests.models.t5.test_modeling_t5.T5EncoderOnlyModelTester with T5->MT5
class MT5EncoderOnlyModelTester:
...
@@ -421,7 +421,6 @@ class MvpModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin,
    all_generative_model_classes = (MvpForConditionalGeneration,) if is_torch_available() else ()
    pipeline_model_mapping = (
        {
-           "conversational": MvpForConditionalGeneration,
            "feature-extraction": MvpModel,
            "fill-mask": MvpForConditionalGeneration,
            "question-answering": MvpForQuestionAnswering,
...
@@ -250,7 +250,6 @@ class NllbMoeModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMi
    all_generative_model_classes = (NllbMoeForConditionalGeneration,) if is_torch_available() else ()
    pipeline_model_mapping = (
        {
-           "conversational": NllbMoeForConditionalGeneration,
            "feature-extraction": NllbMoeModel,
            "summarization": NllbMoeForConditionalGeneration,
            "text2text-generation": NllbMoeForConditionalGeneration,
...
@@ -246,7 +246,6 @@ class PegasusModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMi
    all_generative_model_classes = (PegasusForConditionalGeneration,) if is_torch_available() else ()
    pipeline_model_mapping = (
        {
-           "conversational": PegasusForConditionalGeneration,
            "feature-extraction": PegasusModel,
            "summarization": PegasusForConditionalGeneration,
            "text-generation": PegasusForCausalLM,
...
@@ -182,7 +182,6 @@ class TFPegasusModelTest(TFModelTesterMixin, PipelineTesterMixin, unittest.TestC
    all_generative_model_classes = (TFPegasusForConditionalGeneration,) if is_tf_available() else ()
    pipeline_model_mapping = (
        {
-           "conversational": TFPegasusForConditionalGeneration,
            "feature-extraction": TFPegasusModel,
            "summarization": TFPegasusForConditionalGeneration,
            "text2text-generation": TFPegasusForConditionalGeneration,
...
@@ -206,7 +206,6 @@ class PegasusXModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterM
    all_generative_model_classes = (PegasusXForConditionalGeneration,) if is_torch_available() else ()
    pipeline_model_mapping = (
        {
-           "conversational": PegasusXForConditionalGeneration,
            "feature-extraction": PegasusXModel,
            "summarization": PegasusXForConditionalGeneration,
            "text2text-generation": PegasusXForConditionalGeneration,
...
@@ -227,7 +227,6 @@ class PLBartModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMix
    all_generative_model_classes = (PLBartForConditionalGeneration,) if is_torch_available() else ()
    pipeline_model_mapping = (
        {
-           "conversational": PLBartForConditionalGeneration,
            "feature-extraction": PLBartModel,
            "summarization": PLBartForConditionalGeneration,
            "text-classification": PLBartForSequenceClassification,
...
@@ -891,7 +891,6 @@ class ProphetNetModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTeste
    all_generative_model_classes = (ProphetNetForConditionalGeneration,) if is_torch_available() else ()
    pipeline_model_mapping = (
        {
-           "conversational": ProphetNetForConditionalGeneration,
            "feature-extraction": ProphetNetModel,
            "summarization": ProphetNetForConditionalGeneration,
            "text-generation": ProphetNetForCausalLM,
...
@@ -645,7 +645,6 @@ class SeamlessM4TModelWithTextInputTest(
    pipeline_model_mapping = (
        {
            "automatic-speech-recognition": SeamlessM4TForSpeechToText,
-           "conversational": SeamlessM4TForTextToText,
            "feature-extraction": SeamlessM4TModel,
            "summarization": SeamlessM4TForTextToText,
            "text-to-audio": SeamlessM4TForTextToSpeech,
...
@@ -559,7 +559,6 @@ class SwitchTransformersModelTest(ModelTesterMixin, GenerationTesterMixin, Pipel
    all_generative_model_classes = (SwitchTransformersForConditionalGeneration,) if is_torch_available() else ()
    pipeline_model_mapping = (
        {
-           "conversational": SwitchTransformersForConditionalGeneration,
            "feature-extraction": SwitchTransformersModel,
            "summarization": SwitchTransformersForConditionalGeneration,
            "text2text-generation": SwitchTransformersForConditionalGeneration,
...
@@ -558,7 +558,6 @@ class T5ModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin,
    all_generative_model_classes = (T5ForConditionalGeneration,) if is_torch_available() else ()
    pipeline_model_mapping = (
        {
-           "conversational": T5ForConditionalGeneration,
            "feature-extraction": T5Model,
            "question-answering": T5ForQuestionAnswering,
            "summarization": T5ForConditionalGeneration,
@@ -889,10 +888,6 @@ class T5ModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin,
            attn_weights = out[attn_name] if attn_name == attention_names[0] else out[attn_name][-1]
            self.assertEqual(sum([w.sum().item() for w in attn_weights]), 0.0)

-   @unittest.skip("Does not support conversations.")
-   def test_pipeline_conversational(self):
-       pass
-

class T5EncoderOnlyModelTester:
    def __init__(
...
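After this change, asking for the removed task by name fails at task lookup. A quick sanity check, assuming a transformers version that includes this PR (the exact error text may differ):

```python
from transformers import pipeline

try:
    pipeline("conversational")  # task registry entry removed by this PR
except KeyError as err:
    # Expected along the lines of:
    # "Unknown task conversational, available tasks are [...]"
    print(err)
```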