Unverified commit 9ade8e74, authored by Nicolas Patry and committed by GitHub

Upgrading TFAutoModelWithLMHead (#7730) to:

- TFAutoModelForCausalLM
- TFAutoModelForMaskedLM
- TFAutoModelForSeq2SeqLM

as per the deprecation warning (a short migration sketch follows below). No new tests are added, as the change simply removes existing deprecation warnings from the current tests.
parent 62b5622e
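For readers updating their own code, here is a minimal sketch of the replacement this commit applies, assuming TensorFlow is installed. The checkpoints distilroberta-base, gpt2 and t5-small are the pipeline defaults visible in the diff below and are used purely for illustration; the variable names are hypothetical.

# Deprecated: TFAutoModelWithLMHead infers the head type from the model config
# and emits a deprecation warning on use.
# from transformers import TFAutoModelWithLMHead
# model = TFAutoModelWithLMHead.from_pretrained("distilroberta-base")

# Replacement: pick the auto class that matches the task explicitly.
from transformers import (
    TFAutoModelForCausalLM,   # causal LM head, e.g. text-generation with gpt2
    TFAutoModelForMaskedLM,   # masked LM head, e.g. fill-mask with distilroberta-base
    TFAutoModelForSeq2SeqLM,  # encoder-decoder LM head, e.g. summarization or translation with t5-small
)

masked_lm = TFAutoModelForMaskedLM.from_pretrained("distilroberta-base")
causal_lm = TFAutoModelForCausalLM.from_pretrained("gpt2")
seq2seq_lm = TFAutoModelForSeq2SeqLM.from_pretrained("t5-small")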
@@ -52,11 +52,11 @@ if is_tf_available():
         TF_MODEL_WITH_LM_HEAD_MAPPING,
         TFAutoModel,
         TFAutoModelForCausalLM,
+        TFAutoModelForMaskedLM,
         TFAutoModelForQuestionAnswering,
         TFAutoModelForSeq2SeqLM,
         TFAutoModelForSequenceClassification,
         TFAutoModelForTokenClassification,
-        TFAutoModelWithLMHead,
     )
 if is_torch_available():
@@ -2577,31 +2577,31 @@ SUPPORTED_TASKS = {
     },
     "fill-mask": {
         "impl": FillMaskPipeline,
-        "tf": TFAutoModelWithLMHead if is_tf_available() else None,
+        "tf": TFAutoModelForMaskedLM if is_tf_available() else None,
         "pt": AutoModelForMaskedLM if is_torch_available() else None,
         "default": {"model": {"pt": "distilroberta-base", "tf": "distilroberta-base"}},
     },
     "summarization": {
         "impl": SummarizationPipeline,
-        "tf": TFAutoModelWithLMHead if is_tf_available() else None,
+        "tf": TFAutoModelForSeq2SeqLM if is_tf_available() else None,
         "pt": AutoModelForSeq2SeqLM if is_torch_available() else None,
         "default": {"model": {"pt": "sshleifer/distilbart-cnn-12-6", "tf": "t5-small"}},
     },
     "translation_en_to_fr": {
         "impl": TranslationPipeline,
-        "tf": TFAutoModelWithLMHead if is_tf_available() else None,
+        "tf": TFAutoModelForSeq2SeqLM if is_tf_available() else None,
         "pt": AutoModelForSeq2SeqLM if is_torch_available() else None,
         "default": {"model": {"pt": "t5-base", "tf": "t5-base"}},
     },
     "translation_en_to_de": {
         "impl": TranslationPipeline,
-        "tf": TFAutoModelWithLMHead if is_tf_available() else None,
+        "tf": TFAutoModelForSeq2SeqLM if is_tf_available() else None,
         "pt": AutoModelForSeq2SeqLM if is_torch_available() else None,
         "default": {"model": {"pt": "t5-base", "tf": "t5-base"}},
     },
     "translation_en_to_ro": {
         "impl": TranslationPipeline,
-        "tf": TFAutoModelWithLMHead if is_tf_available() else None,
+        "tf": TFAutoModelForSeq2SeqLM if is_tf_available() else None,
         "pt": AutoModelForSeq2SeqLM if is_torch_available() else None,
         "default": {"model": {"pt": "t5-base", "tf": "t5-base"}},
     },
@@ -2613,7 +2613,7 @@ SUPPORTED_TASKS = {
     },
     "text-generation": {
         "impl": TextGenerationPipeline,
-        "tf": TFAutoModelWithLMHead if is_tf_available() else None,
+        "tf": TFAutoModelForCausalLM if is_tf_available() else None,
         "pt": AutoModelForCausalLM if is_torch_available() else None,
         "default": {"model": {"pt": "gpt2", "tf": "gpt2"}},
     },
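As a usage note, the change is observable through the public pipeline() factory: with TensorFlow installed, the TF branch of each task above now resolves to the task-specific auto class, so constructing these pipelines no longer triggers the deprecation warning. A minimal sketch, using the default fill-mask checkpoint (distilroberta-base, which uses the <mask> token):

from transformers import pipeline

# The "fill-mask" task on the TF framework now loads TFAutoModelForMaskedLM
# instead of the deprecated TFAutoModelWithLMHead.
fill_mask = pipeline("fill-mask", framework="tf")
print(fill_mask("Paris is the <mask> of France."))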