Commit ed858f53 authored by Druhin Abrol, committed by GitHub

Removed XLMModel inheritance from FlaubertModel(torch+tf) (#19432)



* FlaubertModel inheritance from XLMModel removed

* Fix style and add FlaubertPreTrainedModel to __init__

* Fix formatting issue

* Fix Typo and repo-consistency

* Fix style

* add FlaubertPreTrainedModel to TYPE_HINT

* fix repo consistency

* Update src/transformers/models/flaubert/modeling_flaubert.py
Co-authored-by: Sylvain Gugger <35901082+sgugger@users.noreply.github.com>

* Update src/transformers/models/flaubert/modeling_flaubert.py
Co-authored-by: Sylvain Gugger <35901082+sgugger@users.noreply.github.com>

* Update src/transformers/models/flaubert/modeling_flaubert.py
Co-authored-by: Sylvain Gugger <35901082+sgugger@users.noreply.github.com>

* Update src/transformers/models/flaubert/modeling_flaubert.py
Co-authored-by: Sylvain Gugger <35901082+sgugger@users.noreply.github.com>

* Update src/transformers/models/flaubert/modeling_tf_flaubert.py
Co-authored-by: Sylvain Gugger <35901082+sgugger@users.noreply.github.com>

* Update src/transformers/models/flaubert/modeling_flaubert.py
Co-authored-by: Sylvain Gugger <35901082+sgugger@users.noreply.github.com>

* Update src/transformers/models/flaubert/modeling_tf_flaubert.py
Co-authored-by: Sylvain Gugger <35901082+sgugger@users.noreply.github.com>

* Update src/transformers/models/flaubert/modeling_flaubert.py
Co-authored-by: Sylvain Gugger <35901082+sgugger@users.noreply.github.com>

* removed redundant Copied from comments

* added missing copied from comments

Co-authored-by: Sylvain Gugger <35901082+sgugger@users.noreply.github.com>
parent 5fda1fbd
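
The commit message above describes replacing cross-model inheritance with the library's "# Copied from" convention, which `make repo-consistency` (via `utils/check_copies.py`) keeps in sync. The sketch below illustrates the general shape of that change; it is not the PR's exact code, and the class bodies and comment targets shown are simplified assumptions.

```python
from transformers import FlaubertConfig
from transformers.modeling_utils import PreTrainedModel
from transformers.models.xlm.modeling_xlm import XLMModel


# Before: Flaubert reused XLM's implementation through inheritance.
class FlaubertModel(XLMModel):  # old pattern
    config_class = FlaubertConfig


# After: Flaubert gets its own base class, and the shared code is duplicated
# in modeling_flaubert.py. "# Copied from ..." markers let the
# repo-consistency check keep the duplicated code aligned with XLM.
class FlaubertPreTrainedModel(PreTrainedModel):
    config_class = FlaubertConfig


# Copied from transformers.models.xlm.modeling_xlm.XLMModel with XLM->Flaubert
class FlaubertModel(FlaubertPreTrainedModel):
    ...  # duplicated implementation, checked by `make repo-consistency`
```
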
@@ -1273,6 +1273,7 @@ else:
             "FlaubertForTokenClassification",
             "FlaubertModel",
             "FlaubertWithLMHeadModel",
+            "FlaubertPreTrainedModel",
         ]
     )
     _import_structure["models.flava"].extend(

@@ -4141,6 +4142,7 @@ if TYPE_CHECKING:
             FlaubertForSequenceClassification,
             FlaubertForTokenClassification,
             FlaubertModel,
+            FlaubertPreTrainedModel,
             FlaubertWithLMHeadModel,
         )
         from .models.flava import (

@@ -41,6 +41,7 @@ else:
         "FlaubertForTokenClassification",
         "FlaubertModel",
         "FlaubertWithLMHeadModel",
+        "FlaubertPreTrainedModel",
     ]

 try:

@@ -79,6 +80,7 @@ if TYPE_CHECKING:
             FlaubertForSequenceClassification,
             FlaubertForTokenClassification,
             FlaubertModel,
+            FlaubertPreTrainedModel,
             FlaubertWithLMHeadModel,
         )
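
Both `__init__` hunks above add the new name in two places because transformers builds its public namespace lazily: `_import_structure` feeds a `_LazyModule` at runtime, while the `TYPE_CHECKING` block gives static type checkers and IDEs real imports. Below is a simplified sketch of that pattern (assuming the standard `_LazyModule` helper from `transformers.utils`; it is not the actual contents of either file).

```python
import sys
from typing import TYPE_CHECKING

from transformers.utils import _LazyModule

# Runtime side: names stay as strings until someone accesses them.
_import_structure = {
    "modeling_flaubert": ["FlaubertModel", "FlaubertPreTrainedModel"],
}

if TYPE_CHECKING:
    # Static side: type checkers resolve ordinary imports here.
    from .modeling_flaubert import FlaubertModel, FlaubertPreTrainedModel
else:
    # Attribute access on the module triggers the real import lazily.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure)
```

Forgetting either spot leaves the symbol importable in one mode but not the other, which is why the PR touches both lists.
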

@@ -2065,6 +2065,13 @@ class FlaubertModel(metaclass=DummyObject):
         requires_backends(self, ["torch"])


+class FlaubertPreTrainedModel(metaclass=DummyObject):
+    _backends = ["torch"]
+
+    def __init__(self, *args, **kwargs):
+        requires_backends(self, ["torch"])
+
+
 class FlaubertWithLMHeadModel(metaclass=DummyObject):
     _backends = ["torch"]
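
The dummy object above means that, on an install without torch, importing `FlaubertPreTrainedModel` still succeeds and only instantiation raises an informative backend error. With torch installed, the class is now part of the public API and can be subclassed directly. A minimal usage sketch follows; `MyFlaubertTagger` and its head are hypothetical, not part of this PR.

```python
import torch.nn as nn

from transformers import FlaubertConfig, FlaubertModel, FlaubertPreTrainedModel


class MyFlaubertTagger(FlaubertPreTrainedModel):
    """Hypothetical token-level head built on the newly exposed base class."""

    def __init__(self, config: FlaubertConfig):
        super().__init__(config)
        self.transformer = FlaubertModel(config)
        self.classifier = nn.Linear(config.emb_dim, config.num_labels)
        # Weight initialization helper inherited from PreTrainedModel.
        self.post_init()

    def forward(self, input_ids, attention_mask=None):
        hidden = self.transformer(input_ids, attention_mask=attention_mask)[0]
        return self.classifier(hidden)


# Small randomly initialized config, just to show the class wires up.
config = FlaubertConfig(vocab_size=100, emb_dim=64, n_layers=2, n_heads=2, num_labels=5)
model = MyFlaubertTagger(config)
```
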