"vscode:/vscode.git/clone" did not exist on "c9693db2fcd6876bfc4b00dd9088808896fff94c"
Unverified Commit 6775b211 authored by lewtun's avatar lewtun Committed by GitHub
Browse files

Remove Longformers from ONNX-supported models (#15273)

parent 7a1412e1
......@@ -43,7 +43,7 @@ and are designed to be easily extendable to other architectures.
Ready-made configurations include the following architectures:
<!--This table is automatically generated by make style, do not fill manually!-->
<!--This table is automatically generated by `make fix-copies`, do not fill manually!-->
- ALBERT
- BART
......@@ -53,7 +53,6 @@ Ready-made configurations include the following architectures:
- GPT Neo
- I-BERT
- LayoutLM
- Longformer
- Marian
- mBART
- OpenAI GPT-2
......
......@@ -13,10 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
""" Longformer configuration"""
from collections import OrderedDict
from typing import List, Mapping, Union
from typing import List, Union
from ...onnx import OnnxConfig
from ...utils import logging
from ..roberta.configuration_roberta import RobertaConfig
......@@ -69,18 +67,3 @@ class LongformerConfig(RobertaConfig):
def __init__(self, attention_window: Union[List[int], int] = 512, sep_token_id: int = 2, **kwargs):
    """Initialize a Longformer configuration.

    Args:
        attention_window (`Union[List[int], int]`, defaults to 512):
            Size of the local attention window. A single int applies the same
            window to every layer; a list gives a per-layer window size.
        sep_token_id (`int`, defaults to 2):
            Id of the separator token; forwarded to the parent
            (RobertaConfig-derived) constructor.
        **kwargs:
            Remaining configuration options, passed through to the parent class.
    """
    # Let the parent class consume sep_token_id and the remaining kwargs first,
    # then record the Longformer-specific attention window on the instance.
    super().__init__(sep_token_id=sep_token_id, **kwargs)
    self.attention_window = attention_window
class LongformerOnnxConfig(OnnxConfig):
    """ONNX export configuration for Longformer models.

    Declares the model's input/output tensor names together with their
    dynamic axes (axis index -> symbolic axis name) for the ONNX exporter.
    """

    @property
    def inputs(self) -> Mapping[str, Mapping[int, str]]:
        """Dynamic axes of the exported model's inputs, keyed by input name."""
        # Both inputs share the same dynamic batch/sequence axes.
        axes = OrderedDict()
        for tensor_name in ("input_ids", "attention_mask"):
            axes[tensor_name] = {0: "batch", 1: "sequence"}
        return axes

    @property
    def outputs(self) -> Mapping[str, Mapping[int, str]]:
        """Dynamic axes of the exported model's outputs, keyed by output name."""
        axes = OrderedDict()
        axes["last_hidden_state"] = {0: "batch", 1: "sequence"}
        # pooler_output has no sequence dimension; only batch is dynamic.
        axes["pooler_output"] = {0: "batch"}
        return axes
......@@ -11,7 +11,6 @@ from ..models.gpt2 import GPT2OnnxConfig
from ..models.gpt_neo import GPTNeoOnnxConfig
from ..models.ibert import IBertOnnxConfig
from ..models.layoutlm import LayoutLMOnnxConfig
from ..models.longformer import LongformerOnnxConfig
from ..models.marian import MarianOnnxConfig
from ..models.mbart import MBartOnnxConfig
from ..models.roberta import RobertaOnnxConfig
......@@ -154,15 +153,6 @@ class FeaturesManager:
"question-answering",
onnx_config_cls=DistilBertOnnxConfig,
),
"longformer": supported_features_mapping(
"default",
"masked-lm",
"sequence-classification",
# "multiple-choice",
"token-classification",
"question-answering",
onnx_config_cls=LongformerOnnxConfig,
),
"marian": supported_features_mapping(
"default",
"default-with-past",
......
......@@ -174,7 +174,6 @@ PYTORCH_EXPORT_MODELS = {
("ibert", "kssteven/ibert-roberta-base"),
("camembert", "camembert-base"),
("distilbert", "distilbert-base-cased"),
# ("longFormer", "longformer-base-4096"),
("roberta", "roberta-base"),
("xlm-roberta", "xlm-roberta-base"),
("layoutlm", "microsoft/layoutlm-base-uncased"),
......
......@@ -210,7 +210,7 @@ def check_onnx_model_list(overwrite=False):
"""Check the model list in the serialization.mdx is consistent with the state of the lib and maybe `overwrite`."""
current_list, start_index, end_index, lines = _find_text_in_file(
filename=os.path.join(PATH_TO_DOCS, "serialization.mdx"),
start_prompt="<!--This table is automatically generated by make style, do not fill manually!-->",
start_prompt="<!--This table is automatically generated by `make fix-copies`, do not fill manually!-->",
end_prompt="The ONNX conversion is supported for the PyTorch versions of the models.",
)
new_list = get_onnx_model_list()
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment