@@ -299,30 +279,6 @@ class OnnxConfigWithPast(OnnxConfig, ABC):
        return None
    @property
    def num_layers(self) -> int:
        """
        The number of layers attribute retrieved from the model config. Override this for model configs where the
        number of layers attribute is not called `num_layers`.
        """
        if not hasattr(self._config, "num_layers"):
            raise AttributeError(
                "could not find the number of layers attribute in the model configuration, override the num_layers property of the model OnnxConfig to solve this"
            )
        return self._config.num_layers

    @property
    def num_attention_heads(self) -> int:
        """
        The number of attention heads attribute retrieved from the model config. Override this for model configs where
        the number of attention heads attribute is not called `num_attention_heads`.
        """
        if not hasattr(self._config, "num_attention_heads"):
            raise AttributeError(
                "could not find the number of attention heads attribute in the model configuration, override the num_attention_heads property of the model OnnxConfig to solve this"
            )
        return self._config.num_attention_heads
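(Aside, not part of the diff: the two properties above are meant to be overridden when a model configuration stores these counts under other names. A minimal sketch, assuming a hypothetical model whose config uses `n_layer` and `n_head`:)

class MyModelOnnxConfig(OnnxConfigWithPast):  # hypothetical subclass, for illustration only
    @property
    def num_layers(self) -> int:
        # Assumed: this model's config names the attribute `n_layer`, not `num_layers`.
        return self._config.n_layer

    @property
    def num_attention_heads(self) -> int:
        # Assumed: this model's config names the attribute `n_head`, not `num_attention_heads`.
        return self._config.n_head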
    def generate_dummy_inputs(
        self,
        tokenizer: PreTrainedTokenizer,
...
@@ -331,217 +287,32 @@ class OnnxConfigWithPast(OnnxConfig, ABC):
        is_pair: bool = False,
        framework: Optional[TensorType] = None,
    ) -> Mapping[str, Any]:
        # If dynamic axis (-1) we forward with a fixed dimension of 2 samples to avoid optimizations made by ONNX
        # TODO: should we set seq_length = 1 when self.use_past = True?
"could not find the number of encoder and decoder layers attributes in the model configuration, override the num_layers property of the model OnnxConfig to solve this"
"could not find the number of attention heads for the encoder and the decoder attributes in the model configuration, override the num_attention_heads property of the model OnnxConfig to solve this"