Unverified Commit 2976f69b authored by gakada, committed by GitHub

Use max_length in AutoSeq2SeqLM (#551)

parent 92929bd2
@@ -351,7 +351,7 @@ class HuggingFaceAutoLM(BaseLM):
         """Return the maximum sequence length of the model.
         NOTE: Different model configurations have different max sequence length
         attribute names.
-            - n_positions: (CTRLConfig)
+            - n_positions: (CTRLConfig, T5Config)
             - max_position_embeddings: (BartConfig, RoFormerConfig)
             - n_ctx: (GPT2Config)
         NOTE: For relative position encoded models you should specify the max
@@ -543,15 +543,6 @@ class AutoSeq2SeqLM(HuggingFaceAutoLM):
     AUTO_MODEL_CLASS = transformers.AutoModelForSeq2SeqLM
     AUTO_PEFT_CLASS = peft.PeftModel

-    @property
-    def max_length(self) -> int:
-        """Return the maximum sequence length of the model.
-        TODO: Currently only works for relative position encoded Seq2Seq models.
-        """
-        if self._max_length is not None:
-            return self._max_length
-        return self._DEFAULT_MAX_LENGTH
-
     def loglikelihood(
         self, requests: List[Tuple[str, str]]
     ) -> List[Tuple[float, bool]]:
...
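The removed override returned self._max_length when it was explicitly set and otherwise fell straight back to _DEFAULT_MAX_LENGTH, so seq2seq models never consulted the sequence-length attributes on their configs. With it gone, AutoSeq2SeqLM inherits the base HuggingFaceAutoLM.max_length behaviour described in the first hunk. Below is a minimal sketch of that attribute-based lookup, assuming only what the docstring above lists; resolve_max_length and the 2048 fallback are illustrative, not the harness's actual code.

    from typing import Optional

    from transformers import AutoConfig

    _DEFAULT_MAX_LENGTH = 2048  # illustrative fallback when no attribute is found

    def resolve_max_length(pretrained: str, max_length: Optional[int] = None) -> int:
        """Sketch of max_length resolution: an explicit value wins, then the
        config attributes named in the docstring, then a default."""
        if max_length is not None:  # corresponds to self._max_length being set
            return max_length
        config = AutoConfig.from_pretrained(pretrained)
        for attr in ("n_positions", "max_position_embeddings", "n_ctx"):
            if hasattr(config, attr):
                return getattr(config, attr)
        return _DEFAULT_MAX_LENGTH

Under this lookup a T5 checkpoint resolves its limit from T5Config.n_positions (hence the docstring change), rather than silently using the default as the old override did.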