Unverified Commit ce36a6bd authored by Lintang Sutawika, committed by GitHub

max_length not used

Passing `max_length=...` via `--model_args` is currently not applied during generation. This is supposed to fix that.
parent 9d36354e
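For reference, the `max_length` in question is the one a user supplies as a model argument on the command line. A typical invocation might look like the following (the model and task names are only placeholders):

```
lm_eval --model hf \
    --model_args pretrained=gpt2,max_length=2048 \
    --tasks lambada_openai
```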
```diff
@@ -1304,8 +1304,10 @@ class HFLM(TemplateLM):
             context_enc = context_enc.to(self.device)
             attn_masks = attn_masks.to(self.device)

-            if "max_length" not in kwargs:
+            if self.max_length is None:
                 kwargs["max_length"] = context_enc.shape[1] + max_gen_toks
+            else:
+                kwargs["max_length"] = self.max_length

             # perform batched generation
             cont = self._model_generate(
```
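The effect of the patched branch, isolated into a standalone sketch. The `resolve_max_length` helper and its arguments are illustrative, not part of the harness; only the branching logic mirrors the diff above:

```python
# Minimal sketch of the length-resolution logic after this change.
# Only `max_length` (from --model_args) and `max_gen_toks` correspond to
# real harness names; the function itself is illustrative scaffolding.
def resolve_max_length(context_len: int, max_gen_toks: int, model_max_length=None) -> int:
    """Pick the generation length budget the way the patched branch does."""
    if model_max_length is None:
        # No user-supplied limit: allow the full context plus the
        # requested number of new tokens, as before the patch.
        return context_len + max_gen_toks
    # User passed max_length=... via --model_args: honor it.
    return model_max_length


# e.g. a 512-token context with 256 new tokens requested
assert resolve_max_length(512, 256) == 768          # default behavior
assert resolve_max_length(512, 256, 2048) == 2048   # --model_args max_length=2048
```

The design change is simply that the user-configured `self.max_length` now takes precedence over the context-derived default, instead of never being consulted.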