Unverified Commit cc771eca authored by Rich, committed by GitHub

Fix AttributeError in huggingface.py When 'model_type' is Missing (#1489)



* model_type attribute error

An AttributeError is raised when using a model whose config has no 'model_type' attribute.

* fix the behavior both with and without a 'model_type' specification

* use getattr(); also fix the other config.model_type reference

* Update huggingface.py

---------
Co-authored-by: Hailey Schoelkopf <65563625+haileyschoelkopf@users.noreply.github.com>
parent a08eb870
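The core of the fix is Python's getattr() with a default: direct attribute access on a config object that lacks 'model_type' raises AttributeError, while getattr(config, "model_type", None) falls back to None. Below is a minimal sketch of that pattern, using a SimpleNamespace stand-in rather than the actual Hugging Face config class; the object and messages are hypothetical, only the getattr() behavior is the point.

from types import SimpleNamespace

# Stand-in for a model config that does not define 'model_type'
# (hypothetical object; the real attribute lives on a transformers config).
config = SimpleNamespace()

# Direct attribute access fails on such a config:
try:
    _ = config.model_type
except AttributeError:
    print("config.model_type raised AttributeError")

# getattr() with a default returns None instead of raising,
# so the comparison simply evaluates to False:
if getattr(config, "model_type", None) == "qwen":
    print("qwen-specific handling")
else:
    print("no 'model_type' on the config; fall through to the generic path")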
@@ -250,7 +250,7 @@ class HFLM(TemplateLM):
         elif self.tokenizer.eos_token:
             self.tokenizer.pad_token_id = self.tokenizer.eos_token_id
         else:
-            if self.config.model_type == "qwen":
+            if getattr(self.config, "model_type", None) == "qwen":
                 # Qwen's trust_remote_code tokenizer does not allow for adding special tokens
                 self.tokenizer.pad_token = "<|endoftext|>"
             elif (
@@ -268,11 +268,11 @@ class HFLM(TemplateLM):
         # TODO: override this for Gemma
         self.add_bos_token = add_bos_token
-        if self.config.model_type == "gemma":
+        if getattr(self.config, "model_type", None) == "gemma":
+            self.add_bos_token = True
             eval_logger.info(
-                "Model is of type 'gemma', will use a BOS token as Gemma underperforms without it."
+                f"Model type is '{self.config.model_type}', a BOS token will be used as Gemma underperforms without it."
             )
-            self.add_bos_token = True
         self._max_length = max_length
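As a quick check that the patched branch behaves sensibly both with and without a 'model_type', a hypothetical snippet mirroring the gemma check above (not code from the repository):

from types import SimpleNamespace

for cfg in (SimpleNamespace(model_type="gemma"), SimpleNamespace()):
    model_type = getattr(cfg, "model_type", None)
    add_bos_token = model_type == "gemma"
    print(f"model_type={model_type!r} -> add_bos_token={add_bos_token}")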