Unverified Commit 3a9bcc3f authored by Baber Abbasi's avatar Baber Abbasi Committed by GitHub
Browse files

pacify pre-commit (#3268)

parent a35eb973
...@@ -76,7 +76,9 @@ class OptimumLM(HFLM): ...@@ -76,7 +76,9 @@ class OptimumLM(HFLM):
"PIPELINE_PARALLEL" "PIPELINE_PARALLEL"
) )
model_cls = OVModelForCausalLM if self.backend == "causal" else OVModelForSeq2SeqLM model_cls = (
OVModelForCausalLM if self.backend == "causal" else OVModelForSeq2SeqLM
)
self._model = model_cls.from_pretrained( self._model = model_cls.from_pretrained(
pretrained, pretrained,
revision=revision, revision=revision,
......
...@@ -66,7 +66,7 @@ if __name__ == "__main__": ...@@ -66,7 +66,7 @@ if __name__ == "__main__":
line = line.format(lang=lang_abbr) line = line.format(lang=lang_abbr)
if "{ans_regex}" in line: if "{ans_regex}" in line:
ans_regex = lang_lib_list[-1].replace( ans_regex = lang_lib_list[-1].replace(
"({})", "\(?([ABCDEFGHIJ])\)?" "({})", r"\(?([ABCDEFGHIJ])\)?"
) )
if lang_abbr == "en": if lang_abbr == "en":
ans_regex = ans_regex.lstrip("the").strip() ans_regex = ans_regex.lstrip("the").strip()
......
...@@ -11,9 +11,21 @@ from lm_eval.api.registry import get_model ...@@ -11,9 +11,21 @@ from lm_eval.api.registry import get_model
SUPPORTED_ARCHITECTURES_TASKS = [ SUPPORTED_ARCHITECTURES_TASKS = [
("causal", "facebook/opt-125m", "lambada_openai",), (
("causal", "hf-internal-testing/tiny-random-gpt2", "wikitext",), "causal",
("seq2seq", "hf-internal-testing/tiny-random-t5", "sst2",), "facebook/opt-125m",
"lambada_openai",
),
(
"causal",
"hf-internal-testing/tiny-random-gpt2",
"wikitext",
),
(
"seq2seq",
"hf-internal-testing/tiny-random-t5",
"sst2",
),
] ]
...@@ -21,9 +33,7 @@ SUPPORTED_ARCHITECTURES_TASKS = [ ...@@ -21,9 +33,7 @@ SUPPORTED_ARCHITECTURES_TASKS = [
def test_evaluator(backend, model_id, task): def test_evaluator(backend, model_id, task):
with tempfile.TemporaryDirectory() as tmpdirname: with tempfile.TemporaryDirectory() as tmpdirname:
model_cls = OVModelForCausalLM if backend == "causal" else OVModelForSeq2SeqLM model_cls = OVModelForCausalLM if backend == "causal" else OVModelForSeq2SeqLM
model = model_cls.from_pretrained( model = model_cls.from_pretrained(model_id, export=True, use_cache=True)
model_id, export=True, use_cache=True
)
model.save_pretrained(tmpdirname) model.save_pretrained(tmpdirname)
tokenizer = AutoTokenizer.from_pretrained(model_id) tokenizer = AutoTokenizer.from_pretrained(model_id)
tokenizer.save_pretrained(tmpdirname) tokenizer.save_pretrained(tmpdirname)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment