Unverified Commit b177c82c authored by Linsong Chu's avatar Linsong Chu Committed by GitHub
Browse files

fix duplicated kwargs in some model init (#1495)

parent cc771eca
......@@ -56,9 +56,9 @@ class MambaLMWrapper(HFLM):
super().__init__(
pretrained=pretrained,
# set appropriate defaults for tokenizer, max length, etc
backend=kwargs.get("backend", "causal"),
tokenizer=kwargs.get("tokenizer", "EleutherAI/gpt-neox-20b"),
max_length=kwargs.get("max_length", 2048),
backend=kwargs.pop("backend", "causal"),
tokenizer=kwargs.pop("tokenizer", "EleutherAI/gpt-neox-20b"),
max_length=kwargs.pop("max_length", 2048),
**kwargs,
)
......
......@@ -28,7 +28,7 @@ class OptimumLM(HFLM):
super().__init__(
device=self.openvino_device,
backend=kwargs.get("backend", "causal"),
backend=kwargs.pop("backend", "causal"),
**kwargs,
)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment