Commit 4ee812a0 authored by Baber's avatar Baber
Browse files

nit

parent 21fce77a
...@@ -21,13 +21,13 @@ class RWKVWRAPPER(HFLM): ...@@ -21,13 +21,13 @@ class RWKVWRAPPER(HFLM):
self.is_hf = is_hf or (True if pretrained.endswith("hf") else False) self.is_hf = is_hf or (True if pretrained.endswith("hf") else False)
assert kwargs["tokenizer"] is not None, "`tokenizer` is required" assert kwargs["tokenizer"] is not None, "`tokenizer` is required"
assert kwargs["batch_size"] in [1, "1"], "`batch_size` must be 1" assert kwargs["batch_size"] in [1, "1"], "`batch_size` must be 1"
self.tokenizer = kwargs["tokenizer"] tokenizer = kwargs.pop("tokenizer")
self.pretrained = pretrained pretrained = pretrained
super().__init__( super().__init__(
pretrained=pretrained, pretrained=pretrained,
# set appropriate defaults for tokenizer, max length, etc # set appropriate defaults for tokenizer, max length, etc
backend=kwargs.pop("backend", "causal"), backend=kwargs.pop("backend", "causal"),
tokenizer=self.tokenizer, tokenizer=tokenizer,
max_length=kwargs.pop("max_length", 4096), max_length=kwargs.pop("max_length", 4096),
**kwargs, **kwargs,
) )
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment