Commit 49f43f9f authored by daniel-furman

first stab at wrap_chat_template, various

parent 2d3c835c
@@ -734,6 +734,14 @@ class HFLM(LM):
         stopping_criteria = stop_sequences_criteria(
             self.tokenizer, stop, 1, context.shape[0]
         )
+        print(self.model.generate(
+            input_ids=context,
+            max_length=max_length,
+            stopping_criteria=stopping_criteria,
+            pad_token_id=self.tokenizer.pad_token_id,
+            use_cache=True,
+            **generation_kwargs,
+        ))
         return self.model.generate(
             input_ids=context,
             max_length=max_length,
...
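For context, the hunk above prints the result of self.model.generate before returning the same call. Below is a minimal, self-contained sketch of the generation pattern being exercised. The StopOnSequence class is an illustrative stand-in for the harness's stop_sequences_criteria helper, not its actual implementation, and the model name "gpt2", the prompt, and the stop string are arbitrary placeholders.

    import torch
    from transformers import (
        AutoModelForCausalLM,
        AutoTokenizer,
        StoppingCriteria,
        StoppingCriteriaList,
    )

    class StopOnSequence(StoppingCriteria):
        """Halt generation once a stop string appears in every continuation.

        Illustrative approximation of a stop-sequence criterion; not the
        harness's stop_sequences_criteria implementation.
        """

        def __init__(self, tokenizer, stop_sequence, prompt_length):
            self.tokenizer = tokenizer
            self.stop_sequence = stop_sequence
            # Number of prompt tokens to skip so the check only sees new text.
            self.prompt_length = prompt_length

        def __call__(self, input_ids, scores, **kwargs):
            continuations = self.tokenizer.batch_decode(
                input_ids[:, self.prompt_length:]
            )
            return all(self.stop_sequence in text for text in continuations)

    tokenizer = AutoTokenizer.from_pretrained("gpt2")
    model = AutoModelForCausalLM.from_pretrained("gpt2")

    context = tokenizer("Q: What is 2 + 2?\nA:", return_tensors="pt").input_ids
    stopping_criteria = StoppingCriteriaList(
        [StopOnSequence(tokenizer, "\n", context.shape[1])]
    )

    output = model.generate(
        input_ids=context,
        max_length=64,
        stopping_criteria=stopping_criteria,
        # GPT-2 has no pad token, so reuse EOS for padding.
        pad_token_id=tokenizer.eos_token_id,
        use_cache=True,
    )
    print(tokenizer.decode(output[0][context.shape[1]:]))

Decoding only the tokens past the prompt keeps the stop check from firing on text that was already in the context, which mirrors why the hunk passes the prompt-related arguments into stop_sequences_criteria.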