"git@developer.sourcefind.cn:gaoqiong/migraphx.git" did not exist on "cb01e2808e687042e572922bffe325b870ade470"
Commit 047dde8c authored by daniel-furman

llama test

parent b6c75ed1
@@ -673,18 +673,21 @@ class HFLM(LM):
             chat = [
                 {"role": "system", "content": "You are a helpful, respectful and honest assistant."},
                 {"role": "user", "content": context},
+                {"role": "assistant", "content": continuation},
             ]
-            context = self.tokenizer.apply_chat_template(
+            single_tokenized_conversation = self.tokenizer.apply_chat_template(
                 chat,
                 tokenize=False,
                 add_generation_prompt=True,
             )
+            rfind_continuation = single_tokenized_conversation.rfind(continuation)
+            context = single_tokenized_conversation[:rfind_continuation]
+            continuation = single_tokenized_conversation[rfind_continuation:]
             req.args = (context, continuation)
             new_reqs.append(req)
         return new_reqs

     def _model_call(self, inps, attn_mask=None, labels=None):
         """
         :param inps: torch.Tensor
...
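For reference, a minimal standalone sketch of the splitting logic this commit introduces: the full system/user/assistant conversation is rendered through the tokenizer's chat template as one string, and that string is split at the last occurrence of the raw continuation text, so the templated prefix becomes the new context and the remainder becomes the continuation to be scored. The checkpoint name and the example context/continuation strings below are illustrative only, not taken from the harness.

from transformers import AutoTokenizer

# Illustrative checkpoint; any tokenizer that ships a chat template works.
tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-chat-hf")

context = "What is the capital of France?"  # illustrative request context
continuation = " Paris"                     # illustrative target continuation

chat = [
    {"role": "system", "content": "You are a helpful, respectful and honest assistant."},
    {"role": "user", "content": context},
    {"role": "assistant", "content": continuation},
]

# Render the whole conversation through the chat template as one string.
single_tokenized_conversation = tokenizer.apply_chat_template(
    chat,
    tokenize=False,
    add_generation_prompt=True,
)

# Split at the last occurrence of the raw continuation text: everything
# before it (system prompt, template tokens, user turn) becomes the new
# context, and the tail starting with the continuation is what gets scored.
rfind_continuation = single_tokenized_conversation.rfind(continuation)
context = single_tokenized_conversation[:rfind_continuation]
continuation = single_tokenized_conversation[rfind_continuation:]

Note that str.rfind returns -1 when the continuation does not appear verbatim in the templated string, which would make the split wrong; the sketch assumes the chat template reproduces the continuation text unchanged.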