"tests/vscode:/vscode.git/clone" did not exist on "b133abb82e128bb091ffd1c299b427f3b3958cb3"
Unverified Commit 97fff98c authored by blzheng, committed by GitHub
Browse files

[CPU] Fix phi4-mm prompt issue in bench_serving (#9900)

parent ba066ca0
...@@ -995,17 +995,25 @@ def sample_mmmu_requests( ...@@ -995,17 +995,25 @@ def sample_mmmu_requests(
prompt = f"Question: {question}\n\nAnswer: " prompt = f"Question: {question}\n\nAnswer: "
if apply_chat_template: if apply_chat_template:
try: try:
prompt = tokenizer.apply_chat_template( is_phi4_multimodal = (
[ "phi-4-multimodal" in tokenizer.name_or_path.lower()
{ )
"role": "user", if is_phi4_multimodal:
"content": [ # <|endoftext10|> is the image token used in the phi-4-multimodal model.
content = prompt.replace("image 1", "<|endoftext10|>")
else:
content = [
{ {
"type": "image_url", "type": "image_url",
"image_url": {"url": image_data}, "image_url": {"url": image_data},
}, },
{"type": "text", "text": prompt}, {"type": "text", "text": prompt},
], ]
prompt = tokenizer.apply_chat_template(
[
{
"role": "user",
"content": content,
} }
], ],
add_generation_prompt=True, add_generation_prompt=True,
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment