Unverified Commit 9a7e83e8 authored by tarinkk's avatar tarinkk Committed by GitHub
Browse files

Fix enable chunked prefill for Llama4 (#5575)

parent 417b44eb
......@@ -73,8 +73,11 @@ class ModelConfig:
)
if enable_multimodal is None:
if self.hf_config.architectures == "Llama4ForConditionalGeneration":
if self.hf_config.architectures[0] == "Llama4ForConditionalGeneration":
enable_multimodal = False
logger.info(
"Multimodal is disabled for Llama4. To enable it, set --enable-llama4-multimodal."
)
else:
enable_multimodal = True
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment