Unverified Commit f8eaaab8 authored by miter, committed by GitHub

[fix] logical_to_all_physical_map index 256 is out of bounds in EP parallel. (#6767)


Signed-off-by: miter <miterv@outlook.com>
parent 697b0f71
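
For context on the failure named in the title: with n_routed_experts == 256, valid logical expert IDs are 0..255, and shared-experts fusion introduces one extra logical expert whose ID lands at 256, one past the end of a logical_to_all_physical_map sized for the routed experts under expert parallelism. The snippet below is only an illustrative sketch of that indexing overflow; the tensor shape and variable names are assumptions for demonstration, not the actual sglang EP bookkeeping.

# Illustrative sketch of the reported out-of-bounds access (assumed shapes/names).
import torch

n_routed_experts = 256
# Hypothetical map from logical expert id -> physical expert slot, built only
# for the routed experts, so valid indices are 0..255.
logical_to_all_physical_map = torch.arange(n_routed_experts)

fused_shared_expert_id = n_routed_experts  # fusion appends a 257th logical expert: id 256
try:
    logical_to_all_physical_map[fused_shared_expert_id]
except IndexError as err:
    print(err)  # index 256 is out of bounds for dimension 0 with size 256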
@@ -1714,21 +1714,33 @@ class DeepseekV2ForCausalLM(nn.Module):
             or self.config.n_routed_experts != 256
         ):
             self.num_fused_shared_experts = 0
-            global_server_args_dict["disable_shared_experts_fusion"] = 1
+            global_server_args_dict["disable_shared_experts_fusion"] = True
             log_info_on_rank0(
                 logger,
                 "Only Deepseek V3/R1 on NV-platform can use shared experts fusion optimization. Shared experts fusion optimization is disabled.",
             )
+        elif (global_server_args_dict["enable_deepep_moe"] or global_server_args_dict["enable_ep_moe"]):
+            self.num_fused_shared_experts = 0
+            global_server_args_dict["disable_shared_experts_fusion"] = True
+            log_info_on_rank0(
+                logger,
+                "Deepseek V3/R1 can not use shared experts fusion optimization when in deepep_moe or ep_moe mode. Shared experts fusion optimization is disabled.",
+            )
         elif self.num_fused_shared_experts == 0:
             if (
                 _is_cuda
                 and torch.cuda.get_device_capability("cuda") >= (9, 0)
                 and self.config.architectures[0] == architecture
                 and self.config.n_routed_experts == 256
-                and (not global_server_args_dict["enable_deepep_moe"])
+                and (
+                    not (
+                        global_server_args_dict["enable_deepep_moe"]
+                        or global_server_args_dict["enable_ep_moe"]
+                    )
+                )
             ):
                 self.num_fused_shared_experts = self.config.n_shared_experts
-                global_server_args_dict["disable_shared_experts_fusion"] = 0
+                global_server_args_dict["disable_shared_experts_fusion"] = False
                 log_info_on_rank0(
                     logger,
                     "Deepseek V3/R1 with fp8 can use shared experts fusion optimization when SM version >=90. Shared experts fusion optimization is enabled.",
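
Read as a whole, the patched branch order can be condensed as follows. This is a simplified sketch of the hunk above, not the full method body; the first guard is replaced by a placeholder name and unrelated conditions are elided.

# Condensed sketch of the decision chain after this patch (simplified).
if not_deepseek_v3_r1_on_nv_platform:  # placeholder for the original first guard
    self.num_fused_shared_experts = 0
    global_server_args_dict["disable_shared_experts_fusion"] = True
elif (
    global_server_args_dict["enable_deepep_moe"]
    or global_server_args_dict["enable_ep_moe"]
):
    # New branch: EP-parallel MoE modes skip fusion instead of indexing
    # logical expert 256 past the end of the expert map.
    self.num_fused_shared_experts = 0
    global_server_args_dict["disable_shared_experts_fusion"] = True
elif self.num_fused_shared_experts == 0:
    # SM90+ CUDA, 256 routed experts, and neither EP mode: enable fusion.
    self.num_fused_shared_experts = self.config.n_shared_experts
    global_server_args_dict["disable_shared_experts_fusion"] = False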