"vscode:/vscode.git/clone" did not exist on "e2858cefebaa13d8179d770d2910e625ae322174"
Unverified Commit 0ba2c589 authored by Ke Bao, committed by GitHub

Remove cuda graph batch size adjustment for dp attention (#2484)

parent fccbfa37
@@ -221,12 +221,10 @@ class ServerArgs:
         if self.enable_dp_attention:
             self.dp_size = self.tp_size
             self.chunked_prefill_size = self.chunked_prefill_size // 2
-            self.cuda_graph_max_bs = min(self.cuda_graph_max_bs, 96)
             self.schedule_conservativeness = self.schedule_conservativeness * 0.3
             self.disable_overlap_schedule = True
             logger.warning(
                 f"DP attention is enabled. The chunked prefill size is adjusted to {self.chunked_prefill_size} to avoid MoE kernel issues. "
-                f"The CUDA graph max batch size is adjusted to {self.cuda_graph_max_bs}. "
                 f"The schedule conservativeness is adjusted to {self.schedule_conservativeness}. "
                 "Data parallel size is adjusted to be the same as tensor parallel size. "
                 "Overlap scheduler is disabled."
...
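
For context, a minimal, standalone sketch of the adjustment logic as it reads after this commit. The simplified ServerArgs stand-in below and its default values (tp_size=8, chunked_prefill_size=8192, schedule_conservativeness=1.0) are illustrative assumptions, not the real class from sglang, which carries many more options:

import logging
from dataclasses import dataclass

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)

@dataclass
class ServerArgs:
    # Illustrative stand-in: only the fields touched by the
    # DP-attention adjustment are included; defaults are assumptions.
    tp_size: int = 8
    dp_size: int = 1
    chunked_prefill_size: int = 8192
    schedule_conservativeness: float = 1.0
    enable_dp_attention: bool = False
    disable_overlap_schedule: bool = False

    def __post_init__(self):
        if self.enable_dp_attention:
            self.dp_size = self.tp_size
            self.chunked_prefill_size = self.chunked_prefill_size // 2
            # Note: after this commit, cuda_graph_max_bs is no longer
            # capped at 96 here.
            self.schedule_conservativeness = self.schedule_conservativeness * 0.3
            self.disable_overlap_schedule = True
            logger.warning(
                f"DP attention is enabled. The chunked prefill size is adjusted to "
                f"{self.chunked_prefill_size} to avoid MoE kernel issues. "
                f"The schedule conservativeness is adjusted to {self.schedule_conservativeness}. "
                "Data parallel size is adjusted to be the same as tensor parallel size. "
                "Overlap scheduler is disabled."
            )

args = ServerArgs(enable_dp_attention=True)
assert args.dp_size == args.tp_size == 8
assert args.chunked_prefill_size == 4096

The removed line, self.cuda_graph_max_bs = min(self.cuda_graph_max_bs, 96), previously capped the CUDA graph capture batch size whenever DP attention was enabled; with this commit, cuda_graph_max_bs keeps its default or user-supplied value, and the corresponding sentence is dropped from the warning message.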