Unverified commit d81ac443 authored by HAI, committed by GitHub

MI30x: More graph captures for larger batch sizes and concurrencies (#3420)

parent 2491cc92
@@ -33,6 +33,9 @@ from sglang.srt.model_executor.forward_batch_info import (
     ForwardBatch,
     ForwardMode,
 )
+from sglang.srt.utils import is_hip
+
+is_hip_ = is_hip()
 
 if TYPE_CHECKING:
     from sglang.srt.model_executor.model_runner import ModelRunner
@@ -129,6 +132,8 @@ def get_batch_sizes_to_capture(model_runner: ModelRunner):
         if bs <= model_runner.req_to_token_pool.size
         and bs <= server_args.cuda_graph_max_bs
     ]
+    if is_hip_:
+        capture_bs += [i * 8 for i in range(21, 33)]
     compile_bs = (
         [bs for bs in capture_bs if bs <= server_args.torch_compile_max_bs]
         if server_args.enable_torch_compile
...
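
For reference, a short sketch (plain Python, not part of the patch) of what the HIP-only branch above adds: `is_hip_` and `capture_bs` come from the diff, while `extra_capture_bs` is just an illustrative local name. On MI30x (HIP) devices, range(21, 33) extends the graph-capture batch-size list with multiples of 8 from 168 through 256, which are the larger batch sizes and concurrencies the commit title refers to.

# Illustrative only: the extra capture batch sizes appended when is_hip_ is True.
extra_capture_bs = [i * 8 for i in range(21, 33)]
print(extra_capture_bs)
# [168, 176, 184, 192, 200, 208, 216, 224, 232, 240, 248, 256]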