change / sglang · Commits · 48afa8f1

Unverified commit 48afa8f1, authored Aug 13, 2025 by ronnie_zheng, committed by GitHub on Aug 12, 2025
[feat] Enable Ascend profiling on SGLang (#8610)

Co-authored-by: liyou_b <2953090824@qq.com>

Parent: 2ecbd8b8
Showing 1 changed file with 28 additions and 8 deletions.

python/sglang/srt/managers/scheduler_profiler_mixin.py (+28, -8):
@@ -8,6 +8,18 @@ import torch
 from sglang.srt.managers.io_struct import ProfileReq, ProfileReqOutput, ProfileReqType
 from sglang.srt.model_executor.forward_batch_info import ForwardMode
+from sglang.srt.utils import is_npu
+
+_is_npu = is_npu()
+if _is_npu:
+    import torch_npu
+
+    patches = [
+        ["profiler.profile", torch_npu.profiler.profile],
+        ["profiler.ProfilerActivity.CUDA", torch_npu.profiler.ProfilerActivity.NPU],
+        ["profiler.ProfilerActivity.CPU", torch_npu.profiler.ProfilerActivity.CPU],
+    ]
+    torch_npu._apply_patches(patches)
 
 logger = logging.getLogger(__name__)
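The patch table above redirects the torch.profiler names that the mixin already uses to their torch_npu counterparts (note that ProfilerActivity.CUDA maps to ProfilerActivity.NPU), so the rest of the profiler code stays device-agnostic. The snippet below is a minimal sketch of how such a table could be applied; torch_npu._apply_patches is a private torch_npu helper and this is not its actual implementation, so the stand-in namespaces are purely illustrative.

```python
# Illustrative only: apply ["dotted.path", replacement] pairs under a root object.
# This is NOT torch_npu._apply_patches; it just demonstrates the idea.
from types import SimpleNamespace


def apply_patches(root, patches):
    """Replace the attribute named by each dotted path under `root`."""
    for dotted_path, replacement in patches:
        *parents, leaf = dotted_path.split(".")
        target = root
        for name in parents:  # walk e.g. root.profiler
            target = getattr(target, name)
        setattr(target, leaf, replacement)  # e.g. root.profiler.profile -> NPU version


# Toy stand-ins for torch / torch_npu so the sketch runs anywhere.
fake_torch = SimpleNamespace(profiler=SimpleNamespace(profile="cuda_profile"))
apply_patches(fake_torch, [["profiler.profile", "npu_profile"]])
assert fake_torch.profiler.profile == "npu_profile"
```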
@@ -136,6 +148,13 @@ class SchedulerProfilerMixin:
                 activities=torchprof_activities,
                 with_stack=with_stack if with_stack is not None else True,
                 record_shapes=record_shapes if record_shapes is not None else False,
+                on_trace_ready=(
+                    None
+                    if not _is_npu
+                    else torch_npu.profiler.tensorboard_trace_handler(
+                        self.torch_profiler_output_dir
+                    )
+                ),
             )
             self.torch_profiler.start()
             self.profile_in_progress = True
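For comparison, here is a minimal sketch of the stock torch.profiler setup that the non-NPU branch corresponds to: when _is_npu is false, on_trace_ready stays None and the trace is exported manually later; when it is true, torch_npu's tensorboard_trace_handler takes over. The output directory and the workload below are illustrative placeholders, not part of the change.

```python
# Minimal sketch of the stock torch.profiler path (the CUDA/CPU side of the diff).
import torch
from torch.profiler import ProfilerActivity, profile, tensorboard_trace_handler

with profile(
    activities=[ProfilerActivity.CPU],  # add ProfilerActivity.CUDA on a GPU machine
    with_stack=True,
    record_shapes=False,
    # When set, the handler writes a TensorBoard-readable trace once profiling
    # finishes; the diff supplies torch_npu's handler here when _is_npu is True.
    on_trace_ready=tensorboard_trace_handler("/tmp/sglang-profile"),
) as prof:
    torch.matmul(torch.randn(256, 256), torch.randn(256, 256))
```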
@@ -166,15 +185,16 @@ class SchedulerProfilerMixin:
         logger.info("Stop profiling" + stage_suffix + "...")
         if self.torch_profiler is not None:
             self.torch_profiler.stop()
-            self.torch_profiler.export_chrome_trace(
-                os.path.join(
-                    self.torch_profiler_output_dir,
-                    self.profile_id
-                    + f"-TP-{self.tp_rank}"
-                    + stage_suffix
-                    + ".trace.json.gz",
-                )
-            )
+            if not _is_npu:
+                self.torch_profiler.export_chrome_trace(
+                    os.path.join(
+                        self.torch_profiler_output_dir,
+                        self.profile_id
+                        + f"-TP-{self.tp_rank}"
+                        + stage_suffix
+                        + ".trace.json.gz",
+                    )
+                )
             torch.distributed.barrier(self.tp_cpu_group)
 
         if self.rpd_profiler is not None:
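The stop path now calls export_chrome_trace only off-NPU; on Ascend the handler passed via on_trace_ready is expected to have written its output by the time stop() returns, so there is nothing left to export by hand. A condensed sketch of that control flow follows, with the scheduler's attributes replaced by plain parameters (all names here are placeholders):

```python
# Condensed sketch of the stop logic above; `prof` is an active profiler object
# (torch.profiler.profile or torch_npu.profiler.profile) and the other
# parameters stand in for the scheduler's attributes.
import os


def stop_and_export(prof, is_npu, output_dir, profile_id, tp_rank, stage_suffix):
    prof.stop()
    if not is_npu:
        # CUDA/CPU: write one gzipped Chrome trace per tensor-parallel rank.
        prof.export_chrome_trace(
            os.path.join(
                output_dir,
                f"{profile_id}-TP-{tp_rank}{stage_suffix}.trace.json.gz",
            )
        )
    # NPU: torch_npu's tensorboard_trace_handler (wired via on_trace_ready) is
    # assumed to have emitted its files during stop(), so nothing is exported here.
```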