Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
zhaoyu6
sglang
Commits
9465b668
"vscode:/vscode.git/clone" did not exist on "940ac9ed6f1291925429e9fb9f13fe3a5901c19f"
Unverified
Commit
9465b668
authored
Jun 24, 2024
by
Lianmin Zheng
Committed by
GitHub
Jun 24, 2024
Browse files
Allow running with vllm==0.4.3 (#561)
parent
05471f21
Changes
2
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
49 additions
and
8 deletions
+49
-8
python/sglang/srt/constrained/__init__.py
python/sglang/srt/constrained/__init__.py
+11
-5
python/sglang/srt/layers/fused_moe.py
python/sglang/srt/layers/fused_moe.py
+38
-3
No files found.
python/sglang/srt/constrained/__init__.py
View file @
9465b668
import json
from typing import Dict, Optional, Union

from pydantic import BaseModel

# sglang's constrained decoding is built on `outlines`; the import layout
# changed across outlines releases, so everything version-sensitive is
# guarded and a clear install hint is printed before re-raising.
try:
    from outlines.caching import cache as disk_cache
    from outlines.caching import disable_cache
    from outlines.fsm.guide import RegexGuide
    from outlines.fsm.regex import (
        FSMInfo,
        make_byte_level_fsm,
        make_deterministic_fsm,
    )
    from outlines.models.transformers import TransformerTokenizer
except ImportError as e:
    print(
        f'\nError: {e}'
        '. Please install a new version of outlines by `pip install "outlines>=0.0.44"`\n'
    )
    raise
try
:
try
:
from
outlines.fsm.json_schema
import
build_regex_from_object
from
outlines.fsm.json_schema
import
build_regex_from_object
except
ImportError
:
except
ImportError
:
...
...
python/sglang/srt/layers/fused_moe.py
View file @
9465b668
...
@@ -512,8 +512,13 @@ def fused_moe(
...
@@ -512,8 +512,13 @@ def fused_moe(
# Check constraints.
# Check constraints.
assert
gating_output
.
shape
[
1
]
==
w1
.
shape
[
0
],
"Number of experts mismatch"
assert
gating_output
.
shape
[
1
]
==
w1
.
shape
[
0
],
"Number of experts mismatch"
topk_weights
,
topk_ids
=
fused_topk
(
hidden_states
,
gating_output
,
topk
,
if
hasattr
(
ops
,
"topk_softmax"
):
renormalize
)
topk_weights
,
topk_ids
=
fused_topk
(
hidden_states
,
gating_output
,
topk
,
renormalize
)
else
:
topk_weights
,
topk_ids
=
fused_topk_v0_4_3
(
hidden_states
,
gating_output
,
topk
,
renormalize
)
return
fused_experts
(
hidden_states
,
return
fused_experts
(
hidden_states
,
w1
,
w1
,
w2
,
w2
,
...
@@ -525,4 +530,34 @@ def fused_moe(
...
@@ -525,4 +530,34 @@ def fused_moe(
w1_scale
=
w1_scale
,
w1_scale
=
w1_scale
,
w2_scale
=
w2_scale
,
w2_scale
=
w2_scale
,
a1_scale
=
a1_scale
,
a1_scale
=
a1_scale
,
a2_scale
=
a2_scale
)
a2_scale
=
a2_scale
)
\ No newline at end of file
def fused_topk_v0_4_3(
    hidden_states: torch.Tensor,
    gating_output: torch.Tensor,
    topk: int,
    renormalize: bool,
):
    """Top-k routing fallback for vllm==0.4.3 (which lacks `ops.topk_softmax`).

    Runs the vllm 0.4.3 `_moe_C.topk_softmax` kernel on `gating_output` to pick
    `topk` experts per token, optionally dividing the weights by their per-token
    sum so each row sums to 1.

    Returns:
        (topk_weights, topk_ids): float32 weights and int32 expert ids, both of
        shape (num_tokens, topk) on `hidden_states.device`.
    """
    # Local import on purpose: this module only exists on vllm==0.4.3 installs.
    import vllm._moe_C as moe_kernels

    num_tokens = hidden_states.shape[0]
    device = hidden_states.device

    topk_weights = torch.empty(
        num_tokens, topk, dtype=torch.float32, device=device
    )
    topk_ids = torch.empty(num_tokens, topk, dtype=torch.int32, device=device)
    # The kernel demands this output buffer even though we discard it below.
    token_expert_indicies = torch.empty(
        num_tokens, topk, dtype=torch.int32, device=device
    )

    moe_kernels.topk_softmax(
        topk_weights,
        topk_ids,
        token_expert_indicies,
        gating_output.float(),  # TODO(woosuk): Optimize this.
    )
    del token_expert_indicies  # Not used. Will be used in the future.

    if renormalize:
        topk_weights = topk_weights / topk_weights.sum(dim=-1, keepdim=True)

    return topk_weights, topk_ids
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment