Unverified Commit 44cb0607 authored by Yineng Zhang, committed by GitHub

chore: upgrade flashinfer 0.4.0 (#11364)

parent 88bb627d
@@ -24,7 +24,7 @@ dependencies = [
     "datasets",
     "einops",
     "fastapi",
-    "flashinfer_python==0.4.0rc3",
+    "flashinfer_python==0.4.0",
     "hf_transfer",
     "huggingface_hub",
     "interegular",
@@ -70,7 +70,7 @@ srt = [
     "torchaudio==2.8.0",
     "torchvision",
     "cuda-python",
-    "flashinfer_python==0.4.0rc3",
+    "flashinfer_python==0.4.0",
 ]
 # HIP (Heterogeneous-computing Interface for Portability) for AMD
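Both dependency lists now pin the same release, so a quick post-install sanity check can confirm the environment actually picked up the new pin. A minimal sketch using only the standard library; the pin string is the one from the hunks above:

```python
# Minimal sketch: confirm the installed flashinfer_python matches the pin.
from importlib.metadata import PackageNotFoundError, version

EXPECTED = "0.4.0"  # the pin from the pyproject.toml hunks above

try:
    installed = version("flashinfer_python")
except PackageNotFoundError:
    raise SystemExit("flashinfer_python is not installed")

if installed != EXPECTED:
    raise SystemExit(f"expected {EXPECTED}, found {installed}")
print(f"flashinfer_python {installed} OK")
```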
@@ -703,7 +703,7 @@ def _set_envs_and_config(server_args: ServerArgs):
     if server_args.attention_backend == "flashinfer":
         assert_pkg_version(
             "flashinfer_python",
-            "0.4.0rc3",
+            "0.4.0",
             "Please uninstall the old version and "
             "reinstall the latest version by following the instructions "
             "at https://docs.flashinfer.ai/installation.html.",
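`assert_pkg_version` itself is not shown in this diff; it is sglang's own helper. A hedged sketch of the kind of check it performs, assuming it compares the installed distribution against the required version and surfaces the supplied hint on mismatch:

```python
# Hedged sketch of the guard; the real assert_pkg_version lives in sglang's
# utils and may differ (e.g. it may treat the version as a minimum).
from importlib.metadata import PackageNotFoundError, version


def assert_pkg_version(pkg: str, required: str, message: str) -> None:
    try:
        installed = version(pkg)
    except PackageNotFoundError:
        raise RuntimeError(f"{pkg} is not installed. {message}")
    if installed != required:
        raise RuntimeError(
            f"{pkg}=={installed} found but {required} is required. {message}"
        )
```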
@@ -1060,7 +1060,7 @@ def fast_mla_decode_plan(
     try:
         # Standard version with just the required arguments (no use_profiler)
-        self._cached_module.plan.default(
+        self._cached_module.plan(
             self._float_workspace_buffer,
             self._int_workspace_buffer,
             self._pin_memory_int_workspace_buffer,
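The dropped `.default` suffix suggests that in flashinfer 0.4.0 the cached module's `plan` is exposed as a plain callable rather than a torch op-overload packet (this reading is inferred from the diff, not stated in it). A hypothetical shim that works across both shapes:

```python
# Hypothetical compatibility shim, not part of the commit: pre-0.4.0 the
# cached module's `plan` appears to be an op-overload packet (hence
# `.default`), while 0.4.0 exposes a plain callable.
def resolve_plan(cached_module):
    plan = cached_module.plan
    # An OpOverloadPacket exposes a `.default` overload; a plain Python
    # callable normally does not, so getattr falls back to `plan` itself.
    return getattr(plan, "default", plan)


# Tiny self-contained check with a stand-in module object.
class _FakeModule:
    @staticmethod
    def plan(*args):
        return "planned"


assert resolve_plan(_FakeModule)(1, 2) == "planned"
```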
@@ -74,3 +74,5 @@ fi
 # Show current packages
 $PIP_CMD list
 python3 -c "import torch; print(torch.version.cuda)"
+
+python3 -m flashinfer clear-cache
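Clearing flashinfer's JIT cache after an upgrade avoids reusing kernels compiled against the previous release (the rationale is inferred; the diff only adds the command). The same CLI can be invoked from Python, e.g. in a test-setup hook:

```python
# Minimal sketch: run the same cache-clearing CLI from Python; the
# `clear-cache` subcommand is the one the CI script above invokes.
import subprocess
import sys

subprocess.run([sys.executable, "-m", "flashinfer", "clear-cache"], check=True)
```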