Unverified Commit 5f2595be authored by Yineng Zhang's avatar Yineng Zhang Committed by GitHub
Browse files

hotfix: checking for HIP (#2485)

parent 0ba2c589
......@@ -19,7 +19,7 @@ from sglang.srt.layers.quantization.base_config import (
)
from sglang.srt.utils import set_weight_attrs
if torch.cuda.is_available() or torch.hip.is_available():
if torch.cuda.is_available():
from sglang.srt.layers.fused_moe_triton.fused_moe import fused_experts
else:
fused_experts = None # type: ignore
......
......@@ -92,7 +92,7 @@ def is_flashinfer_available():
"""
if not get_bool_env_var("SGLANG_IS_FLASHINFER_AVAILABLE", default="true"):
return False
return torch.cuda.is_available() and not is_hip()
return torch.cuda.is_available() and torch.version.cuda
def is_ipv6(address):
......@@ -1071,9 +1071,6 @@ def get_device_name(device_id: int = 0) -> str:
if hasattr(torch, "cuda") and torch.cuda.is_available():
return torch.cuda.get_device_name(device_id)
if hasattr(torch, "hip") and torch.hip.is_available():
return torch.hip.get_device_name(device_id)
if hasattr(torch, "xpu") and torch.xpu.is_available():
return torch.xpu.get_device_name(device_id)
......@@ -1086,9 +1083,6 @@ def get_device_capability(device_id: int = 0) -> Tuple[int, int]:
if hasattr(torch, "cuda") and torch.cuda.is_available():
major, minor = torch.cuda.get_device_capability(device_id)
if hasattr(torch, "hip") and torch.hip.is_available():
major, minor = torch.cuda.get_device_capability(device_id)
if hasattr(torch, "xpu") and torch.xpu.is_available():
major, minor, *_ = torch.xpu.get_device_capability(device_id)["version"].split(
"."
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment