Unverified Commit 18c27131 authored by yiakwy-xpu-ml-framework-team, committed by GitHub

[tools] add fp8 max/min constant in utils (#3959)

parent ccdd10c8
@@ -14,6 +14,7 @@
 """Common utilities."""
 import base64
+import builtins
 import ctypes
 import dataclasses
 import io
@@ -72,12 +73,25 @@
 logger = logging.getLogger(__name__)
 show_time_cost = False
 time_infos = {}
 
+HIP_FP8_E4M3_FNUZ_MAX = 224.0
+
+
 # https://pytorch.org/docs/stable/notes/hip.html#checking-for-hip
 def is_hip() -> bool:
     return torch.version.hip is not None
 
+
+if is_hip():
+    FP8_E4M3_MAX = HIP_FP8_E4M3_FNUZ_MAX
+else:
+    FP8_E4M3_MAX = torch.finfo(torch.float8_e4m3fn).max
+FP8_E4M3_MIN = -FP8_E4M3_MAX
+
+builtins.FP8_E4M3_MAX = FP8_E4M3_MAX
+builtins.FP8_E4M3_MIN = FP8_E4M3_MIN
+
+
 def is_rocm() -> bool:
     return torch.cuda.is_available() and torch.version.hip
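
For context only (not part of this commit): a minimal sketch of how module-level FP8 E4M3 bounds like these are typically consumed for per-tensor quantization. quantize_fp8_e4m3 is a hypothetical helper, not an SGLang API; it assumes a PyTorch build that provides the float8_e4m3fn and float8_e4m3fnuz dtypes.

    import torch

    # Mirrors the commit: ROCm's e4m3fnuz format has a smaller usable max
    # than CUDA's e4m3fn (whose finfo max is 448.0).
    HIP_FP8_E4M3_FNUZ_MAX = 224.0

    def is_hip() -> bool:
        return torch.version.hip is not None

    FP8_E4M3_MAX = HIP_FP8_E4M3_FNUZ_MAX if is_hip() else torch.finfo(torch.float8_e4m3fn).max
    FP8_E4M3_MIN = -FP8_E4M3_MAX

    def quantize_fp8_e4m3(x: torch.Tensor):
        """Hypothetical helper: scale, clamp to the FP8 E4M3 range, cast; returns (fp8 tensor, scale)."""
        amax = x.abs().max().clamp(min=1e-12)          # avoid division by zero
        scale = FP8_E4M3_MAX / amax                    # per-tensor scale factor
        x_scaled = (x.float() * scale).clamp(FP8_E4M3_MIN, FP8_E4M3_MAX)
        dtype = torch.float8_e4m3fnuz if is_hip() else torch.float8_e4m3fn
        return x_scaled.to(dtype), scale

    x = torch.randn(4, 8)
    x_fp8, scale = quantize_fp8_e4m3(x)
    print(x_fp8.dtype, scale.item())

Clamping to [FP8_E4M3_MIN, FP8_E4M3_MAX] before the cast keeps values inside the representable range of the target FP8 dtype on both CUDA and ROCm builds.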