Unverified commit 31f32b37, authored by Tim Moon, committed by GitHub

Explicitly use `python3` and `pip3` executables (#1486)



* Explicitly use python3 and pip3
Signed-off-by: Tim Moon <tmoon@nvidia.com>

* Run pre-commit as Python module
Signed-off-by: Tim Moon <tmoon@nvidia.com>

* Replace some missed references to "python" or "pip"
Signed-off-by: Tim Moon <tmoon@nvidia.com>

---------
Signed-off-by: Tim Moon <tmoon@nvidia.com>
Signed-off-by: Tim Moon <4406448+timmoon10@users.noreply.github.com>
parent 8487e506
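For context on the commit message above (not part of the diff): on systems following PEP 394, plain `python` and `pip` may resolve to Python 2 or be missing entirely, while `python3` and `pip3` are unambiguous. A minimal sketch of the related "run as a Python module" pattern, pinned to the running interpreter via `sys.executable`; the `run_precommit` helper is hypothetical, not part of this repository:

# Hypothetical sketch, not from this commit: running pip and pre-commit as
# modules of an explicit interpreter sidesteps the python-vs-python3 ambiguity.
import subprocess
import sys

def run_precommit() -> None:
    # Equivalent to `python3 -m pre_commit run --all-files` when this script
    # is itself launched with python3.
    subprocess.check_call([sys.executable, "-m", "pre_commit", "run", "--all-files"])

run_precommit()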
@@ -45,15 +45,15 @@ def _load_library():
                "TransformerEngine package version mismatch. Found"
                f" {module_name} v{version(module_name)}, transformer-engine"
                f" v{version('transformer-engine')}, and transformer-engine-cu12"
-               f" v{version('transformer-engine-cu12')}. Install transformer-engine using 'pip install"
-               " transformer-engine[pytorch]==VERSION'"
+               f" v{version('transformer-engine-cu12')}. Install transformer-engine using "
+               "'pip3 install transformer-engine[pytorch]==VERSION'"
            )
    if is_package_installed("transformer-engine-cu12"):
        if not is_package_installed(module_name):
            _logger.info(
-               "Could not find package %s. Install transformer-engine using 'pip"
-               " install transformer-engine[pytorch]==VERSION'",
+               "Could not find package %s. Install transformer-engine using "
+               "'pip3 install transformer-engine[pytorch]==VERSION'",
                module_name,
            )
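The error message in the hunk above interpolates `version(...)` from `importlib.metadata`; `is_package_installed` is TransformerEngine's own helper. A standalone sketch of the same lookup using only the stdlib:

# Minimal sketch of the metadata lookup behind the version-mismatch check;
# returns None instead of raising when a distribution is not installed.
from importlib.metadata import PackageNotFoundError, version

def installed_version(package: str):
    try:
        return version(package)
    except PackageNotFoundError:
        return None

print(installed_version("transformer-engine"))
print(installed_version("transformer-engine-cu12"))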
@@ -142,7 +142,7 @@ except PackageNotFoundError:
    if torch.cuda.is_available() and get_device_compute_capability() >= (8, 0) and _NVTE_FLASH_ATTN:
        fa_logger.debug(
            "flash-attn v2 is not installed. To use, please install it by"
-           """ "pip install flash-attn".""",
+           """ "pip3 install flash-attn".""",
        )
else:
    if torch.cuda.is_available() and get_device_compute_capability() >= (10, 0):
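Both branches above gate on `get_device_compute_capability()`, TransformerEngine's wrapper around the device's (major, minor) compute capability. A hedged sketch of the same checks with plain `torch`:

# Sketch assuming only torch: the (8, 0) and (10, 0) thresholds mirror the
# conditions in the hunk above.
import torch

if torch.cuda.is_available():
    capability = torch.cuda.get_device_capability()  # (major, minor) tuple
    if capability >= (8, 0):
        print("sm80 or newer: flash-attn v2 can be used on this device.")
    if capability >= (10, 0):
        print("sm100 or newer: matches the second branch above.")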
@@ -197,8 +197,8 @@ _use_flash_attn_3 = False
# TODO(cyang): update FA to 2.7.3 when its FA3 compilation issue is resolved
# https://github.com/Dao-AILab/flash-attention/issues/1452
_flash_attn_3_installation_steps = """\
-(1) pip install "git+https://github.com/Dao-AILab/flash-attention.git@v2.7.2#egg=flashattn-hopper&subdirectory=hopper"
-(2) python_path=`python -c "import site; print(site.getsitepackages()[0])"`
+(1) pip3 install "git+https://github.com/Dao-AILab/flash-attention.git@v2.7.2#egg=flashattn-hopper&subdirectory=hopper"
+(2) python_path=`python3 -c "import site; print(site.getsitepackages()[0])"`
(3) mkdir -p $python_path/flashattn_hopper
(4) wget -P $python_path/flashattn_hopper https://raw.githubusercontent.com/Dao-AILab/flash-attention/v2.7.2/hopper/flash_attn_interface.py"""
try:
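Step (2) of the installation string shells out to `python3` only to locate site-packages. The same value, computed in-process:

# What step (2) above computes: the first site-packages directory of the
# current interpreter, used as the destination for the flashattn_hopper files.
import site

python_path = site.getsitepackages()[0]
print(python_path)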