Unverified Commit d89ba5b8 authored by Yichen Yan's avatar Yichen Yan Committed by GitHub
Browse files

[Build] Migrate to scikit-build-core (#939)



* cleanup

* init

* build first wheel that may not work

* build cython ext

* fix tvm build

* use sabi

* update rpath to support auditwheel

* pass editable build

* update ci

* fix warnings

* do not use ccache in self host runner

* test local uv cache

* test pip index

* update lib search to respect new lib location

* fix

* update ci

* enable cuda by default

* update src map

* fix

* fix

* fix

* Generate version with backend and git information at build time

* copy tvm_cython to wheels

* fix tvm lib search

* fmt

* remove unused

* auto detect ccache

* add back backend-related files

* remove jit cython adaptor to simplify code

* fmt

* fix ci

* ci fix 2

* ci fix 3

* workaround metal

* ci fix 4

* fmt

* fmt

* Revert "ci fix 4"

This reverts commit d1de8291c3e40927955f3ad3cf87a75c78813676.

* tmp

* fix metal

* trivial cleanup

* add detailed build-time version for cuda

* add back mlc

* Restore wheel info and other trivial updates

* update

* fix cuda

* upd

* fix metal ci

* test for ga build

* test for nvidia/cuda

* test ubuntu 20

* fix

* fix

* Do not use `uv build`

* fix

* fix

* log toolchain version

* merge wheel

* update

* debug

* fix

* update

* skip rocm

* update artifacts each

* fix

* fix

* add mac

* fix cache

* fix cache

* fix cache

* reset and add comment

* upd

* fix git version

* update deps

* trivial update

* use in-tree build dir and install to src to speedup editable build

* Revert "use in-tree build dir and install to src to speedup editable build"

This reverts commit 6ab87b05c5eed811210136b8dca4fc3677dd51f2.

* add build-dir

* update docs

* remove old scripts

* [1/n] cleanup scripts

* [Lint]: [pre-commit.ci] auto fixes [...]

* fix and update

* wait for tvm fix

* revert some tmp fix

* fix

* fix

* spell

* doc update

* test cibuildwheel

* fix and test macos on ci

* Update .github/workflows/dist.yml
Co-authored-by: default avatarXuehai Pan <XuehaiPan@outlook.com>

* fix

* test ga event

* cleanup

* bump tvm to support api3

* test final version

* add cron

* Update .github/workflows/dist.yml
Co-authored-by: default avatarXuehai Pan <XuehaiPan@outlook.com>

* fix

* test ccache for metal cibuildwheel

* test newer macos

* finish

---------
Co-authored-by: default avatarpre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: default avatarXuehai Pan <XuehaiPan@outlook.com>
parent bab57f23
# Install every supported CPython, then build one wheel per version with tox.
multi_python_version=("3.8" "3.9" "3.10" "3.11" "3.12")
for python_version in "${multi_python_version[@]}"; do
    echo "Installing Python ${python_version}..."
    # NOTE(review): plain apt repos may not carry all of these versions;
    # presumably a deadsnakes-style PPA is configured earlier — confirm.
    apt-get install -y "python${python_version}"
done
pip install -r requirements-build.txt
# Remove a stale dist directory so old artifacts are not re-shipped.
if [ -d dist ]; then
    rm -r dist
fi
# Build source distribution (disabled for now)
# python setup.py sdist --formats=gztar,zip
# Build wheels for different Python versions.
echo "Building wheels for multiple Python versions..."
# Test the command directly instead of inspecting $? afterwards, which is
# fragile (any statement in between would clobber the exit status).
if ! tox -e py38-pypi,py39-pypi,py310-pypi,py311-pypi,py312-pypi; then
    echo "Error: Failed to build the wheels."
    exit 1
else
    echo "Wheels built successfully."
fi
\ No newline at end of file
[project]
name = "tilelang"
authors = [{name = "Tile-AI"}]
maintainers = [{name = "Lei Wang", email = "leiwang1999@outlook.com"}]
description = "A tile level programming language to generate high performance code."
# Keep the dotted readme.* keys contiguous; they were previously split
# around the classifiers list, which is valid TOML but hard to read.
readme.file = "README.md"
readme.content-type = "text/markdown"
license = "MIT"
keywords = ["BLAS", "CUDA", "HIP", "Code Generation", "TVM"]
classifiers = [
    "Environment :: GPU",
    "Operating System :: POSIX :: Linux",
    "Operating System :: OS Independent",
    "Operating System :: MacOS",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Intended Audience :: Developers",
    "Intended Audience :: Science/Research",
    # The "Topic ::" prefix is required for this to be a valid trove classifier.
    "Topic :: Scientific/Engineering :: Artificial Intelligence",
]
requires-python = ">=3.8"
dynamic = ["version"]
# Somehow this does not work, hard-code for now
# dynamic = ["version", "dependencies"]
# [tool.setuptools.dynamic]
# dependencies = {file = ["requirements.txt"]}
dependencies = [
    "numpy>=1.23.5",
    "tqdm>=4.62.3",
    "typing_extensions>=4.10.0",
    "cloudpickle",
    "ml_dtypes",
    "psutil",
    "torch",
]
[project.optional-dependencies]
# ml_dtypes must be >= 0.5.1 if you want to enable fp4 support.
fp4 = ["ml_dtypes>=0.5.1"]
[build-system]
requires = [
    "Cython>=3.0.0",
    "build",
    "cmake>=3.26",
    "packaging",
    "patchelf",
    "scikit-build-core",
    # The two setuptools constraints (>=61 and >=63) are merged; only the
    # stricter lower bound is meaningful.
    "setuptools>=63",
    "wheel",
]
# Defining `build-backend` twice is invalid TOML (duplicate key). Keep only
# the scikit-build-core backend this project migrated to.
build-backend = "scikit_build_core.build"
[tool.scikit-build]
# Tag the wheel for the CPython 3.8 stable ABI family.
wheel.py-api = "cp38"
cmake.version = ">=3.26.1"
build-dir = "build"
# editable.rebuild = true
# Include backend and git info in version
metadata.version.provider = "version_provider"
metadata.version.provider-path = "."
# Required because custom metadata providers are an experimental
# scikit-build-core feature.
experimental = true

[tool.scikit-build.wheel.packages]
tilelang = "tilelang"
"tilelang/src" = "src"
"tilelang/3rdparty" = "3rdparty"
# TODO: we might want to not include these in wheel?
"tilelang/benchmark" = "benchmark"
"tilelang/examples" = "examples"
"tilelang/testing" = "testing"

[tool.yapf]
based_on_style = "yapf"
......@@ -67,3 +132,50 @@ ignore = [
[tool.ruff.lint.per-file-ignores]
# Vendored and generated code is exempt from all lint rules.
"3rdparty/**/*" = ["ALL"]
"examples/deepseek_v32/inference/**/*" = ["ALL"]
[tool.cibuildwheel]
archs = ["auto64"]
# wait for tvm fix
build = "cp38-*"

[tool.cibuildwheel.macos]
archs = ["arm64"]

[tool.cibuildwheel.linux]
# Pin to glibc 2.17 for x86 and 2.28 for aarch64 for now
manylinux-x86_64-image = "manylinux2014"
manylinux-aarch64-image = "manylinux_2_28"
skip = "*-musllinux*"
environment-pass = ["CUDA_VERSION"]
# Exclude the CUDA driver/toolkit libraries from the repaired wheel (they are
# provided by the host at runtime), then verify the stable-ABI claim.
repair-wheel-command = [
    "auditwheel repair --exclude libcuda.so.1 --exclude /usr/local/cuda\\* -w {dest_dir} {wheel}",
    "pipx run abi3audit --strict --report {wheel}",
]
# Install CUDA runtime and stub driver library
# manylinux_2_28 uses gcc 14, which needs CUDA 12.8
before-all = """
set -eux
case "$(uname -m)" in
    "x86_64")
        yum-config-manager --add-repo https://developer.download.nvidia.cn/compute/cuda/repos/rhel7/x86_64/cuda-rhel7.repo
        ;;
    "aarch64")
        dnf config-manager --add-repo https://developer.download.nvidia.com/compute/cuda/repos/rhel8/sbsa/cuda-rhel8.repo
        ;;
    *)
        exit 1
        ;;
esac
# Assume CUDA_VERSION=xx.y
v=${CUDA_VERSION:-12.4}
v=${v:0:4}
v=${v/./-}
yum install -y cuda-minimal-build-${v} cuda-driver-devel-${v} cuda-nvrtc-devel-${v}
"""

[tool.cibuildwheel.linux.environment]
# Equivalent to `source /opt/rh/gcc-toolset-12/enable`, safe when gcc-toolset-12 is not installed
PATH = "/usr/local/cuda/bin:$PATH"
# Should be mirrored in pyproject.toml
Cython>=3.0.0
build
cmake>=3.26
packaging
setuptools>=61
torch
wheel
tox
auditwheel
patchelf
ninja
# lint requirements
-r requirements-lint.txt
# build requirements
# Requirements to run local build with `--no-build-isolation` or other developments
Cython>=3.0.0
build
cmake>=3.26
# runtime requirements
cffi
cpplint
Cython
docutils
dtlib
numpy>=1.23.5
pytest>=6.2.4
pytest_xdist>=2.2.1
packaging>=21.0
PyYAML
tqdm>=4.62.3
typing_extensions>=4.10.0
requests
cloudpickle
ml_dtypes
psutil
scipy
packaging
setuptools>=61
torch
tabulate
wheel
setuptools
\ No newline at end of file
tox
ninja
auditwheel; platform_system == 'Linux'
patchelf; platform_system == 'Linux'
delocate; platform_system == 'Darwin'
# runtime requirements
Cython>=3.0.0
numpy>=1.23.5
tqdm>=4.62.3
typing_extensions>=4.10.0
cloudpickle
# ml_dtypes should be greater than 0.5.1
# if you want to enable fp4
ml_dtypes
psutil
torch
import fcntl
import functools
import hashlib
import io
import subprocess
import shutil
from setuptools import setup, find_packages, Extension
from setuptools.command.build_py import build_py
from setuptools.command.sdist import sdist
from typing import List, Optional
import re
import tarfile
from io import BytesIO
from pathlib import Path
import os
import sys
import site
import sysconfig
import urllib.request
from packaging.version import Version
import platform
import multiprocessing
from setuptools.command.build_ext import build_ext
import importlib
import logging
# Configure root logging once for the whole build script; timestamps help
# correlate build steps with CI logs.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S')
logger = logging.getLogger(__name__)
def _read_bool_env(name: str, default: bool = False) -> bool:
if env := os.environ.get(name):
env = env.lower()
if env in ['on', '1', 'true']:
return True
elif env in ['', 'off', '0', 'false']:
return False
return default
# Build-control flags read from the environment ("1"/"true"/"on" enable).
PYPI_BUILD = _read_bool_env('PYPI_BUILD')
PACKAGE_NAME = "tilelang"
ROOT_DIR = os.path.dirname(__file__)
# On-disk cache directory for compiled Cython JIT adapters.
CYCACHE = Path(os.path.join(ROOT_DIR, "tilelang", "jit", "adapter", "cython", ".cycache"))
if not CYCACHE.exists():
    # tvm may need this directory to exist even when the Cython backend is
    # not built, so create it eagerly here.
    CYCACHE.mkdir(exist_ok=True)
IS_LINUX = platform.system() == 'Linux'
# platform.mac_ver()[2] is the machine arch on macOS ('' elsewhere), so this
# is True only on Apple Silicon Macs.
MAYBE_METAL = platform.mac_ver()[2] == 'arm64'
# Build TVM against LLVM when enabled.
USE_LLVM = _read_bool_env('USE_LLVM')
# Enable the ROCm backend.
USE_ROCM = _read_bool_env("USE_ROCM")
# Enable the Metal backend (defaults on for Apple Silicon).
USE_METAL = _read_bool_env("USE_METAL", MAYBE_METAL)
# Enable the CUDA backend (default on Linux unless ROCm was requested).
USE_CUDA = _read_bool_env("USE_CUDA", IS_LINUX and not USE_ROCM)
# Build with Debug mode
DEBUG_MODE = _read_bool_env('DEBUG_MODE')
# Include commit ID in wheel filename and package metadata
WITH_COMMITID = _read_bool_env("WITH_COMMITID")
# Prebuilt shared libraries relocated into the wheel (.so Linux, .dylib macOS).
TVM_PREBUILD_ITEMS = [
    "libtvm_runtime.so",
    "libtvm.so",
    "libtilelang.so",
    "libtilelang_module.so",
] if IS_LINUX else [
    "libtvm_runtime.dylib",
    "libtvm.dylib",
    "libtilelang.dylib",
    "libtilelang_module.dylib",
]
# macOS debug-symbol bundles to delete from the build tree.
# from tvm's internal cython?
TVM_PREBUILD_ITEMS_TO_DELETE = [] if IS_LINUX else [
    'libtvm_runtime.dylib.dSYM',
    'libtvm.dylib.dSYM',
]
def load_module_from_path(module_name, path):
    """Load a Python source file as a module and register it in sys.modules.

    :param module_name: Name to register the module under in sys.modules.
    :param path: Filesystem path of the .py file to execute.
    :return: The loaded module object.
    :raises ImportError: If no import spec can be created for *path*.
    """
    # The file only does `import importlib`; `importlib.util` is a submodule
    # that is not guaranteed to be bound by that, so import it explicitly.
    import importlib.util
    spec = importlib.util.spec_from_file_location(module_name, path)
    if spec is None or spec.loader is None:
        raise ImportError(f"Cannot load module {module_name} from {path}")
    module = importlib.util.module_from_spec(spec)
    sys.modules[module_name] = module
    spec.loader.exec_module(module)
    return module
# Load tilelang/env.py straight from the source tree (the package itself is
# not importable yet at build time) to discover toolkit install locations.
envs = load_module_from_path('env', os.path.join(ROOT_DIR, PACKAGE_NAME, 'env.py'))
CUDA_HOME = envs.CUDA_HOME
ROCM_HOME = envs.ROCM_HOME
# A requested backend must come with a detected toolkit home.
if USE_ROCM and not ROCM_HOME:
    raise ValueError(
        "ROCM support is enabled (USE_ROCM=True) but ROCM_HOME is not set or detected.")
if USE_CUDA and not CUDA_HOME:
    raise ValueError(
        "CUDA support is enabled by default on linux if `USE_ROCM=False`," \
        " but CUDA_HOME is not set or detected.")
# Ensure one of CUDA or ROCM is available
if IS_LINUX and not (CUDA_HOME or ROCM_HOME):
    raise ValueError(
        "Failed to automatically detect CUDA or ROCM installation. Please set the CUDA_HOME or ROCM_HOME environment variable manually (e.g., export CUDA_HOME=/usr/local/cuda or export ROCM_HOME=/opt/rocm)."
    )
def get_path(*filepath) -> str:
    """Resolve the given path components relative to the repository root."""
    parts = (ROOT_DIR,) + filepath
    return os.path.join(*parts)
def get_requirements(file_path: str = "requirements.txt") -> List[str]:
    """Return the dependency specifiers listed (one per line) in a requirements file."""
    with open(get_path(file_path)) as req_file:
        content = req_file.read()
    return content.strip().split("\n")
def find_version(version_file_path: str) -> str:
    """Return the stripped contents of the version file at *version_file_path*.

    Adapted from https://github.com/ray-project/ray/blob/0b190ee1160eeca9796bc091e07eaebf4c85b511/python/setup.py

    :raises FileNotFoundError: If the file does not exist.
    """
    if not os.path.exists(version_file_path):
        raise FileNotFoundError(f"Version file not found at {version_file_path}")
    # strip() removes surrounding whitespace/newlines from the stored version.
    with open(version_file_path, "r") as fh:
        return fh.read().strip()
def get_nvcc_cuda_version():
    """Run `nvcc -V` from CUDA_HOME and parse out the CUDA release version.

    Adapted from https://github.com/NVIDIA/apex/blob/8b7a1ff183741dd8f9b87e7bafd04cfde99cea28/setup.py
    """
    nvcc = os.path.join(CUDA_HOME, "bin", "nvcc")
    tokens = subprocess.check_output([nvcc, "-V"], universal_newlines=True).split()
    # The version number follows the literal token "release", e.g. "release 12.4,".
    version_token = tokens[tokens.index("release") + 1]
    return Version(version_token.split(",")[0])
def get_rocm_version():
    """Determine the installed ROCm version.

    Tries `rocminfo` output first, then the rocm-config-version.cmake file
    under $ROCM_PATH, and finally falls back to 5.0.0.

    Previously a missing `rocminfo` binary raised FileNotFoundError and the
    documented file-based fallback was never reached; now any failure to run
    or parse rocminfo falls through to the next probe.
    """
    rocm_output = ""
    try:
        rocm_output = subprocess.check_output(["rocminfo"], universal_newlines=True)
    except (OSError, subprocess.SubprocessError):
        # rocminfo is absent or failed; continue with the file-based probe.
        pass
    # Example output: ROCm Version: x.y.z-...
    match = re.search(r'ROCm Version: (\d+\.\d+\.\d+)', rocm_output)
    if match:
        return Version(match.group(1))
    rocm_path = os.environ.get("ROCM_PATH", "/opt/rocm")
    rocm_version_file = os.path.join(rocm_path, "lib", "cmake", "rocm",
                                     "rocm-config-version.cmake")
    if os.path.exists(rocm_version_file):
        with open(rocm_version_file, "r") as f:
            content = f.read()
        match = re.search(r'set\(PACKAGE_VERSION "(\d+\.\d+\.\d+)"', content)
        if match:
            return Version(match.group(1))
    # Last-resort default when no version source is available.
    return Version("5.0.0")
def get_tilelang_version(with_cuda=USE_CUDA,
                         with_system_info=not MAYBE_METAL,
                         with_commit_id=False) -> str:
    """Compose the full package version string.

    Starts from the VERSION file and appends a PEP 440 local-version segment
    built from (optionally) an OS tag, a CUDA/ROCm toolkit tag, and a
    truncated git commit id.

    NOTE(review): despite its name, `with_cuda` gates the ROCm tag as well —
    when USE_ROCM is set a rocmXYZ tag is emitted instead of cuXYZ.
    """
    version = find_version(get_path(".", "VERSION"))
    local_version_parts = []
    if with_system_info:
        # '-' is not allowed inside a local-version part, so "ubuntu-20.04"
        # becomes "ubuntu.20.04".
        local_version_parts.append(get_system_info().replace("-", "."))
    if with_cuda:
        if USE_ROCM:
            if ROCM_HOME:
                rocm_version = str(get_rocm_version())
                # Keep the first three digits, e.g. "6.2.0" -> "620".
                rocm_version_str = rocm_version.replace(".", "")[:3]
                local_version_parts.append(f"rocm{rocm_version_str}")
        else:
            if CUDA_HOME:
                cuda_version = str(get_nvcc_cuda_version())
                # Keep the first three digits, e.g. "12.4" -> "124".
                cuda_version_str = cuda_version.replace(".", "")[:3]
                local_version_parts.append(f"cu{cuda_version_str}")
    if local_version_parts:
        version += f"+{'.'.join(local_version_parts)}"
    if with_commit_id:
        commit_id = None
        try:
            commit_id = subprocess.check_output(['git', 'rev-parse', 'HEAD'],
                                                stderr=subprocess.DEVNULL,
                                                encoding='utf-8').strip()
        except subprocess.SubprocessError as error:
            logger.warning(f"Ignore commit id because failed to get git commit id: {str(error)}")
        if commit_id:
            # Truncate commit ID to 8 characters to keep version string reasonable
            short_commit_id = commit_id[:8]
            # '+' starts the local segment only if one was not started above.
            if local_version_parts:
                version += f".{short_commit_id}"
            else:
                version += f"+{short_commit_id}"
    return version
@functools.lru_cache(maxsize=None)
def get_cplus_compiler():
    """Locate the default C/C++ compiler.

    $CXX (then $CC) wins outright; otherwise the first of g++, clang++, c++
    found anywhere on PATH is used — note the compiler name takes priority
    over PATH position.

    Returns
    -------
    out: Optional[str]
        The path to the default C/C++ compiler, or None if none was found.
    """
    override = os.environ.get("CXX") or os.environ.get("CC")
    if override:
        return override
    search_dirs = os.get_exec_path()
    for candidate in ("g++", "clang++", "c++"):
        for directory in search_dirs:
            candidate_path = os.path.join(directory, candidate)
            if os.path.isfile(candidate_path) and os.access(candidate_path, os.X_OK):
                return candidate_path
    return None
@functools.lru_cache(maxsize=None)
def get_cython_compiler() -> Optional[str]:
    """Locate the Cython executable.

    Search order: the active virtualenv's bin/, the user-site bin/, then every
    directory on PATH; "cython" is preferred over "cython3".

    Returns
    -------
    out: Optional[str]
        The path to the Cython compiler, or None if none was found.
    """
    search_dirs = list(os.get_exec_path())
    # User site-packages scripts take precedence over plain PATH entries.
    user_base = site.getuserbase()
    if user_base:
        user_bin = os.path.join(user_base, "bin")
        if os.path.exists(user_bin):
            search_dirs.insert(0, user_bin)
    # A virtualenv's scripts take precedence over everything else.
    if sys.prefix != sys.base_prefix:
        venv_bin = os.path.join(sys.prefix, "bin")
        if os.path.exists(venv_bin):
            search_dirs.insert(0, venv_bin)
    for executable in ("cython", "cython3"):
        for directory in search_dirs:
            candidate = os.path.join(directory, executable)
            if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
                return candidate
    return None
@functools.lru_cache(maxsize=None)
def get_cmake_path() -> str:
    """Return the path of the `cmake` executable found on PATH.

    :raises Exception: If cmake cannot be located.
    """
    cmake_path = shutil.which("cmake")
    # shutil.which returns None when cmake is absent; guard before calling
    # os.path.exists, which would raise a confusing TypeError on None.
    if cmake_path is None or not os.path.exists(cmake_path):
        raise Exception("CMake is not installed, please install it first.")
    return cmake_path
def get_system_info():
    """Return a platform tag such as "ubuntu-20.04" on Linux, else the lowercase OS name.

    The distro name is taken from the ID field of /etc/os-release instead of
    being hard-coded to "ubuntu" (the old behavior mislabeled every quoted
    VERSION_ID distro as Ubuntu); "ubuntu" remains the fallback.
    """
    system = platform.system().lower()
    if system == "linux":
        try:
            with open("/etc/os-release") as f:
                os_release = f.read()
            version_id_match = re.search(r'VERSION_ID="(\d+\.\d+)"', os_release)
            if version_id_match:
                version_id = version_id_match.group(1)
                id_match = re.search(r'^ID="?([A-Za-z0-9._-]+)"?', os_release, re.MULTILINE)
                distro = id_match.group(1) if id_match else "ubuntu"
                return f"{distro}-{version_id}"
        except FileNotFoundError:
            # No os-release (e.g. minimal containers); fall back to the bare OS name.
            pass
    return system
def read_readme() -> str:
    """Return the contents of README.md, or "" when the file is absent."""
    readme_path = get_path("README.md")
    if not os.path.isfile(readme_path):
        return ""
    # Use a context manager so the handle is closed deterministically; the
    # old io.open(...).read() left the file open until garbage collection.
    with open(readme_path, "r", encoding="utf-8") as f:
        return f.read()
def download_and_extract_llvm(version, is_aarch64=False, extract_path="3rdparty"):
    """
    Downloads and extracts the specified version of LLVM for the given platform.
    Args:
        version (str): The version of LLVM to download.
        is_aarch64 (bool): True if the target platform is aarch64, False otherwise.
        extract_path (str): The directory path where the archive will be extracted.
    Returns:
        str: The path where the LLVM archive was extracted.
    """
    # Pick the Ubuntu flavour of the official binaries that matches this LLVM
    # release (plain string comparison works for these version spellings).
    ubuntu_version = "16.04"
    if version >= "16.0.0":
        ubuntu_version = "20.04"
    elif version >= "13.0.0":
        ubuntu_version = "18.04"
    base_url = (f"https://github.com/llvm/llvm-project/releases/download/llvmorg-{version}")
    file_name = f"clang+llvm-{version}-{'aarch64-linux-gnu' if is_aarch64 else f'x86_64-linux-gnu-ubuntu-{ubuntu_version}'}.tar.xz"
    download_url = f"{base_url}/{file_name}"
    # Download the file (buffered fully in memory before extraction).
    logger.info(f"Downloading {file_name} from {download_url}")
    with urllib.request.urlopen(download_url) as response:
        if response.status != 200:
            raise Exception(f"Download failed with status code {response.status}")
        file_content = response.read()
    # Ensure the extract path exists
    os.makedirs(extract_path, exist_ok=True)
    # if the file already exists, remove it
    if os.path.exists(os.path.join(extract_path, file_name)):
        os.remove(os.path.join(extract_path, file_name))
    # Extract the file
    # NOTE(review): extractall() trusts archive member paths; acceptable for
    # official LLVM releases, but consider the `filter` argument on Pythons
    # that support it.
    logger.info(f"Extracting {file_name} to {extract_path}")
    with tarfile.open(fileobj=BytesIO(file_content), mode="r:xz") as tar:
        tar.extractall(path=extract_path)
    logger.info("Download and extraction completed successfully.")
    return os.path.abspath(os.path.join(extract_path, file_name.replace(".tar.xz", "")))
# Extra files shipped inside the tilelang package.
# NOTE(review): "*pyx" matches any name ending in "pyx" — presumably "*.pyx"
# was intended; the broader pattern still covers those files.
package_data = {
    "tilelang": ["py.typed", "*pyx"],
}
# LLVM release fetched by setup_llvm_for_tvm() when USE_LLVM is enabled.
LLVM_VERSION = "10.0.1"
IS_AARCH64 = False  # Set to True if on an aarch64 platform
EXTRACT_PATH = "3rdparty"  # Default extraction path
def update_submodules():
    """Initialize and update git submodules when running from a git checkout."""

    def _inside_git_worktree():
        # `git rev-parse --is-inside-work-tree` fails outside a repository and
        # when git itself is unavailable.
        try:
            subprocess.check_output(["git", "rev-parse", "--is-inside-work-tree"],
                                    stderr=subprocess.STDOUT)
        except (subprocess.CalledProcessError, FileNotFoundError):
            return False
        return True

    if not _inside_git_worktree():
        logger.info("Info: Not a git repository, skipping submodule update.")
        return
    try:
        subprocess.check_call(["git", "submodule", "update", "--init", "--recursive"])
    except subprocess.CalledProcessError as error:
        raise RuntimeError("Failed to update submodules") from error
def setup_llvm_for_tvm():
    """Fetch LLVM and return (extract_path, llvm-config path) for configuring TVM."""
    # download_and_extract_llvm and the LLVM_* constants are defined above.
    llvm_root = download_and_extract_llvm(LLVM_VERSION, IS_AARCH64, EXTRACT_PATH)
    return llvm_root, os.path.join(llvm_root, "bin", "llvm-config")
def patch_libs(libpath):
    """
    tvm and tilelang libs are copied from elsewhere into wheels
    and have a hard-coded rpath.
    Set rpath to the directory of libs so auditwheel works well.

    No-op on non-Linux platforms or when patchelf is unavailable; a failing
    patchelf run is now reported instead of being silently ignored.
    """
    if not IS_LINUX:
        return
    # patchelf must be on PATH for the rpath rewrite.
    patchelf_path = shutil.which("patchelf")
    if not patchelf_path:
        logger.warning(
            "patchelf is not installed, which is required for auditwheel to work for compatible wheels."
        )
        return
    result = subprocess.run([patchelf_path, '--set-rpath', '$ORIGIN', libpath])
    # Surface failures: a silently unpatched library breaks auditwheel later.
    if result.returncode != 0:
        logger.warning(
            f"patchelf failed to set rpath on {libpath} (exit code {result.returncode})")
class TileLangBuildPyCommand(build_py):
    """Customized build_py command: runs the normal Python build, builds the
    native extensions, then stages shared libraries, Cython sources, templates,
    vendored TVM/CUTLASS/composable-kernel sources and project metadata into
    the wheel layout."""

    def _copy_item(self, source_path, target_path):
        """Copy one item into the build tree.

        A directory is copied recursively to *target_path*; a single file is
        copied into the parent directory of *target_path* (created on demand).
        This replaces five identical inline copies of the same logic.
        """
        if os.path.isdir(source_path):
            self.mkpath(target_path)
            self.copy_tree(source_path, target_path)
        else:
            target_dir = os.path.dirname(target_path)
            if not os.path.exists(target_dir):
                os.makedirs(target_dir)
            shutil.copy2(source_path, target_dir)

    def run(self):
        build_py.run(self)
        # Build the native extensions first so their outputs can be staged below.
        self.run_command("build_ext")
        build_ext_cmd = self.get_finalized_command("build_ext")
        build_temp_dir = build_ext_cmd.build_temp
        ext_modules = build_ext_cmd.extensions
        for ext in ext_modules:
            extdir = build_ext_cmd.get_ext_fullpath(ext.name)
            logger.info(f"Extension {ext.name} output directory: {extdir}")
            ext_output_dir = os.path.dirname(extdir)
            logger.info(f"Extension output directory (parent): {ext_output_dir}")
            logger.info(f"Build temp directory: {build_temp_dir}")
        # copy cython files
        CYTHON_SRC = [
            "tilelang/jit/adapter/cython/cython_wrapper.pyx",
            "tilelang/jit/adapter/cython/.cycache",
        ]
        for item in CYTHON_SRC:
            self._copy_item(os.path.join(ROOT_DIR, item), os.path.join(self.build_lib, item))
        # copy the tl_templates
        TILELANG_SRC = [
            "src/tl_templates",
        ]
        for item in TILELANG_SRC:
            self._copy_item(
                os.path.join(ROOT_DIR, item), os.path.join(self.build_lib, PACKAGE_NAME, item))
        # Candidate locations for the prebuilt TVM/tilelang shared libraries.
        potential_dirs = [
            ext_output_dir,
            self.build_lib,
            build_temp_dir,
            os.path.join(ROOT_DIR, "build"),
        ]
        for item in TVM_PREBUILD_ITEMS:
            source_lib_file = None
            for dir in potential_dirs:
                candidate = os.path.join(dir, item)
                if os.path.exists(candidate):
                    source_lib_file = candidate
                    break
            if source_lib_file:
                # Rewrite rpath to $ORIGIN so auditwheel can repair the wheel.
                patch_libs(source_lib_file)
                target_dir_release = os.path.join(self.build_lib, PACKAGE_NAME, "lib")
                target_dir_develop = os.path.join(PACKAGE_NAME, "lib")
                os.makedirs(target_dir_release, exist_ok=True)
                os.makedirs(target_dir_develop, exist_ok=True)
                shutil.copy2(source_lib_file, target_dir_release)
                logger.info(f"Copied {source_lib_file} to {target_dir_release}")
                shutil.copy2(source_lib_file, target_dir_develop)
                logger.info(f"Copied {source_lib_file} to {target_dir_develop}")
                os.remove(source_lib_file)
            else:
                logger.info(f"WARNING: {item} not found in any expected directories!")
        # Drop macOS .dSYM debug bundles from wherever they were produced.
        for item in TVM_PREBUILD_ITEMS_TO_DELETE:
            for dir in potential_dirs:
                candidate = os.path.join(dir, item)
                if os.path.exists(candidate):
                    shutil.rmtree(candidate)
                    break
        # Stage the generated TVM config.cmake next to the package metadata.
        TVM_CONFIG_ITEMS = [
            f"{build_temp_dir}/config.cmake",
        ]
        for item in TVM_CONFIG_ITEMS:
            source_dir = os.path.join(ROOT_DIR, item)
            # only copy the file
            file_name = os.path.basename(item)
            target_dir = os.path.dirname(os.path.join(self.build_lib, PACKAGE_NAME, file_name))
            if not os.path.exists(target_dir):
                os.makedirs(target_dir)
            if os.path.exists(source_dir):
                shutil.copy2(source_dir, target_dir)
            else:
                logger.info(f"INFO: {source_dir} does not exist.")
        # Vendored TVM sources and metadata shipped inside the wheel
        # (fixes the old misspelled local name TVM_PACAKGE_ITEMS).
        TVM_PACKAGE_ITEMS = [
            "3rdparty/tvm/src",
            "3rdparty/tvm/python",
            "3rdparty/tvm/licenses",
            "3rdparty/tvm/conftest.py",
            "3rdparty/tvm/CONTRIBUTORS.md",
            "3rdparty/tvm/KEYS",
            "3rdparty/tvm/LICENSE",
            "3rdparty/tvm/README.md",
            "3rdparty/tvm/mypy.ini",
            "3rdparty/tvm/pyproject.toml",
            "3rdparty/tvm/version.py",
        ]
        for item in TVM_PACKAGE_ITEMS:
            self._copy_item(
                os.path.join(ROOT_DIR, item), os.path.join(self.build_lib, PACKAGE_NAME, item))
        # Copy CUTLASS to the package directory
        CUTLASS_PREBUILD_ITEMS = [
            "3rdparty/cutlass/include",
            "3rdparty/cutlass/tools",
        ]
        for item in CUTLASS_PREBUILD_ITEMS:
            self._copy_item(
                os.path.join(ROOT_DIR, item), os.path.join(self.build_lib, PACKAGE_NAME, item))
        # copy composable kernel to the package directory
        CK_PREBUILD_ITEMS = [
            "3rdparty/composable_kernel/include",
            "3rdparty/composable_kernel/library",
        ]
        for item in CK_PREBUILD_ITEMS:
            self._copy_item(
                os.path.join(ROOT_DIR, item), os.path.join(self.build_lib, PACKAGE_NAME, item))
        # Top-level project metadata files.
        TL_CONFIG_ITEMS = ["CMakeLists.txt", "VERSION", "README.md", "LICENSE"]
        for item in TL_CONFIG_ITEMS:
            source_dir = os.path.join(ROOT_DIR, item)
            target_dir = os.path.join(self.build_lib, PACKAGE_NAME, item)
            # For non-PyPI builds, rewrite VERSION with the locally computed
            # version (optionally including the git commit id).
            if not PYPI_BUILD and item == "VERSION":
                version = get_tilelang_version(
                    with_cuda=False, with_system_info=False, with_commit_id=WITH_COMMITID)
                target_dir = os.path.dirname(target_dir)
                if not os.path.exists(target_dir):
                    os.makedirs(target_dir)
                with open(os.path.join(target_dir, item), "w") as f:
                    print(f"Writing {version} to {os.path.join(target_dir, item)}")
                    f.write(version)
                continue
            self._copy_item(source_dir, target_dir)
class TileLangSdistCommand(sdist):
    """Customized setuptools sdist command - includes the pyproject.toml file."""

    def make_distribution(self):
        # Force the distribution name/version at sdist time; the version omits
        # CUDA/system tags and optionally embeds the git commit id.
        self.distribution.metadata.name = PACKAGE_NAME
        self.distribution.metadata.version = get_tilelang_version(
            with_cuda=False, with_system_info=False, with_commit_id=WITH_COMMITID)
        super().make_distribution()
class CMakeExtension(Extension):
    """
    A specialized setuptools Extension class for building a CMake project.
    :param name: Name of the extension module.
    :param sourcedir: Directory containing the top-level CMakeLists.txt.
    """

    def __init__(self, name, sourcedir="", **kwargs):
        # We pass an empty 'sources' list because
        # the actual build is handled by CMake, not setuptools.
        super().__init__(name=name, sources=[], **kwargs)
        # Convert the source directory to an absolute path
        # so that CMake can correctly locate the CMakeLists.txt.
        self.sourcedir = os.path.abspath(sourcedir)
class CythonExtension(Extension):
    """
    A specialized setuptools Extension class for building a Cython project.
    :param name: Name of the extension module.
    :param sourcedir: Directory containing the Cython sources (resolved to an
        absolute path, mirroring CMakeExtension).
    """

    def __init__(self, name, sourcedir=""):
        # Empty 'sources': compilation is driven by build_cython, not setuptools.
        super().__init__(name=name, sources=[])
        self.sourcedir = os.path.abspath(sourcedir)
class TileLangExtensionBuild(build_ext):
"""
Custom build_ext command for CMake-based projects.
This class overrides the 'run' method to ensure that CMake is available,
and then iterates over all extensions defined as CMakeExtension,
delegating the actual build logic to 'build_cmake'.
"""
def run(self):
    """Verify CMake availability, sync git submodules, build every registered
    extension, then relocate built shared libraries into tilelang/lib so
    editable installs can load them."""
    # Check if CMake is installed and accessible by attempting to run 'cmake --version'.
    try:
        cmake_path = get_cmake_path()
        if not cmake_path:
            raise Exception("CMake is not installed, please install it first.")
        subprocess.check_output([cmake_path, "--version"])
    except OSError as error:
        # If CMake is not found, raise an error.
        raise RuntimeError(
            "CMake must be installed to build the following extensions") from error
    update_submodules()
    # Build each extension (of type CMakeExtension) using our custom method.
    for ext in self.extensions:
        if isinstance(ext, CythonExtension):
            self.build_cython(ext)
        elif isinstance(ext, CMakeExtension):
            self.build_cmake(ext)
        else:
            raise ValueError(f"Unsupported extension type: {type(ext)}")
    # Make editable installs work: move any lib*.so/.dylib files dropped in
    # the working directory into the tilelang/lib directory.
    import glob
    files = glob.glob("*.so" if IS_LINUX else "*.dylib")
    if os.path.exists(PACKAGE_NAME):
        target_lib_dir = os.path.join(PACKAGE_NAME, "lib")
        for file in files:
            if not os.path.exists(target_lib_dir):
                os.makedirs(target_lib_dir)
            shutil.copy(file, target_lib_dir)
            # remove the original file
            os.remove(file)
def build_cython(self, ext):
    """
    Build a single Cython-based extension.
    :param ext: The extension (an instance of CythonExtension).

    Compiles tilelang/jit/adapter/cython/cython_wrapper.pyx into a shared
    library under .cycache/<pyXY>/, keyed by a SHA-256 of the source so
    unchanged code is not recompiled. An exclusive flock serializes
    concurrent builds (e.g. parallel pip workers).
    """
    cython_compiler = get_cython_compiler()
    if not cython_compiler:
        # Best-effort bootstrap: try installing Cython, then look again.
        logger.info("Cython compiler not found, install it first")
        subprocess.check_call(["pip", "install", "cython"])
        cython_compiler = get_cython_compiler()
    if not cython_compiler:
        raise Exception("Cython is not installed, please install it first.")
    logger.info(f"Using Cython compiler: {cython_compiler}")
    cython_warpper_dir = os.path.join(ext.sourcedir, "tilelang", "jit", "adapter", "cython")
    cython_wrapper_path = os.path.join(cython_warpper_dir, "cython_wrapper.pyx")
    # Cache per Python minor version, e.g. ".cycache/py311".
    py_version = f"py{sys.version_info.major}{sys.version_info.minor}"
    cache_dir = Path(cython_warpper_dir) / ".cycache" / py_version
    os.makedirs(cache_dir, exist_ok=True)
    with open(cython_wrapper_path, "r") as f:
        cython_wrapper_code = f.read()
    source_path = cache_dir / "cython_wrapper.cpp"
    library_path = cache_dir / "cython_wrapper.so"
    # Despite the name, the stored digest is SHA-256, not MD5.
    md5_path = cache_dir / "md5.txt"
    code_hash = hashlib.sha256(cython_wrapper_code.encode()).hexdigest()
    cache_path = cache_dir / f"{code_hash}.so"
    lock_file = cache_path.with_suffix('.lock')
    # Check if cached version exists and is valid
    need_compile = True
    if md5_path.exists() and library_path.exists():
        with open(md5_path, "r") as f:
            cached_hash = f.read().strip()
        if cached_hash == code_hash:
            logger.info("Cython JIT adapter is up to date, no need to compile...")
            need_compile = False
        else:
            logger.info("Cython JIT adapter is out of date, need to recompile...")
    else:
        logger.info("No cached version found for Cython JIT adapter, need to compile...")
    if need_compile:
        logger.info("Waiting for lock to compile Cython JIT adapter...")
        with open(lock_file, 'w') as lock:
            # Exclusive lock: blocks until any concurrent builder finishes.
            fcntl.flock(lock.fileno(), fcntl.LOCK_EX)
            try:
                # After acquiring the lock, check again if the file has been compiled by another process
                if md5_path.exists() and library_path.exists():
                    with open(md5_path, "r") as f:
                        cached_hash = f.read().strip()
                    if cached_hash == code_hash:
                        logger.info(
                            "Another process has already compiled the file, using it..."
                        )
                        need_compile = False
                if need_compile:
                    logger.info("Compiling Cython JIT adapter...")
                    # Compile to a temp name, then rename for atomic publication.
                    temp_path = cache_dir / f"temp_{code_hash}.so"
                    with open(md5_path, "w") as f:
                        f.write(code_hash)
                    # compile the cython_wrapper.pyx file into .cpp
                    cython = get_cython_compiler()
                    if cython is None:
                        raise Exception("Cython is not installed, please install it first.")
                    # NOTE(review): os.system return codes are not checked here;
                    # a failed compile is only caught later via the exception path.
                    os.system(f"{cython} {cython_wrapper_path} --cplus -o {source_path}")
                    python_include_path = sysconfig.get_path("include")
                    cc = get_cplus_compiler()
                    if MAYBE_METAL:
                        # Allow unresolved Python symbols at link time on macOS.
                        cc += ' -Wl,-undefined,dynamic_lookup'
                    command = f"{cc} -shared -pthread -fPIC -fwrapv -O2 -Wall -fno-strict-aliasing -I{python_include_path} {source_path} -o {temp_path}"
                    logger.info(command)
                    os.system(command)
                    # rename the temp file to the library file
                    temp_path.rename(library_path)
            except Exception as e:
                # Clean up a half-written artifact before re-raising.
                if 'temp_path' in locals() and temp_path.exists():
                    temp_path.unlink()
                raise Exception(f"Failed to compile Cython JIT adapter: {e}") from e
            finally:
                if lock_file.exists():
                    lock_file.unlink()
    # add the .so file to the sys.path
    cache_dir_str = str(cache_dir)
    if cache_dir_str not in sys.path:
        sys.path.append(cache_dir_str)
def build_cmake(self, ext):
    """
    Build a single CMake-based extension by generating a CMake config and invoking CMake/Ninja.

    Generates or updates a config.cmake in the build directory (based on the extension's sourcedir),
    injecting LLVM/CUDA/ROCm and Python settings, then runs CMake to configure and build the target.
    When running an in-place build the resulting library is placed under ./tilelang/lib; otherwise the
    standard extension output directory is used.

    Parameters:
        ext: The CMakeExtension to build; its `sourcedir` should contain the TVM/CMake `config.cmake`
            template under `3rdparty/tvm/cmake/`.

    Raises:
        subprocess.CalledProcessError: If the CMake configuration or build commands fail.
        OSError: If filesystem operations (read/write) fail.
    """
    # Only setup LLVM if it's enabled
    llvm_config_path = "OFF"
    if USE_LLVM:
        # Setup LLVM for TVM and retrieve the path to llvm-config
        _, llvm_config_path = setup_llvm_for_tvm()
    # Determine the directory where the final .so or .pyd library should go.
    extdir = os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name)))
    # To make it compatible with in-place build and avoid redundant link during incremental build,
    # we need to change the build destination to tilelang/lib, where it's actually loaded
    if self.inplace:
        extdir = os.path.abspath('./tilelang/lib/')
    # Prepare arguments for the CMake configuration step.
    # -DCMAKE_LIBRARY_OUTPUT_DIRECTORY sets where built libraries go
    # -DPython_EXECUTABLE ensures that the correct Python is used
    cmake_args = [
        f"-DCMAKE_LIBRARY_OUTPUT_DIRECTORY={extdir}",
        f"-DPython_EXECUTABLE={sys.executable}",
        f"-DCMAKE_BUILD_TYPE={'Debug' if DEBUG_MODE else 'Release'}",
        "-G",
        "Ninja",
    ]
    if USE_CUDA and not USE_ROCM:
        # Point CMake at the nvcc that belongs to the detected CUDA toolkit.
        cmake_args.append(f"-DCMAKE_CUDA_COMPILER={os.path.join(CUDA_HOME, 'bin', 'nvcc')}")
    # Create the temporary build directory (if it doesn't exist).
    # In-place (editable) builds reuse a stable ./build directory so incremental
    # rebuilds stay fast; wheel builds use setuptools' per-build temp dir.
    if self.inplace:
        build_temp = os.path.abspath('./build')
    else:
        build_temp = os.path.abspath(self.build_temp)
    os.makedirs(build_temp, exist_ok=True)
    # Paths to the source and destination config.cmake files
    src_config = Path(ext.sourcedir) / "3rdparty" / "tvm" / "cmake" / "config.cmake"
    dst_config = Path(build_temp) / "config.cmake"
    # Read the default config template
    content_lines = src_config.read_text().splitlines()
    # Add common LLVM configuration
    content_lines.append(f"set(USE_LLVM {llvm_config_path})")
    # Append GPU backend configuration based on environment:
    # Metal takes precedence, then ROCm, then CUDA (when CUDA_HOME is set).
    if USE_METAL:
        content_lines += [
            "set(USE_METAL ON)",
            "set(USE_ROCM OFF)",
        ]
    elif USE_ROCM:
        content_lines += [
            f"set(USE_ROCM {ROCM_HOME})",
            "set(USE_CUDA OFF)",
        ]
    elif CUDA_HOME:
        content_lines += [
            f"set(USE_CUDA {CUDA_HOME})",
            "set(USE_ROCM OFF)",
        ]
    # Create the final file content
    new_content = "\n".join(content_lines) + "\n"
    # Write the file only if it does not exist or has changed, so an identical
    # config does not dirty the file's timestamp and trigger a reconfigure.
    if not dst_config.exists() or dst_config.read_text() != new_content:
        dst_config.write_text(new_content)
        print(f"[Config] Updated: {dst_config}")
    else:
        print(f"[Config] No changes: {dst_config}")
    cmake_path = get_cmake_path()
    # Run CMake to configure the project with the given arguments.
    # NOTE(review): the configure step is skipped whenever build.ninja already
    # exists, so changes to cmake_args require removing the build directory —
    # confirm this is the intended incremental-build tradeoff.
    if not os.path.exists(os.path.join(build_temp, "build.ninja")):
        logger.info(
            f"[CMake] Generating build.ninja: {cmake_path} {ext.sourcedir} {' '.join(cmake_args)}"
        )
        subprocess.check_call([cmake_path, ext.sourcedir] + cmake_args, cwd=build_temp)
    else:
        logger.info(f"[CMake] build.ninja already exists in {build_temp}")
    # Use ~75% of the cores, leaving headroom for the rest of the system.
    num_jobs = max(1, int(multiprocessing.cpu_count() * 0.75))
    logger.info(
        f"[Build] Using {num_jobs} jobs | cmake: {cmake_path} (exists: {os.path.exists(cmake_path)}) | build dir: {build_temp}"
    )
    subprocess.check_call(
        [cmake_path, "--build", ".", "--config", "Release", "-j",
         str(num_jobs)],
        cwd=build_temp)
# Build the C++ core extension unconditionally; the Cython JIT adapter is
# excluded on Metal (macOS), where it cannot be built.
ext_modules = [
    CMakeExtension("TileLangCXX", sourcedir="."),
]
if not MAYBE_METAL:
    ext_modules.append(CythonExtension("TileLangCython", sourcedir="."))

setup(
    name=PACKAGE_NAME,
    # PyPI builds publish a clean PEP 440 version; other builds may carry
    # commit-id suffixes controlled by WITH_COMMITID.
    version=(get_tilelang_version(with_cuda=False, with_system_info=False, with_commit_id=False)
             if PYPI_BUILD else get_tilelang_version(with_commit_id=WITH_COMMITID)),
    packages=find_packages(where="."),
    package_dir={"": "."},
    author="Tile-AI",
    description="A tile level programming language to generate high performance code.",
    long_description=read_readme(),
    long_description_content_type="text/markdown",
    platforms=[
        "Environment :: GPU :: NVIDIA CUDA" if not USE_ROCM else "Environment :: GPU :: AMD ROCm",
        "Operating System :: POSIX :: Linux",
    ],
    license="MIT",
    keywords="BLAS, CUDA, HIP, Code Generation, TVM",
    url="https://github.com/tile-ai/tilelang",
    classifiers=[
        "Programming Language :: Python :: 3.8",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Intended Audience :: Developers",
        "Intended Audience :: Science/Research",
    ],
    python_requires=">=3.8",
    install_requires=get_requirements(),
    package_data=package_data,
    include_package_data=False,
    # Bug fix: a fresh hard-coded list (always containing the Cython
    # extension) was previously passed here, silently ignoring the
    # MAYBE_METAL filter applied to `ext_modules` above.
    ext_modules=ext_modules,
    cmdclass={
        "build_py": TileLangBuildPyCommand,
        "sdist": TileLangSdistCommand,
        "build_ext": TileLangExtensionBuild,
    },
)
......@@ -5,6 +5,10 @@ import ctypes
import logging
from tqdm import tqdm
from importlib.metadata import version
__version__ = version('tilelang')
class TqdmLoggingHandler(logging.Handler):
"""Custom logging handler that directs log output to tqdm progress bar to avoid interference."""
......@@ -57,9 +61,10 @@ from .env import enable_cache, disable_cache, is_cache_enabled # noqa: F401
from .env import env as env # noqa: F401
import tvm
import tvm.base
import tvm.base # noqa: F401
from tvm import DataType # noqa: F401
# Setup tvm search path before importing tvm
from . import libinfo
......@@ -71,8 +76,8 @@ def _load_tile_lang_lib():
# pylint: disable=protected-access
lib_name = "tilelang" if tvm.base._RUNTIME_ONLY else "tilelang_module"
# pylint: enable=protected-access
lib_path = libinfo.find_lib_path(lib_name, optional=False)
return ctypes.CDLL(lib_path[0]), lib_path[0]
lib_path = libinfo.find_lib_path(lib_name)
return ctypes.CDLL(lib_path), lib_path
# only load once here
......@@ -101,8 +106,6 @@ from .transform import PassConfigKey # noqa: F401
from .engine import lower, register_cuda_postproc, register_hip_postproc # noqa: F401
from .version import __version__ # noqa: F401
from .math import * # noqa: F403
from . import ir # noqa: F401
......
......@@ -30,7 +30,7 @@ from tilelang.autotuner.param import CompileArgs, ProfileArgs, AutotuneResult
from tilelang.autotuner.capture import get_autotune_inputs
from tilelang.utils.target import determine_target
from tilelang.jit.param import _P, _RProg
from tilelang.version import __version__
from tilelang import __version__
class TimeoutException(Exception):
......
......@@ -16,7 +16,7 @@ from tvm.tir import PrimFunc
from tilelang.engine.param import KernelParam
from tilelang import env
from tilelang.jit import JITKernel
from tilelang.version import __version__
from tilelang import __version__
KERNEL_PATH = "kernel.cu"
WRAPPED_KERNEL_PATH = "wrapped_kernel.cu"
......
......@@ -4,6 +4,7 @@ import pathlib
import logging
import shutil
import glob
import site
from dataclasses import dataclass
from typing import Optional
......@@ -19,6 +20,19 @@ TL_TEMPLATE_NOT_FOUND_MESSAGE = ("TileLang is not installed or found in the expe
", which may lead to compilation bugs when utilize tilelang backend."
TVM_LIBRARY_NOT_FOUND_MESSAGE = ("TVM is not installed or found in the expected path")
SITE_PACKAGES = site.getsitepackages()
TL_LIBS = [os.path.join(i, 'tilelang/lib') for i in site.getsitepackages()]
TL_LIBS = [i for i in TL_LIBS if os.path.exists(i)]
TL_ROOT = os.path.dirname(os.path.abspath(__file__))
DEV = False
THIRD_PARTY_ROOT = os.path.join(TL_ROOT, '3rdparty')
if not os.path.exists(THIRD_PARTY_ROOT):
DEV = True
THIRD_PARTY_ROOT = os.path.join(TL_ROOT, '..', '3rdparty')
def _find_cuda_home() -> str:
"""Find the CUDA install path.
......@@ -261,85 +275,51 @@ env = Environment()
CUDA_HOME = env.CUDA_HOME
ROCM_HOME = env.ROCM_HOME
def prepend_pythonpath(path):
    """Put *path* at the front of both the PYTHONPATH env var and sys.path."""
    existing = os.environ.get("PYTHONPATH")
    os.environ["PYTHONPATH"] = path if not existing else os.pathsep.join((path, existing))
    sys.path.insert(0, path)
# Initialize TVM paths
if env.TVM_IMPORT_PYTHON_PATH is not None:
os.environ["PYTHONPATH"] = env.TVM_IMPORT_PYTHON_PATH + ":" + os.environ.get("PYTHONPATH", "")
sys.path.insert(0, env.TVM_IMPORT_PYTHON_PATH)
prepend_pythonpath(env.TVM_IMPORT_PYTHON_PATH)
else:
install_tvm_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "3rdparty", "tvm")
if os.path.exists(install_tvm_path) and install_tvm_path not in sys.path:
os.environ["PYTHONPATH"] = (
install_tvm_path + "/python:" + os.environ.get("PYTHONPATH", ""))
sys.path.insert(0, install_tvm_path + "/python")
env.TVM_IMPORT_PYTHON_PATH = install_tvm_path + "/python"
develop_tvm_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "..", "3rdparty", "tvm")
if os.path.exists(develop_tvm_path) and develop_tvm_path not in sys.path:
os.environ["PYTHONPATH"] = (
develop_tvm_path + "/python:" + os.environ.get("PYTHONPATH", ""))
sys.path.insert(0, develop_tvm_path + "/python")
env.TVM_IMPORT_PYTHON_PATH = develop_tvm_path + "/python"
develop_tvm_library_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "..", "build", "tvm")
install_tvm_library_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "lib")
tvm_path = os.path.join(THIRD_PARTY_ROOT, "tvm")
assert os.path.exists(tvm_path), tvm_path
if tvm_path not in sys.path:
tvm_python_binding = os.path.join(tvm_path, 'python')
prepend_pythonpath(tvm_python_binding)
env.TVM_IMPORT_PYTHON_PATH = tvm_python_binding
if os.environ.get("TVM_LIBRARY_PATH") is None:
if os.path.exists(develop_tvm_library_path):
os.environ["TVM_LIBRARY_PATH"] = develop_tvm_library_path
elif os.path.exists(install_tvm_library_path):
os.environ["TVM_LIBRARY_PATH"] = install_tvm_library_path
else:
logger.warning(TVM_LIBRARY_NOT_FOUND_MESSAGE)
# pip install build library path
lib_path = os.path.join(env.TILELANG_PACKAGE_PATH, "lib")
existing_path = os.environ.get("TVM_LIBRARY_PATH")
if existing_path:
os.environ["TVM_LIBRARY_PATH"] = f"{existing_path}:{lib_path}"
else:
os.environ["TVM_LIBRARY_PATH"] = lib_path
env.TVM_LIBRARY_PATH = os.environ.get("TVM_LIBRARY_PATH", None)
os.environ['TVM_LIBRARY_PATH'] = env.TVM_LIBRARY_PATH = os.pathsep.join(TL_LIBS)
# Initialize CUTLASS paths
if os.environ.get("TL_CUTLASS_PATH", None) is None:
install_cutlass_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "3rdparty", "cutlass")
develop_cutlass_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "..", "3rdparty", "cutlass")
if os.path.exists(install_cutlass_path):
os.environ["TL_CUTLASS_PATH"] = install_cutlass_path + "/include"
env.CUTLASS_INCLUDE_DIR = install_cutlass_path + "/include"
elif (os.path.exists(develop_cutlass_path) and develop_cutlass_path not in sys.path):
os.environ["TL_CUTLASS_PATH"] = develop_cutlass_path + "/include"
env.CUTLASS_INCLUDE_DIR = develop_cutlass_path + "/include"
cutlass_inc_path = os.path.join(THIRD_PARTY_ROOT, 'cutlass', 'include')
if os.path.exists(cutlass_inc_path):
os.environ["TL_CUTLASS_PATH"] = env.CUTLASS_INCLUDE_DIR = cutlass_inc_path
else:
logger.warning(CUTLASS_NOT_FOUND_MESSAGE)
# Initialize COMPOSABLE_KERNEL paths
if os.environ.get("TL_COMPOSABLE_KERNEL_PATH", None) is None:
install_ck_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "3rdparty", "composable_kernel")
develop_ck_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "..", "3rdparty", "composable_kernel")
if os.path.exists(install_ck_path):
os.environ["TL_COMPOSABLE_KERNEL_PATH"] = install_ck_path + "/include"
env.COMPOSABLE_KERNEL_INCLUDE_DIR = install_ck_path + "/include"
elif (os.path.exists(develop_ck_path) and develop_ck_path not in sys.path):
os.environ["TL_COMPOSABLE_KERNEL_PATH"] = develop_ck_path + "/include"
env.COMPOSABLE_KERNEL_INCLUDE_DIR = develop_ck_path + "/include"
ck_inc_path = os.path.join(THIRD_PARTY_ROOT, 'composable_kernel', 'include')
if os.path.exists(ck_inc_path):
os.environ["TL_COMPOSABLE_KERNEL_PATH"] = env.COMPOSABLE_KERNEL_INCLUDE_DIR = ck_inc_path
else:
logger.warning(COMPOSABLE_KERNEL_NOT_FOUND_MESSAGE)
# Initialize TL_TEMPLATE_PATH
if os.environ.get("TL_TEMPLATE_PATH", None) is None:
install_tl_template_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "src")
develop_tl_template_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "src")
if os.path.exists(install_tl_template_path):
os.environ["TL_TEMPLATE_PATH"] = install_tl_template_path
env.TILELANG_TEMPLATE_PATH = install_tl_template_path
elif (os.path.exists(develop_tl_template_path) and develop_tl_template_path not in sys.path):
os.environ["TL_TEMPLATE_PATH"] = develop_tl_template_path
env.TILELANG_TEMPLATE_PATH = develop_tl_template_path
tl_template_path = os.path.join(THIRD_PARTY_ROOT, "..", "src")
if os.path.exists(tl_template_path):
os.environ["TL_TEMPLATE_PATH"] = env.TILELANG_TEMPLATE_PATH = tl_template_path
else:
logger.warning(TL_TEMPLATE_NOT_FOUND_MESSAGE)
......
"""The profiler and convert to torch utils"""
import ctypes
import fcntl
import hashlib
import logging
import site
import sys
import sysconfig
import torch
import os
from pathlib import Path
from typing import List, Optional, Union, Callable, Dict, Tuple, Any
from tilelang import tvm as tvm
......@@ -25,155 +18,15 @@ from tilelang.jit.adapter.utils import is_cuda_target, is_hip_target, is_cpu_tar
from tilelang.utils.target import determine_target
from tilelang.utils.language import retrieve_func_from_module
from tilelang.utils.tensor import map_torch_type
from tilelang.contrib.cc import get_cplus_compiler, is_darwin
logger = logging.getLogger(__name__)
def get_cython_compiler() -> Optional[str]:
    """Locate a usable Cython executable.

    Searches, in priority order, the active virtual environment's bin
    directory, the user site-packages bin directory, and the system PATH
    for an executable named ``cython`` or ``cython3``.

    Returns
    -------
    out: Optional[str]
        The path to the Cython compiler, or None if none was found.
    """
    search_dirs = list(os.get_exec_path())
    user_base = site.getuserbase()
    if user_base:
        candidate = os.path.join(user_base, "bin")
        if os.path.exists(candidate):
            search_dirs.insert(0, candidate)
    # A virtual environment's bin directory takes the highest priority.
    if sys.prefix != sys.base_prefix:
        candidate = os.path.join(sys.prefix, "bin")
        if os.path.exists(candidate):
            search_dirs.insert(0, candidate)
    # "cython" anywhere on the search path wins over "cython3" anywhere.
    for executable in ("cython", "cython3"):
        for directory in search_dirs:
            full_path = os.path.join(directory, executable)
            if os.path.isfile(full_path) and os.access(full_path, os.X_OK):
                return full_path
    return None
# Add cache management functions at module level
def get_cache_dir() -> Path:
    """Return the per-Python-version compile cache directory, creating it if absent."""
    version_tag = f"py{sys.version_info.major}{sys.version_info.minor}"
    module_dir = os.path.dirname(os.path.abspath(__file__))
    target = Path(module_dir) / ".cycache" / version_tag
    target.mkdir(parents=True, exist_ok=True)
    return target
def get_cached_lib(source_code: str) -> Tuple[Optional[ctypes.CDLL], Path]:
    """Load a previously compiled library for *source_code*, if one exists.

    Returns (CDLL, path) when a valid cached .so is found, otherwise
    (None, path) where *path* is where the compiled library belongs.
    The check is serialized with an exclusive flock on a sidecar lock file.
    """
    digest = hashlib.sha256(source_code.encode()).hexdigest()
    lib_path = get_cache_dir() / f"{digest}.so"
    lock_path = lib_path.with_suffix('.lock')
    with open(lock_path, 'w') as lock:
        fcntl.flock(lock.fileno(), fcntl.LOCK_EX)
        try:
            if not lib_path.exists():
                return None, lib_path
            try:
                # Anything <= 1 KiB is assumed to be a truncated artifact
                # left behind by an interrupted build.
                if lib_path.stat().st_size <= 1024:
                    lib_path.unlink()  # remove the incomplete file
                    return None, lib_path
                return ctypes.CDLL(str(lib_path)), lib_path
            except Exception as e:
                logger.error(f"Failed to load cached library: {e}")
                return None, lib_path
        finally:
            fcntl.flock(lock.fileno(), fcntl.LOCK_UN)
# Compile the Cython wrapper at import time, caching the built .so keyed by
# a SHA-256 hash of the .pyx source so it is only rebuilt when that changes.
current_dir = os.path.dirname(os.path.abspath(__file__))
cython_wrapper_path = os.path.join(current_dir, "cython_wrapper.pyx")
with open(cython_wrapper_path, "r") as f:
    cython_wrapper_code = f.read()

cache_dir = get_cache_dir()
source_path = cache_dir / "cython_wrapper.cpp"
library_path = cache_dir / "cython_wrapper.so"
md5_path = cache_dir / "md5.txt"
code_hash = hashlib.sha256(cython_wrapper_code.encode()).hexdigest()
cache_path = cache_dir / f"{code_hash}.so"
lock_file = cache_path.with_suffix('.lock')

# Check if cached version exists and is valid
need_compile = True
if md5_path.exists() and library_path.exists():
    with open(md5_path, "r") as f:
        cached_hash = f.read().strip()
    if cached_hash == code_hash:
        logger.debug("Cython JIT adapter is up to date, no need to compile...")
        need_compile = False
    else:
        logger.info("Cython JIT adapter is out of date, need to recompile...")
else:
    logger.info("No cached version found for Cython JIT adapter, need to compile...")

if need_compile:
    logger.info("Waiting for lock to compile Cython JIT adapter...")
    with open(lock_file, 'w') as lock:
        fcntl.flock(lock.fileno(), fcntl.LOCK_EX)
        try:
            # After acquiring the lock, check again if the file has been compiled by another process
            if md5_path.exists() and library_path.exists():
                with open(md5_path, "r") as f:
                    cached_hash = f.read().strip()
                if cached_hash == code_hash:
                    logger.info("Another process has already compiled the file, using it...")
                    need_compile = False
            if need_compile:
                logger.info("Compiling Cython JIT adapter...")
                temp_path = cache_dir / f"temp_{code_hash}.so"
                # compile the cython_wrapper.pyx file into .cpp
                cython = get_cython_compiler()
                if cython is None:
                    raise Exception("Cython is not installed, please install it first.")
                # Bug fix: check the exit status of each external command;
                # os.system failures were previously ignored silently.
                if os.system(f"{cython} {cython_wrapper_path} --cplus -o {source_path}") != 0:
                    raise Exception("Cython translation of cython_wrapper.pyx failed.")
                python_include_path = sysconfig.get_path("include")
                cc = get_cplus_compiler()
                # macOS requires dynamic symbol lookup at load time; elsewhere
                # tell the linker to ignore unresolved symbols.
                dynamic_flag = '-Wl,-undefined,dynamic_lookup' if is_darwin(
                ) else '-Wl,--unresolved-symbols=ignore-all'
                command = f"{cc} -shared -pthread -fPIC -fwrapv -O2 -Wall -fno-strict-aliasing {dynamic_flag} -I{python_include_path} {source_path} -o {temp_path}"
                if os.system(command) != 0:
                    raise Exception(f"Compiler command failed: {command}")
                # rename the temp file to the library file
                temp_path.rename(library_path)
                # Bug fix: record the source hash only AFTER a successful
                # build; writing it up front let a failed compile mark a
                # stale library as up to date on the next import.
                with open(md5_path, "w") as f:
                    f.write(code_hash)
        except Exception as e:
            if 'temp_path' in locals() and temp_path.exists():
                temp_path.unlink()
            raise Exception(f"Failed to compile Cython JIT adapter: {e}") from e
        finally:
            if lock_file.exists():
                lock_file.unlink()

# add the .so file to the sys.path
cache_dir_str = str(cache_dir)
if cache_dir_str not in sys.path:
    sys.path.append(cache_dir_str)
from cython_wrapper import CythonKernelWrapper
try:
# Load cython_wrapper.api3.so in env.py
from cython_wrapper import CythonKernelWrapper
except ImportError:
# TODO: tolerance a build without cython backend
raise
class CythonKernelAdapter(BaseKernelAdapter):
......
"""Library information. This is a standalone file that can be used to get various info.
Modified from: https://github.com/mlc-ai/mlc-llm/blob/main/python/mlc_llm/libinfo.py
"""
#! pylint: disable=protected-access
import os
import sys
import os
TILELANG_LIBRARY_PATH = os.environ.get("TILELANG_LIBRARY_PATH", None)
def get_env_paths(env_var, splitter):
    """Split the value of *env_var* on *splitter*, stripping each entry.

    Returns an empty list when the variable is unset or empty.
    """
    value = os.environ.get(env_var)
    if not value:
        return []
    return [part.strip() for part in value.split(splitter)]
from .env import TL_LIBS
def get_dll_directories():
    """Collect existing directories that may contain tile lang shared libraries."""
    curr_dir = os.path.dirname(os.path.realpath(os.path.expanduser(__file__)))
    source_dir = os.path.abspath(os.path.join(curr_dir, ".."))
    candidates = [
        curr_dir,
        os.path.join(source_dir, "build"),  # local build
        os.path.join(source_dir, "build", "Release"),
        os.path.join(curr_dir, "lib"),  # pypi build
    ]
    if TILELANG_LIBRARY_PATH:
        candidates.append(TILELANG_LIBRARY_PATH)
    if "CONDA_PREFIX" in os.environ:
        candidates.append(os.path.join(os.environ["CONDA_PREFIX"], "lib"))
    # Honor the platform's dynamic-loader search path as a last resort.
    if sys.platform.startswith(("linux", "freebsd")):
        candidates.extend(get_env_paths("LD_LIBRARY_PATH", ":"))
    elif sys.platform.startswith("darwin"):
        candidates.extend(get_env_paths("DYLD_LIBRARY_PATH", ":"))
    elif sys.platform.startswith("win32"):
        candidates.extend(get_env_paths("PATH", ";"))
    return [os.path.abspath(p) for p in candidates if os.path.isdir(p)]
def find_lib_path(name, optional=False):
def find_lib_path(name: str, py_ext=False):
"""Find tile lang library
Parameters
......@@ -50,7 +15,9 @@ def find_lib_path(name, optional=False):
optional: boolean
Whether the library is required
"""
if sys.platform.startswith("linux") or sys.platform.startswith("freebsd"):
if py_ext:
lib_name = f"{name}.abi3.so"
elif sys.platform.startswith("linux") or sys.platform.startswith("freebsd"):
lib_name = f"lib{name}.so"
elif sys.platform.startswith("win32"):
lib_name = f"{name}.dll"
......@@ -59,11 +26,11 @@ def find_lib_path(name, optional=False):
else:
lib_name = f"lib{name}.so"
dll_paths = get_dll_directories()
lib_dll_path = [os.path.join(p, lib_name) for p in dll_paths]
lib_found = [p for p in lib_dll_path if os.path.exists(p) and os.path.isfile(p)]
if not lib_found and not optional:
for lib_root in TL_LIBS:
lib_dll_path = os.path.join(lib_root, lib_name)
if os.path.exists(lib_dll_path) and os.path.isfile(lib_dll_path):
return lib_dll_path
else:
message = (f"Cannot find libraries: {lib_name}\n" + "List of candidates:\n" +
"\n".join(lib_dll_path))
"\n".join(TL_LIBS))
raise RuntimeError(message)
return lib_found
import os
import subprocess
from typing import Union
# Get the absolute path of the current Python script's directory
current_dir = os.path.dirname(os.path.abspath(__file__))
# Get the absolute path of the project root directory (one level above the current directory)
develop_project_root_dir = os.path.abspath(os.path.join(current_dir, ".."))
installed_project_root_dir = os.path.abspath(os.path.join(current_dir))
# Define the path to the VERSION file located in the project root directory.
# A develop (source) checkout keeps VERSION one level above this module;
# an installed package ships it alongside the module.
develop_version_file_path = os.path.join(develop_project_root_dir, "VERSION")
installed_version_file_path = os.path.join(installed_project_root_dir, "VERSION")
if os.path.exists(develop_version_file_path):
    version_file_path = develop_version_file_path
elif os.path.exists(installed_version_file_path):
    version_file_path = installed_version_file_path
else:
    # Fail loudly at import time: without VERSION no version can be reported.
    raise FileNotFoundError("VERSION file not found in the project root directory")
# Read and store the version information from the VERSION file
# Use 'strip()' to remove any leading/trailing whitespace or newline characters
with open(version_file_path, "r") as version_file:
    __version__ = version_file.read().strip()
def get_git_commit_id() -> Union[str, None]:
    """Return the current git HEAD commit hash, or None if it cannot be read."""
    repo_dir = os.path.dirname(os.path.abspath(__file__))
    try:
        output = subprocess.check_output(
            ["git", "rev-parse", "HEAD"],
            cwd=repo_dir,
            stderr=subprocess.DEVNULL,
            encoding="utf-8",
        )
    # FileNotFoundError is raised when git is not installed.
    except (subprocess.SubprocessError, FileNotFoundError):
        return None
    return output.strip()
# Append git commit hash to version if not already present
# NOTE(lei): Although the local commit id cannot capture locally staged changes,
# the local commit id can help mitigate issues caused by incorrect cache to some extent,
# so it should still be kept.
# Check WITH_COMMITID environment variable to control whether to include commit ID.
# Defaults to enabled; any value other than "true" (case-insensitive) disables it.
WITH_COMMITID = os.environ.get("WITH_COMMITID", "True").lower() == "true"
if WITH_COMMITID and "+" not in __version__ and (commit_id := get_git_commit_id()):
    # Use short commit ID (8 characters) for better compatibility
    short_commit_id = commit_id[:8]
    __version__ = f"{__version__}+{short_commit_id}"

# Define the public API for the module
__all__ = ["__version__"]
[tox]
envlist = py38,py39,py310,py311,py312
isolated_build = False
[testenv:py{38,39,310,311,312}]
skip_install = false
deps =
wheel
build
setenv =
WITH_COMMITID = TRUE
PYTHON_EXECUTABLE = {envpython}
Python3_EXECUTABLE = {envpython}
commands =
python -m build --wheel -o {toxinidir}/dist
[testenv:py{38,39,310,311,312}-pypi]
skip_install = false
setenv =
PYPI_BUILD = TRUE
WITH_COMMITID = FALSE
PYTHON_EXECUTABLE = {envpython}
Python3_EXECUTABLE = {envpython}
commands =
python setup.py bdist_wheel --plat-name=manylinux2014_x86_64
[testenv:audit_manylinux2014]
skip_install = true
allowlist_externals =
bash
deps =
auditwheel
patchelf
commands =
bash -c 'auditwheel repair -L=/lib --exclude=/usr/local/cuda* --exclude=libcuda.so.1 --plat=manylinux2014_x86_64 dist/*'
[testenv:py38]
basepython = python3.8
[testenv:py39]
basepython = python3.9
[testenv:py310]
basepython = python3.10
[testenv:py311]
basepython = python3.11
[testenv:py312]
basepython = python3.12
from __future__ import annotations
import os
import platform
import subprocess
from typing import Optional
from pathlib import Path
# Repository root (this file sits at the top level, next to VERSION).
ROOT = Path(__file__).parent
# Base semantic version; the dynamic-metadata hook may append local labels.
base_version = (ROOT / 'VERSION').read_text().strip()
def _read_cmake_bool(i: str | None, default=False):
if i is None:
return default
return i.lower() not in ('0', 'false', 'off', 'no', 'n', '')
def get_git_commit_id() -> str:
    """Return the current git HEAD commit hash.

    Runs git in this file's directory. Returns the literal string 'unknown'
    (never None, despite the import of Optional elsewhere) when the git
    command fails — e.g. when building outside a checkout.
    """
    r = subprocess.run(['git', 'rev-parse', 'HEAD'],
                       cwd=ROOT,
                       stdout=subprocess.PIPE,
                       stderr=subprocess.PIPE,
                       encoding='utf-8')
    if r.returncode == 0:
        return r.stdout.strip()
    else:
        # Truthy sentinel: callers using `if hash := ...` will still tag the
        # version (as '+...gitunknown') for non-git builds.
        return 'unknown'
def dynamic_metadata(
    field: str,
    settings: dict[str, object] | None = None,
) -> str:
    """scikit-build-core dynamic-metadata hook: compute the wheel version.

    Starts from the VERSION file contents and, unless NO_VERSION_LABEL is
    set, appends a local version label '+<backend>.git<shorthash>' that
    records the compute backend and git commit the wheel was built from.
    Only the 'version' field is supported.
    """
    assert field == 'version'
    version = base_version
    if not _read_cmake_bool(os.environ.get('NO_VERSION_LABEL')):
        exts = []
        backend = None
        if _read_cmake_bool(os.environ.get('NO_TOOLCHAIN_VERSION')):
            # Backend suffix explicitly suppressed.
            pass
        elif platform.system() == 'Darwin':
            # only on macosx_11_0_arm64, not necessary
            # backend = 'metal'
            pass
        elif _read_cmake_bool(os.environ.get('USE_ROCM', '')):
            backend = 'rocm'
        elif 'USE_CUDA' in os.environ and not _read_cmake_bool(os.environ.get('USE_CUDA')):
            # USE_CUDA was explicitly disabled -> CPU-only build.
            backend = 'cpu'
        else:  # cuda
            # Read nvcc version from env.
            # This is not exactly how it should be,
            # but works for now if building in a nvidia/cuda image.
            if cuda_version := os.environ.get('CUDA_VERSION'):
                major, minor, *_ = cuda_version.split('.')
                backend = f'cu{major}{minor}'
            else:
                backend = 'cuda'
        if backend:
            exts.append(backend)
        if _read_cmake_bool(os.environ.get('NO_GIT_VERSION')):
            pass
        elif git_hash := get_git_commit_id():
            # NOTE(review): get_git_commit_id() returns the truthy string
            # 'unknown' on failure, so such builds are tagged 'gitunknown'
            # rather than skipping the git label — confirm intended.
            exts.append(f'git{git_hash[:8]}')
        if exts:
            version += '+' + '.'.join(exts)
    return version
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment