"vscode:/vscode.git/clone" did not exist on "319bc6b1bdd1325a3ce99084f9823ca29d213d96"
setup.py 34.2 KB
Newer Older
1
2
3
import fcntl
import functools
import hashlib
4
5
6
7
8
9
import io
import subprocess
import shutil
from setuptools import setup, find_packages, Extension
from setuptools.command.build_py import build_py
from setuptools.command.sdist import sdist
10
from typing import List, Optional
11
12
13
import re
import tarfile
from io import BytesIO
14
from pathlib import Path
15
16
import os
import sys
17
18
import site
import sysconfig
19
import urllib.request
20
from packaging.version import Version
21
22
23
import platform
import multiprocessing
from setuptools.command.build_ext import build_ext
24
import importlib
25
26
27
28
29
30
31
32
33
import logging

# Configure logging with basic settings
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S')

logger = logging.getLogger(__name__)
34
35
36
37
38
39

# Environment variables False/True
PYPI_BUILD = os.environ.get("PYPI_BUILD", "False").lower() == "true"
PACKAGE_NAME = "tilelang"
ROOT_DIR = os.path.dirname(__file__)

40
41
# Add LLVM control environment variable
USE_LLVM = os.environ.get("USE_LLVM", "False").lower() == "true"
42
43
# Add ROCM control environment variable
USE_ROCM = os.environ.get("USE_ROCM", "False").lower() == "true"
44
45
# Build with Debug mode
DEBUG_MODE = os.environ.get("DEBUG_MODE", "False").lower() == "true"
46
47
# Include commit ID in wheel filename and package metadata
WITH_COMMITID = os.environ.get("WITH_COMMITID", "True").lower() == "true"
48

49
50
51
52
53
54
55
56
57
58

def load_module_from_path(module_name, path):
    spec = importlib.util.spec_from_file_location(module_name, path)
    module = importlib.util.module_from_spec(spec)
    sys.modules[module_name] = module
    spec.loader.exec_module(module)
    return module


envs = load_module_from_path('env', os.path.join(ROOT_DIR, PACKAGE_NAME, 'env.py'))
59

60
CUDA_HOME = envs.CUDA_HOME
61
62
63
64
ROCM_HOME = envs.ROCM_HOME

# Check if both CUDA and ROCM are enabled
if USE_ROCM and not ROCM_HOME:
65
66
    raise ValueError(
        "ROCM support is enabled (USE_ROCM=True) but ROCM_HOME is not set or detected.")
67
68

if not USE_ROCM and not CUDA_HOME:
69
70
    raise ValueError(
        "CUDA support is enabled by default (USE_ROCM=False) but CUDA_HOME is not set or detected.")
71

72
73
# Ensure one of CUDA or ROCM is available
if not (CUDA_HOME or ROCM_HOME):
74
75
76
    raise ValueError(
        "Failed to automatically detect CUDA or ROCM installation. Please set the CUDA_HOME or ROCM_HOME environment variable manually (e.g., export CUDA_HOME=/usr/local/cuda or export ROCM_HOME=/opt/rocm)."
    )
77

78
79
80
81
# TileLang only supports Linux platform
assert sys.platform.startswith("linux"), "TileLang only supports Linux platform (including WSL)."


82
83
84
85
86
def _is_linux_like():
    return (sys.platform == "darwin" or sys.platform.startswith("linux") or
            sys.platform.startswith("freebsd"))


87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
def get_path(*filepath) -> str:
    return os.path.join(ROOT_DIR, *filepath)


def get_requirements(file_path: str = "requirements.txt") -> List[str]:
    """Get Python package dependencies from requirements.txt."""
    with open(get_path(file_path)) as f:
        requirements = f.read().strip().split("\n")
    return requirements


def find_version(version_file_path: str) -> str:
    """Extract version information from the given filepath.

    Adapted from https://github.com/ray-project/ray/blob/0b190ee1160eeca9796bc091e07eaebf4c85b511/python/setup.py
    """
    # Read and store the version information from the VERSION file
    # Use 'strip()' to remove any leading/trailing whitespace or newline characters
    if not os.path.exists(version_file_path):
        raise FileNotFoundError(f"Version file not found at {version_file_path}")
    with open(version_file_path, "r") as version_file:
        version = version_file.read().strip()
    return version


def get_nvcc_cuda_version():
    """Get the CUDA version from nvcc.

    Adapted from https://github.com/NVIDIA/apex/blob/8b7a1ff183741dd8f9b87e7bafd04cfde99cea28/setup.py
    """
117
118
    nvcc_path = os.path.join(CUDA_HOME, "bin", "nvcc")
    nvcc_output = subprocess.check_output([nvcc_path, "-V"], universal_newlines=True)
119
120
    output = nvcc_output.split()
    release_idx = output.index("release") + 1
121
    nvcc_cuda_version = Version(output[release_idx].split(",")[0])
122
123
124
    return nvcc_cuda_version


125
126
127
128
129
130
131
def get_rocm_version():
    """Get the ROCM version from rocminfo."""
    rocm_output = subprocess.check_output(["rocminfo"], universal_newlines=True)
    # Parse ROCM version from output
    # Example output: ROCM version: x.y.z-...
    match = re.search(r'ROCm Version: (\d+\.\d+\.\d+)', rocm_output)
    if match:
132
        return Version(match.group(1))
133
134
    else:
        rocm_path = os.environ.get("ROCM_PATH", "/opt/rocm")
135
136
        rocm_version_file = os.path.join(rocm_path, "lib", "cmake", "rocm",
                                         "rocm-config-version.cmake")
137
138
139
140
141
        if os.path.exists(rocm_version_file):
            with open(rocm_version_file, "r") as f:
                content = f.read()
                match = re.search(r'set\(PACKAGE_VERSION "(\d+\.\d+\.\d+)"', content)
                if match:
142
                    return Version(match.group(1))
143
    # return a default
144
    return Version("5.0.0")
145
146


147
def get_tilelang_version(with_cuda=True, with_system_info=True, with_commit_id=False) -> str:
148
149
150
151
    version = find_version(get_path(".", "VERSION"))
    local_version_parts = []
    if with_system_info:
        local_version_parts.append(get_system_info().replace("-", "."))
152

153
    if with_cuda:
154
155
156
157
158
159
160
161
162
163
164
        if USE_ROCM:
            if ROCM_HOME:
                rocm_version = str(get_rocm_version())
                rocm_version_str = rocm_version.replace(".", "")[:3]
                local_version_parts.append(f"rocm{rocm_version_str}")
        else:
            if CUDA_HOME:
                cuda_version = str(get_nvcc_cuda_version())
                cuda_version_str = cuda_version.replace(".", "")[:3]
                local_version_parts.append(f"cu{cuda_version_str}")

165
166
    if local_version_parts:
        version += f"+{'.'.join(local_version_parts)}"
167
168
169
170
171
172
173
174

    if with_commit_id:
        commit_id = None
        try:
            commit_id = subprocess.check_output(['git', 'rev-parse', 'HEAD'],
                                                stderr=subprocess.DEVNULL,
                                                encoding='utf-8').strip()
        except subprocess.SubprocessError as error:
175
            logger.warning(f"Ignore commit id because failed to get git commit id: {str(error)}")
176
        if commit_id:
177
178
179
180
181
182
            # Truncate commit ID to 8 characters to keep version string reasonable
            short_commit_id = commit_id[:8]
            if local_version_parts:
                version += f".{short_commit_id}"
            else:
                version += f"+{short_commit_id}"
183

184
185
186
    return version


187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
@functools.lru_cache(maxsize=None)
def get_cplus_compiler():
    """Return the path to the default C/C++ compiler.

    Returns
    -------
    out: Optional[str]
        The path to the default C/C++ compiler, or None if none was found.
    """

    if not _is_linux_like():
        return None

    env_cxx = os.environ.get("CXX") or os.environ.get("CC")
    if env_cxx:
        return env_cxx
    cc_names = ["g++", "clang++", "c++"]
    dirs_in_path = os.get_exec_path()
    for cc in cc_names:
        for d in dirs_in_path:
            cc_path = os.path.join(d, cc)
            if os.path.isfile(cc_path) and os.access(cc_path, os.X_OK):
                return cc_path
    return None


213
@functools.lru_cache(maxsize=None)
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
def get_cython_compiler() -> Optional[str]:
    """Return the path to the Cython compiler.

    Returns
    -------
    out: Optional[str]
        The path to the Cython compiler, or None if none was found.
    """

    cython_names = ["cython", "cython3"]

    # Check system PATH
    dirs_in_path = list(os.get_exec_path())

    # Add user site-packages bin directory
    user_base = site.getuserbase()
    if user_base:
        user_bin = os.path.join(user_base, "bin")
        if os.path.exists(user_bin):
            dirs_in_path = [user_bin] + dirs_in_path

    # If in a virtual environment, add its bin directory
    if sys.prefix != sys.base_prefix:
        venv_bin = os.path.join(sys.prefix, "bin")
        if os.path.exists(venv_bin):
            dirs_in_path = [venv_bin] + dirs_in_path

    for cython_name in cython_names:
        for d in dirs_in_path:
            cython_path = os.path.join(d, cython_name)
            if os.path.isfile(cython_path) and os.access(cython_path, os.X_OK):
                return cython_path
    return None


249
250
251
252
253
254
255
256
257
258
259
@functools.lru_cache(maxsize=None)
def get_cmake_path() -> str:
    """Return the path to the CMake compiler.
    """
    # found which cmake is used
    cmake_path = shutil.which("cmake")
    if not os.path.exists(cmake_path):
        raise Exception("CMake is not installed, please install it first.")
    return cmake_path


260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
def get_system_info():
    system = platform.system().lower()
    if system == "linux":
        try:
            with open("/etc/os-release") as f:
                os_release = f.read()
            version_id_match = re.search(r'VERSION_ID="(\d+\.\d+)"', os_release)
            if version_id_match:
                version_id = version_id_match.group(1)
                distro = "ubuntu"
                return f"{distro}-{version_id}"
        except FileNotFoundError:
            pass
    return system


def read_readme() -> str:
    """Read the README file if present."""
    p = get_path("README.md")
    if os.path.isfile(p):
        return io.open(get_path("README.md"), "r", encoding="utf-8").read()
    else:
        return ""


def download_and_extract_llvm(version, is_aarch64=False, extract_path="3rdparty"):
    """
    Downloads and extracts the specified version of LLVM for the given platform.
    Args:
        version (str): The version of LLVM to download.
        is_aarch64 (bool): True if the target platform is aarch64, False otherwise.
        extract_path (str): The directory path where the archive will be extracted.

    Returns:
        str: The path where the LLVM archive was extracted.
    """
    ubuntu_version = "16.04"
    if version >= "16.0.0":
        ubuntu_version = "20.04"
    elif version >= "13.0.0":
        ubuntu_version = "18.04"

    base_url = (f"https://github.com/llvm/llvm-project/releases/download/llvmorg-{version}")
    file_name = f"clang+llvm-{version}-{'aarch64-linux-gnu' if is_aarch64 else f'x86_64-linux-gnu-ubuntu-{ubuntu_version}'}.tar.xz"

    download_url = f"{base_url}/{file_name}"

    # Download the file
308
    logger.info(f"Downloading {file_name} from {download_url}")
309
310
311
312
313
314
315
316
317
318
319
320
    with urllib.request.urlopen(download_url) as response:
        if response.status != 200:
            raise Exception(f"Download failed with status code {response.status}")
        file_content = response.read()
    # Ensure the extract path exists
    os.makedirs(extract_path, exist_ok=True)

    # if the file already exists, remove it
    if os.path.exists(os.path.join(extract_path, file_name)):
        os.remove(os.path.join(extract_path, file_name))

    # Extract the file
321
    logger.info(f"Extracting {file_name} to {extract_path}")
322
323
324
    with tarfile.open(fileobj=BytesIO(file_content), mode="r:xz") as tar:
        tar.extractall(path=extract_path)

325
    logger.info("Download and extraction completed successfully.")
326
327
328
329
    return os.path.abspath(os.path.join(extract_path, file_name.replace(".tar.xz", "")))


package_data = {
330
    "tilelang": ["py.typed", "*pyx"],
331
332
333
334
335
336
337
338
}

LLVM_VERSION = "10.0.1"
IS_AARCH64 = False  # Set to True if on an aarch64 platform
EXTRACT_PATH = "3rdparty"  # Default extraction path


def update_submodules():
339
340
341
342
343
344
345
346
347
348
349
350
    """Updates git submodules if in a git repository."""

    def is_git_repo():
        try:
            # Check if current directory is a git repository
            subprocess.check_output(["git", "rev-parse", "--is-inside-work-tree"],
                                    stderr=subprocess.STDOUT)
            return True
        except (subprocess.CalledProcessError, FileNotFoundError):
            return False

    if not is_git_repo():
351
        logger.info("Info: Not a git repository, skipping submodule update.")
352
353
        return

354
355
356
357
358
359
360
361
362
363
364
365
366
367
    try:
        subprocess.check_call(["git", "submodule", "update", "--init", "--recursive"])
    except subprocess.CalledProcessError as error:
        raise RuntimeError("Failed to update submodules") from error


def setup_llvm_for_tvm():
    """Downloads and extracts LLVM, then configures TVM to use it."""
    # Assume the download_and_extract_llvm function and its dependencies are defined elsewhere in this script
    extract_path = download_and_extract_llvm(LLVM_VERSION, IS_AARCH64, EXTRACT_PATH)
    llvm_config_path = os.path.join(extract_path, "bin", "llvm-config")
    return extract_path, llvm_config_path


368
369
370
371
372
373
def patch_libs(libpath):
    """
    tvm and tilelang libs are copied from elsewhere into wheels
    and have a hard-coded rpath.
    Set rpath to the directory of libs so auditwheel works well.
    """
374
375
376
377
378
379
380
381
382
    # check if patchelf is installed
    # find patchelf in the system
    patchelf_path = shutil.which("patchelf")
    if not patchelf_path:
        logger.warning(
            "patchelf is not installed, which is required for auditwheel to work for compatible wheels."
        )
        return
    subprocess.run([patchelf_path, '--set-rpath', '$ORIGIN', libpath])
383
384


385
386
387
388
389
390
391
392
class TileLangBuilPydCommand(build_py):
    """Customized setuptools install command - builds TVM after setting up LLVM."""

    def run(self):
        build_py.run(self)
        self.run_command("build_ext")
        build_ext_cmd = self.get_finalized_command("build_ext")
        build_temp_dir = build_ext_cmd.build_temp
393
        ext_modules = build_ext_cmd.extensions
394
        for ext in ext_modules:
395
            extdir = build_ext_cmd.get_ext_fullpath(ext.name)
396
            logger.info(f"Extension {ext.name} output directory: {extdir}")
397
398

        ext_output_dir = os.path.dirname(extdir)
399
400
        logger.info(f"Extension output directory (parent): {ext_output_dir}")
        logger.info(f"Build temp directory: {build_temp_dir}")
401

402
403
404
        # copy cython files
        CYTHON_SRC = [
            "tilelang/jit/adapter/cython/cython_wrapper.pyx",
405
            "tilelang/jit/adapter/cython/.cycache",
406
407
408
409
410
411
        ]
        for item in CYTHON_SRC:
            source_dir = os.path.join(ROOT_DIR, item)
            target_dir = os.path.join(self.build_lib, item)
            if os.path.isdir(source_dir):
                self.mkpath(target_dir)
412
                self.copy_tree(source_dir, target_dir)
413
414
415
416
            else:
                target_dir = os.path.dirname(target_dir)
                if not os.path.exists(target_dir):
                    os.makedirs(target_dir)
417
                shutil.copy2(source_dir, target_dir)
418

419
        # copy the tl_templates
420
421
422
423
424
425
426
427
        TILELANG_SRC = [
            "src/tl_templates",
        ]
        for item in TILELANG_SRC:
            source_dir = os.path.join(ROOT_DIR, item)
            target_dir = os.path.join(self.build_lib, PACKAGE_NAME, item)
            if os.path.isdir(source_dir):
                self.mkpath(target_dir)
428
                self.copy_tree(source_dir, target_dir)
429
430
431
432
433
            else:
                target_dir = os.path.dirname(target_dir)
                if not os.path.exists(target_dir):
                    os.makedirs(target_dir)
                shutil.copy2(source_dir, target_dir)
434

435
        TVM_PREBUILD_ITEMS = [
436
437
438
439
            "libtvm_runtime.so",
            "libtvm.so",
            "libtilelang.so",
            "libtilelang_module.so",
440
        ]
441

442
443
444
445
446
447
448
        potential_dirs = [
            ext_output_dir,
            self.build_lib,
            build_temp_dir,
            os.path.join(ROOT_DIR, "build"),
        ]

449
        for item in TVM_PREBUILD_ITEMS:
450
451
452
453
454
455
456
457
            source_lib_file = None
            for dir in potential_dirs:
                candidate = os.path.join(dir, item)
                if os.path.exists(candidate):
                    source_lib_file = candidate
                    break

            if source_lib_file:
458
                patch_libs(source_lib_file)
459
460
461
462
463
                target_dir_release = os.path.join(self.build_lib, PACKAGE_NAME, "lib")
                target_dir_develop = os.path.join(PACKAGE_NAME, "lib")
                os.makedirs(target_dir_release, exist_ok=True)
                os.makedirs(target_dir_develop, exist_ok=True)
                shutil.copy2(source_lib_file, target_dir_release)
464
                logger.info(f"Copied {source_lib_file} to {target_dir_release}")
465
                shutil.copy2(source_lib_file, target_dir_develop)
466
                logger.info(f"Copied {source_lib_file} to {target_dir_develop}")
467
468
                os.remove(source_lib_file)
            else:
469
                logger.info(f"WARNING: {item} not found in any expected directories!")
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484

        TVM_CONFIG_ITEMS = [
            f"{build_temp_dir}/config.cmake",
        ]
        for item in TVM_CONFIG_ITEMS:
            source_dir = os.path.join(ROOT_DIR, item)
            # only copy the file
            file_name = os.path.basename(item)
            target_dir = os.path.join(self.build_lib, PACKAGE_NAME, file_name)
            target_dir = os.path.dirname(target_dir)
            if not os.path.exists(target_dir):
                os.makedirs(target_dir)
            if os.path.exists(source_dir):
                shutil.copy2(source_dir, target_dir)
            else:
485
                logger.info(f"INFO: {source_dir} does not exist.")
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504

        TVM_PACAKGE_ITEMS = [
            "3rdparty/tvm/src",
            "3rdparty/tvm/python",
            "3rdparty/tvm/licenses",
            "3rdparty/tvm/conftest.py",
            "3rdparty/tvm/CONTRIBUTORS.md",
            "3rdparty/tvm/KEYS",
            "3rdparty/tvm/LICENSE",
            "3rdparty/tvm/README.md",
            "3rdparty/tvm/mypy.ini",
            "3rdparty/tvm/pyproject.toml",
            "3rdparty/tvm/version.py",
        ]
        for item in TVM_PACAKGE_ITEMS:
            source_dir = os.path.join(ROOT_DIR, item)
            target_dir = os.path.join(self.build_lib, PACKAGE_NAME, item)
            if os.path.isdir(source_dir):
                self.mkpath(target_dir)
505
                self.copy_tree(source_dir, target_dir)
506
507
508
509
510
511
512
513
            else:
                target_dir = os.path.dirname(target_dir)
                if not os.path.exists(target_dir):
                    os.makedirs(target_dir)
                shutil.copy2(source_dir, target_dir)

        # Copy CUTLASS to the package directory
        CUTLASS_PREBUILD_ITEMS = [
Lei Wang's avatar
Lei Wang committed
514
515
            "3rdparty/cutlass/include",
            "3rdparty/cutlass/tools",
516
517
518
519
520
521
        ]
        for item in CUTLASS_PREBUILD_ITEMS:
            source_dir = os.path.join(ROOT_DIR, item)
            target_dir = os.path.join(self.build_lib, PACKAGE_NAME, item)
            if os.path.isdir(source_dir):
                self.mkpath(target_dir)
522
                self.copy_tree(source_dir, target_dir)
523
524
525
526
527
528
529
            else:
                target_dir = os.path.dirname(target_dir)
                if not os.path.exists(target_dir):
                    os.makedirs(target_dir)
                shutil.copy2(source_dir, target_dir)
        # copy compoable kernel to the package directory
        CK_PREBUILD_ITEMS = [
Lei Wang's avatar
Lei Wang committed
530
531
            "3rdparty/composable_kernel/include",
            "3rdparty/composable_kernel/library",
532
533
534
535
536
537
        ]
        for item in CK_PREBUILD_ITEMS:
            source_dir = os.path.join(ROOT_DIR, item)
            target_dir = os.path.join(self.build_lib, PACKAGE_NAME, item)
            if os.path.isdir(source_dir):
                self.mkpath(target_dir)
538
                self.copy_tree(source_dir, target_dir)
539
540
541
542
543
544
545
546
547
548
549
            else:
                target_dir = os.path.dirname(target_dir)
                if not os.path.exists(target_dir):
                    os.makedirs(target_dir)
                shutil.copy2(source_dir, target_dir)

        # copy compoable kernel to the package directory
        TL_CONFIG_ITEMS = ["CMakeLists.txt", "VERSION", "README.md", "LICENSE"]
        for item in TL_CONFIG_ITEMS:
            source_dir = os.path.join(ROOT_DIR, item)
            target_dir = os.path.join(self.build_lib, PACKAGE_NAME, item)
550
551
552
            # if is VERSION file, replace the content with the new version with commit id
            if not PYPI_BUILD and item == "VERSION":
                version = get_tilelang_version(
553
                    with_cuda=False, with_system_info=False, with_commit_id=WITH_COMMITID)
554
555
556
557
558
559
560
561
                target_dir = os.path.dirname(target_dir)
                if not os.path.exists(target_dir):
                    os.makedirs(target_dir)
                with open(os.path.join(target_dir, item), "w") as f:
                    print(f"Writing {version} to {os.path.join(target_dir, item)}")
                    f.write(version)
                continue

562
563
            if os.path.isdir(source_dir):
                self.mkpath(target_dir)
564
                self.copy_tree(source_dir, target_dir)
565
566
567
568
569
570
571
572
573
574
575
576
577
            else:
                target_dir = os.path.dirname(target_dir)
                if not os.path.exists(target_dir):
                    os.makedirs(target_dir)
                shutil.copy2(source_dir, target_dir)


class TileLangSdistCommand(sdist):
    """Customized setuptools sdist command - includes the pyproject.toml file."""

    def make_distribution(self):
        self.distribution.metadata.name = PACKAGE_NAME
        self.distribution.metadata.version = get_tilelang_version(
578
            with_cuda=False, with_system_info=False, with_commit_id=WITH_COMMITID)
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
        super().make_distribution()


class CMakeExtension(Extension):
    """
    A specialized setuptools Extension class for building a CMake project.

    :param name: Name of the extension module.
    :param sourcedir: Directory containing the top-level CMakeLists.txt.
    """

    def __init__(self, name, sourcedir=""):
        # We pass an empty 'sources' list because
        # the actual build is handled by CMake, not setuptools.
        super().__init__(name=name, sources=[])

        # Convert the source directory to an absolute path
        # so that CMake can correctly locate the CMakeLists.txt.
        self.sourcedir = os.path.abspath(sourcedir)


600
601
602
603
604
605
606
607
608
609
610
class CythonExtension(Extension):
    """
    A specialized setuptools Extension class for building a Cython project.
    """

    def __init__(self, name, sourcedir=""):
        super().__init__(name=name, sources=[])
        self.sourcedir = os.path.abspath(sourcedir)


class TilelangExtensionBuild(build_ext):
611
612
613
614
615
616
617
618
619
620
621
    """
    Custom build_ext command for CMake-based projects.

    This class overrides the 'run' method to ensure that CMake is available,
    and then iterates over all extensions defined as CMakeExtension,
    delegating the actual build logic to 'build_cmake'.
    """

    def run(self):
        # Check if CMake is installed and accessible by attempting to run 'cmake --version'.
        try:
622
623
624
625
            cmake_path = get_cmake_path()
            if not cmake_path:
                raise Exception("CMake is not installed, please install it first.")
            subprocess.check_output([cmake_path, "--version"])
626
        except OSError as error:
627
            # If CMake is not found, raise an error.
628
629
            raise RuntimeError(
                "CMake must be installed to build the following extensions") from error
630
631
632
633
634

        update_submodules()

        # Build each extension (of type CMakeExtension) using our custom method.
        for ext in self.extensions:
635
636
637
638
639
640
            if isinstance(ext, CythonExtension):
                self.build_cython(ext)
            elif isinstance(ext, CMakeExtension):
                self.build_cmake(ext)
            else:
                raise ValueError(f"Unsupported extension type: {type(ext)}")
641

642
643
644
645
646
647
648
649
650
651
652
653
654
        # To make it works with editable install,
        # we need to copy the lib*.so files to the tilelang/lib directory
        import glob
        files = glob.glob("*.so")
        if os.path.exists(PACKAGE_NAME):
            target_lib_dir = os.path.join(PACKAGE_NAME, "lib")
            for file in files:
                if not os.path.exists(target_lib_dir):
                    os.makedirs(target_lib_dir)
                shutil.copy(file, target_lib_dir)
                # remove the original file
                os.remove(file)

655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
    def build_cython(self, ext):
        """
        Build a single Cython-based extension.

        :param ext: The extension (an instance of CythonExtension).
        """
        cython_compiler = get_cython_compiler()
        if not cython_compiler:
            logger.info("Cython compiler not found, install it first")
            subprocess.check_call(["pip", "install", "cython"])
            cython_compiler = get_cython_compiler()
            if not cython_compiler:
                raise Exception("Cython is not installed, please install it first.")

        logger.info(f"Using Cython compiler: {cython_compiler}")
        cython_warpper_dir = os.path.join(ext.sourcedir, "tilelang", "jit", "adapter", "cython")
        cython_wrapper_path = os.path.join(cython_warpper_dir, "cython_wrapper.pyx")
        py_version = f"py{sys.version_info.major}{sys.version_info.minor}"
        cache_dir = Path(cython_warpper_dir) / ".cycache" / py_version
        os.makedirs(cache_dir, exist_ok=True)

        with open(cython_wrapper_path, "r") as f:
            cython_wrapper_code = f.read()
            source_path = cache_dir / "cython_wrapper.cpp"
            library_path = cache_dir / "cython_wrapper.so"
            md5_path = cache_dir / "md5.txt"
            code_hash = hashlib.sha256(cython_wrapper_code.encode()).hexdigest()
            cache_path = cache_dir / f"{code_hash}.so"
            lock_file = cache_path.with_suffix('.lock')

            # Check if cached version exists and is valid
            need_compile = True
            if md5_path.exists() and library_path.exists():
                with open(md5_path, "r") as f:
                    cached_hash = f.read().strip()
                    if cached_hash == code_hash:
691
                        logger.info("Cython JIT adapter is up to date, no need to compile...")
692
693
                        need_compile = False
                    else:
694
                        logger.info("Cython JIT adapter is out of date, need to recompile...")
695
            else:
696
                logger.info("No cached version found for Cython JIT adapter, need to compile...")
697
698

            if need_compile:
699
                logger.info("Waiting for lock to compile Cython JIT adapter...")
700
701
702
703
704
705
706
707
708
709
710
711
712
713
                with open(lock_file, 'w') as lock:
                    fcntl.flock(lock.fileno(), fcntl.LOCK_EX)
                    try:
                        # After acquiring the lock, check again if the file has been compiled by another process
                        if md5_path.exists() and library_path.exists():
                            with open(md5_path, "r") as f:
                                cached_hash = f.read().strip()
                                if cached_hash == code_hash:
                                    logger.info(
                                        "Another process has already compiled the file, using it..."
                                    )
                                    need_compile = False

                        if need_compile:
714
                            logger.info("Compiling Cython JIT adapter...")
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
                            temp_path = cache_dir / f"temp_{code_hash}.so"

                            with open(md5_path, "w") as f:
                                f.write(code_hash)

                            # compile the cython_wrapper.pyx file into .cpp
                            cython = get_cython_compiler()
                            if cython is None:
                                raise Exception("Cython is not installed, please install it first.")
                            os.system(f"{cython} {cython_wrapper_path} --cplus -o {source_path}")
                            python_include_path = sysconfig.get_path("include")
                            cc = get_cplus_compiler()
                            command = f"{cc} -shared -pthread -fPIC -fwrapv -O2 -Wall -fno-strict-aliasing -I{python_include_path} {source_path} -o {temp_path}"
                            os.system(command)

                            # rename the temp file to the library file
                            temp_path.rename(library_path)
                    except Exception as e:
                        if 'temp_path' in locals() and temp_path.exists():
                            temp_path.unlink()
735
                        raise Exception(f"Failed to compile Cython JIT adapter: {e}") from e
736
737
738
739
740
741
742
743
744
                    finally:
                        if lock_file.exists():
                            lock_file.unlink()

            # add the .so file to the sys.path
            cache_dir_str = str(cache_dir)
            if cache_dir_str not in sys.path:
                sys.path.append(cache_dir_str)

745
746
    def build_cmake(self, ext):
        """
747
        Build a single CMake-based extension by generating a CMake config and invoking CMake/Ninja.
748

749
750
751
752
        Generates or updates a config.cmake in the build directory (based on the extension's sourcedir),
        injecting LLVM/CUDA/ROCm and Python settings, then runs CMake to configure and build the target.
        When running an in-place build the resulting library is placed under ./tilelang/lib; otherwise the
        standard extension output directory is used.
753

754
755
756
        Parameters:
            ext: The CMakeExtension to build; its `sourcedir` should contain the TVM/CMake `config.cmake`
                 template under `3rdparty/tvm/cmake/`.
757

758
759
760
        Raises:
            subprocess.CalledProcessError: If the CMake configuration or build commands fail.
            OSError: If filesystem operations (read/write) fail.
761
        """
762
763
764
765
766
        # Only setup LLVM if it's enabled
        llvm_config_path = "OFF"
        if USE_LLVM:
            # Setup LLVM for TVM and retrieve the path to llvm-config
            _, llvm_config_path = setup_llvm_for_tvm()
767
768
769
770

        # Determine the directory where the final .so or .pyd library should go.
        extdir = os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name)))

771
772
773
774
775
        # To make it compatible with in-place build and avoid redundant link during incremental build,
        # we need to change the build destination to tilelang/lib, where it's actually loaded
        if self.inplace:
            extdir = os.path.abspath('./tilelang/lib/')

776
777
778
779
        # Prepare arguments for the CMake configuration step.
        # -DCMAKE_LIBRARY_OUTPUT_DIRECTORY sets where built libraries go
        # -DPYTHON_EXECUTABLE ensures that the correct Python is used
        cmake_args = [
780
781
782
783
784
            f"-DCMAKE_LIBRARY_OUTPUT_DIRECTORY={extdir}",
            f"-DPython_EXECUTABLE={sys.executable}",
            f"-DCMAKE_BUILD_TYPE={'Debug' if DEBUG_MODE else 'Release'}",
            "-G",
            "Ninja",
785
        ]
786
787
        if not USE_ROCM:
            cmake_args.append(f"-DCMAKE_CUDA_COMPILER={os.path.join(CUDA_HOME, 'bin', 'nvcc')}")
788
789

        # Create the temporary build directory (if it doesn't exist).
790
791
792
793
        if self.inplace:
            build_temp = os.path.abspath('./build')
        else:
            build_temp = os.path.abspath(self.build_temp)
794
795
        os.makedirs(build_temp, exist_ok=True)

796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
        # Paths to the source and destination config.cmake files
        src_config = Path(ext.sourcedir) / "3rdparty" / "tvm" / "cmake" / "config.cmake"
        dst_config = Path(build_temp) / "config.cmake"

        # Read the default config template
        content_lines = src_config.read_text().splitlines()

        # Add common LLVM configuration
        content_lines.append(f"set(USE_LLVM {llvm_config_path})")

        # Append GPU backend configuration based on environment
        if USE_ROCM:
            content_lines += [
                f"set(USE_ROCM {ROCM_HOME})",
                "set(USE_CUDA OFF)",
            ]
        else:
            content_lines += [
                f"set(USE_CUDA {CUDA_HOME})",
                "set(USE_ROCM OFF)",
            ]

        # Create the final file content
        new_content = "\n".join(content_lines) + "\n"

        # Write the file only if it does not exist or has changed
        if not dst_config.exists() or dst_config.read_text() != new_content:
            dst_config.write_text(new_content)
            print(f"[Config] Updated: {dst_config}")
        else:
            print(f"[Config] No changes: {dst_config}")
827

828
        cmake_path = get_cmake_path()
829
        # Run CMake to configure the project with the given arguments.
830
831
832
833
834
835
836
        if not os.path.exists(os.path.join(build_temp, "build.ninja")):
            logger.info(
                f"[CMake] Generating build.ninja: {cmake_path} {ext.sourcedir} {' '.join(cmake_args)}"
            )
            subprocess.check_call([cmake_path, ext.sourcedir] + cmake_args, cwd=build_temp)
        else:
            logger.info(f"[CMake] build.ninja already exists in {build_temp}")
837

838
        num_jobs = max(1, int(multiprocessing.cpu_count() * 0.75))
839
840
841
842
843
844
845
846
        logger.info(
            f"[Build] Using {num_jobs} jobs | cmake: {cmake_path} (exists: {os.path.exists(cmake_path)}) | build dir: {build_temp}"
        )

        subprocess.check_call(
            [cmake_path, "--build", ".", "--config", "Release", "-j",
             str(num_jobs)],
            cwd=build_temp)
847
848
849
850


setup(
    name=PACKAGE_NAME,
851
852
    version=(get_tilelang_version(with_cuda=False, with_system_info=False, with_commit_id=False)
             if PYPI_BUILD else get_tilelang_version(with_commit_id=WITH_COMMITID)),
853
854
    packages=find_packages(where="."),
    package_dir={"": "."},
855
    author="Tile-AI",
856
857
858
859
    description="A tile level programming language to generate high performance code.",
    long_description=read_readme(),
    long_description_content_type="text/markdown",
    platforms=[
860
        "Environment :: GPU :: NVIDIA CUDA" if not USE_ROCM else "Environment :: GPU :: AMD ROCm",
861
862
863
864
        "Operating System :: POSIX :: Linux",
    ],
    license="MIT",
    keywords="BLAS, CUDA, HIP, Code Generation, TVM",
865
    url="https://github.com/tile-ai/tilelang",
866
867
868
869
870
871
872
873
874
875
876
    classifiers=[
        "Programming Language :: Python :: 3.8",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Intended Audience :: Developers",
        "Intended Audience :: Science/Research",
    ],
    python_requires=">=3.8",
    install_requires=get_requirements(),
    package_data=package_data,
    include_package_data=False,
877
878
879
880
    ext_modules=[
        CMakeExtension("TileLangCXX", sourcedir="."),
        CythonExtension("TileLangCython", sourcedir="."),
    ],
881
882
883
    cmdclass={
        "build_py": TileLangBuilPydCommand,
        "sdist": TileLangSdistCommand,
884
        "build_ext": TilelangExtensionBuild,
885
886
    },
)