import importlib
import os
import time
from abc import ABC, abstractmethod
from pathlib import Path
from typing import List


class Builder(ABC):
    """
    Builder is the base class to build extensions for PyTorch.

    Args:
        name (str): the name of the kernel to be built
        prebuilt_import_path (str): the path where the extension is installed during pip install
    """

    def __init__(self, name: str, prebuilt_import_path: str):
        self.name = name
        self.prebuilt_import_path = prebuilt_import_path
        # macros shared by all kernels so compiled code can gate on the torch version
        self.version_dependent_macros = ['-DVERSION_GE_1_1', '-DVERSION_GE_1_3', '-DVERSION_GE_1_5']

        # raise explicitly instead of using `assert` so the check is not stripped under `python -O`
        if not prebuilt_import_path.startswith('colossalai._C'):
            raise AssertionError(
                f'The prebuilt_import_path should start with colossalai._C, but got {self.prebuilt_import_path}')

    def relative_to_abs_path(self, code_path: str) -> str:
        """
        This function takes in a path relative to the colossalai root directory and return the absolute path.

        Args:
            code_path (str): path relative to the colossalai root directory

        Returns:
            str: the absolute path of ``code_path``
        """
        op_builder_module_path = Path(__file__).parent

        # if we install from source
        # the current file path will be op_builder/builder.py
        # if we install via pip install colossalai
        # the current file path will be colossalai/kernel/op_builder/builder.py
        # this is because that the op_builder inside colossalai is a symlink
        # this symlink will be replaced with actual files if we install via pypi
        # thus we cannot tell the colossalai root directory by checking whether the op_builder
        # is a symlink, we can only tell whether it is inside or outside colossalai
        if str(op_builder_module_path).endswith('colossalai/kernel/op_builder'):
            root_path = op_builder_module_path.parent.parent
        else:
            root_path = op_builder_module_path.parent.joinpath('colossalai')

        code_abs_path = root_path.joinpath(code_path)
        return str(code_abs_path)

    def get_cuda_home_include(self):
        """
        return include path inside the cuda home.

        Raises:
            RuntimeError: if CUDA_HOME is not set (no CUDA toolchain available)
        """
        # imported lazily so merely importing this module does not require torch/CUDA
        from torch.utils.cpp_extension import CUDA_HOME
        if CUDA_HOME is None:
            raise RuntimeError("CUDA_HOME is None, please set CUDA_HOME to compile C++/CUDA kernels in ColossalAI.")
        cuda_include = os.path.join(CUDA_HOME, "include")
        return cuda_include

    def csrc_abs_path(self, path):
        """Return the absolute path of a file located under kernel/cuda_native/csrc."""
        return os.path.join(self.relative_to_abs_path('kernel/cuda_native/csrc'), path)

    # functions that must be overridden begin
    @abstractmethod
    def sources_files(self) -> List[str]:
        """
        This function should return a list of source files for extensions.
        """
        raise NotImplementedError

    @abstractmethod
    def include_dirs(self) -> List[str]:
        """
        This function should return a list of include files for extensions.
        """
        raise NotImplementedError

    @abstractmethod
    def cxx_flags(self) -> List[str]:
        """
        This function should return a list of cxx compilation flags for extensions.
        """
        raise NotImplementedError

    @abstractmethod
    def nvcc_flags(self) -> List[str]:
        """
        This function should return a list of nvcc compilation flags for extensions.
        """
        raise NotImplementedError

    # functions that must be overridden end

    def strip_empty_entries(self, args):
        '''
        Drop any empty strings from the list of compile and link flags
        '''
        return [x for x in args if len(x) > 0]

    def import_op(self):
        """
        This function will import the op module by its string name.
        """
        return importlib.import_module(self.prebuilt_import_path)

    def load(self, verbose=True):
        """
        load the kernel during runtime. If the kernel is not built during pip install, it will build the kernel.
        If the kernel is built during runtime, it will be stored in `~/.cache/colossalai/torch_extensions/`. If the
        kernel is built during pip install, it can be accessed through `colossalai._C`.

        Warning: do not load this kernel repeatedly during model execution as it could slow down the training process.

        Args:
            verbose (bool, optional): show detailed info. Defaults to True.
        """
        from torch.utils.cpp_extension import load
        start_build = time.time()

        try:
            # fast path: a pre-built extension was installed via pip
            op_module = self.import_op()
            if verbose:
                print(f"OP {self.prebuilt_import_path} already exists, skip building.")
        except ImportError:
            # slow path: JIT-compile the kernel into a per-torch/per-CUDA-version cache directory
            # so that switching torch or CUDA versions does not reuse an incompatible build
            import torch
            torch_version_major = torch.__version__.split('.')[0]
            torch_version_minor = torch.__version__.split('.')[1]
            torch_cuda_version = torch.version.cuda
            home_directory = os.path.expanduser('~')
            extension_directory = f".cache/colossalai/torch_extensions/torch{torch_version_major}.{torch_version_minor}_cu{torch_cuda_version}"
            build_directory = os.path.join(home_directory, extension_directory)
            Path(build_directory).mkdir(parents=True, exist_ok=True)

            if verbose:
                print("=========================================================================================")
                print(f"No pre-built kernel is found, build and load the {self.name} kernel during runtime now")
                print("=========================================================================================")

            # load the kernel
            op_module = load(name=self.name,
                             sources=self.strip_empty_entries(self.sources_files()),
                             extra_include_paths=self.strip_empty_entries(self.include_dirs()),
                             extra_cflags=self.cxx_flags(),
                             extra_cuda_cflags=self.nvcc_flags(),
                             extra_ldflags=[],
                             build_directory=build_directory,
                             verbose=verbose)

        build_duration = time.time() - start_build
        if verbose:
            print(f"Time to load {self.name} op: {build_duration} seconds")

        return op_module

    def builder(self) -> 'CUDAExtension':
        """
        get a CUDAExtension instance used for setup.py
        """
        from torch.utils.cpp_extension import CUDAExtension

        return CUDAExtension(name=self.prebuilt_import_path,
                             sources=self.strip_empty_entries(self.sources_files()),
                             include_dirs=self.strip_empty_entries(self.include_dirs()),
                             extra_compile_args={
                                 'cxx': self.strip_empty_entries(self.cxx_flags()),
                                 'nvcc': self.strip_empty_entries(self.nvcc_flags())
                             })