# setup.py
import setuptools
from torch.utils.cpp_extension import BuildExtension, CUDAExtension
import os


# Location of the CUDA samples helper headers (helper_cuda.h and friends).
# Overridable through the CUDA_HELPER environment variable.
CUDA_HELPER = os.environ.get('CUDA_HELPER', '/usr/local/cuda/samples/common/inc')

# Compile flags shared by both the host (cxx) and device (nvcc) compilers.
cxx_flags = ['-I' + CUDA_HELPER]

# Opt-in NCCL support: export USE_NCCL=1 to define MOE_USE_NCCL at build time.
if os.environ.get('USE_NCCL', '0') == '1':
    cxx_flags.append('-DMOE_USE_NCCL')

if __name__ == '__main__':
    # Single fused CUDA extension built from the C++/CUDA sources under cuda/.
    # The same flag list is handed to both the host and the device compiler.
    ext_modules = [
        CUDAExtension(
            name='fmoe_cuda',
            sources=[
                'cuda/moe.cpp',
                'cuda/cuda_stream_manager.cpp',
                'cuda/moe_compute_kernel.cu',
                'cuda/moe_comm_kernel.cu',
                'cuda/moe_fused_kernel.cu',
            ],
            extra_compile_args={'cxx': cxx_flags, 'nvcc': cxx_flags},
        ),
    ]

    setuptools.setup(
        name='fmoe',
        version='0.1.0',
        description='An efficient Mixture-of-Experts impl. for PyTorch',
        author='Jiaao He, Jiezhong Qiu and Aohan Zeng',
        author_email='hja20@mails.tsinghua.edu.cn',
        license='Apache-2',
        url='https://github.com/laekov/fastmoe',
        packages=['fmoe'],
        ext_modules=ext_modules,
        # PyTorch's BuildExtension drives compilation (nvcc invocation, ABI flags).
        cmdclass={'build_ext': BuildExtension},
    )