import os
import os.path as osp
import sys
import glob
from setuptools import setup, find_packages

import torch
from torch.utils.cpp_extension import BuildExtension
from torch.utils.cpp_extension import CppExtension, CUDAExtension, CUDA_HOME

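# Detect whether the CUDA kernels can be compiled. The FORCE_CUDA and
# FORCE_NON_CUDA environment variables override the automatic detection,
# e.g. `FORCE_CUDA=1 pip install .` builds the CUDA extensions on machines
# where no GPU is visible at build time.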
WITH_CUDA = torch.cuda.is_available() and CUDA_HOME is not None
if os.getenv('FORCE_CUDA', '0') == '1':
    WITH_CUDA = True
if os.getenv('FORCE_NON_CUDA', '0') == '1':
    WITH_CUDA = False

BUILD_DOCS = os.getenv('BUILD_DOCS', '0') == '1'


def get_extensions():
    Extension = CppExtension
    define_macros = []
    extra_compile_args = {'cxx': [], 'nvcc': []}
    extra_link_args = []

    # Windows users: Edit both of these to contain your VS include path, e.g.:
    # extra_compile_args['cxx'] += ['-I{VISUAL_STUDIO_DIR}\\include']
    # extra_compile_args['nvcc'] += ['-I{VISUAL_STUDIO_DIR}\\include']

    if WITH_CUDA:
        Extension = CUDAExtension
        define_macros += [('WITH_CUDA', None)]
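        # Extra nvcc flags can be supplied via the NVCC_FLAGS environment
        # variable; compute capability 3.5 (sm_35) is targeted and relaxed
        # constexpr support is enabled for the device code.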
        nvcc_flags = os.getenv('NVCC_FLAGS', '')
        nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ')
        nvcc_flags += ['-arch=sm_35', '--expt-relaxed-constexpr']
        extra_compile_args['cxx'] += ['-O0']
        extra_compile_args['nvcc'] += nvcc_flags
        if sys.platform == 'win32':
            extra_link_args = ['cusparse.lib']
        else:
            extra_link_args = ['-lcusparse']

    if sys.platform == 'win32':
        extra_compile_args['cxx'] += ['/MP']

    extensions_dir = osp.join(osp.dirname(osp.abspath(__file__)), 'csrc')
    main_files = glob.glob(osp.join(extensions_dir, '*.cpp'))
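    # Each top-level `csrc/<name>.cpp` file is compiled into its own extension
    # module `torch_sparse._<name>`, together with the matching
    # `csrc/cpu/<name>_cpu.cpp` and, when building with CUDA,
    # `csrc/cuda/<name>_cuda.cu`.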
    extensions = []
    for main in main_files:
        name = osp.splitext(osp.basename(main))[0]

        sources = [main, osp.join(extensions_dir, 'cpu', f'{name}_cpu.cpp')]
        if WITH_CUDA:
            sources += [osp.join(extensions_dir, 'cuda', f'{name}_cuda.cu')]

        extension = Extension(
            f'torch_sparse._{name}',
            sources,
            include_dirs=[extensions_dir],
            define_macros=define_macros,
            extra_compile_args=extra_compile_args,
            extra_link_args=extra_link_args,
        )
        extensions += [extension]

    return extensions


__version__ = '1.0.0'

install_requires = ['scipy']
setup_requires = ['pytest-runner']
tests_require = ['pytest', 'pytest-cov']

setup(
    name='torch_sparse',
    version=__version__,
    author='Matthias Fey',
    author_email='matthias.fey@tu-dortmund.de',
    url='https://github.com/rusty1s/pytorch_sparse',
    description=('PyTorch Extension Library of Optimized Autograd Sparse '
                 'Matrix Operations'),
    keywords=['pytorch', 'sparse', 'sparse-matrices', 'autograd'],
    license='MIT',
    install_requires=install_requires,
    setup_requires=setup_requires,
    tests_require=tests_require,
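    # Building the compiled extensions is skipped entirely when the
    # documentation is built (BUILD_DOCS=1).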
    ext_modules=get_extensions() if not BUILD_DOCS else [],
    cmdclass={
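        # `no_python_abi_suffix=True` drops the Python ABI tag from the
        # compiled library filenames (e.g. `_<name>.cpython-*.so` becomes
        # `_<name>.so`).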
        'build_ext': BuildExtension.with_options(no_python_abi_suffix=True)
    },
    packages=find_packages(),
)