import glob
import os
import os.path as osp
import platform
import sys
from itertools import product

import torch
from setuptools import find_packages, setup
from torch.__config__ import parallel_info
from torch.utils.cpp_extension import (CUDA_HOME, BuildExtension, CppExtension,
                                       CUDAExtension)

__version__ = '2.1.0'
URL = 'https://github.com/rusty1s/pytorch_scatter'

WITH_CUDA = False
if torch.cuda.is_available():
    WITH_CUDA = CUDA_HOME is not None or torch.version.hip
suffices = ['cpu', 'cuda'] if WITH_CUDA else ['cpu']
quyuanhao123's avatar
quyuanhao123 committed
21
22
if os.getenv('FORCE_CUDA', '0') == '1':
    suffices = ['cuda', 'cpu']
limm's avatar
limm committed
23
24
if os.getenv('FORCE_ONLY_CUDA', '0') == '1':
    suffices = ['cuda']
quyuanhao123's avatar
quyuanhao123 committed
25
26
27
28
if os.getenv('FORCE_ONLY_CPU', '0') == '1':
    suffices = ['cpu']

BUILD_DOCS = os.getenv('BUILD_DOCS', '0') == '1'
limm's avatar
limm committed
29
WITH_SYMBOLS = os.getenv('WITH_SYMBOLS', '0') == '1'
quyuanhao123's avatar
quyuanhao123 committed
30
31
32
33
34
35
36


def get_extensions():
    extensions = []

    extensions_dir = osp.join('csrc')
    main_files = glob.glob(osp.join(extensions_dir, '*.cpp'))
limm's avatar
limm committed
37
38
    # remove generated 'hip' files, in case of rebuilds
    main_files = [path for path in main_files if 'hip' not in path]
quyuanhao123's avatar
quyuanhao123 committed
39
40

    for main, suffix in product(main_files, suffices):
limm's avatar
limm committed
41
42
43
44
45
46
47
48
49
50
        define_macros = [('WITH_PYTHON', None)]
        undef_macros = []

        if sys.platform == 'win32':
            define_macros += [('torchscatter_EXPORTS', None)]

        extra_compile_args = {'cxx': ['-O3']}
        if not os.name == 'nt':  # Not on Windows:
            extra_compile_args['cxx'] += ['-Wno-sign-compare']
        extra_link_args = [] if WITH_SYMBOLS else ['-s']
quyuanhao123's avatar
quyuanhao123 committed
51
52
53
54
55
56
57
58
59
60
61
62

        info = parallel_info()
        if ('backend: OpenMP' in info and 'OpenMP not found' not in info
                and sys.platform != 'darwin'):
            extra_compile_args['cxx'] += ['-DAT_PARALLEL_OPENMP']
            if sys.platform == 'win32':
                extra_compile_args['cxx'] += ['/openmp']
            else:
                extra_compile_args['cxx'] += ['-fopenmp']
        else:
            print('Compiling without OpenMP...')

limm's avatar
limm committed
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
        # Compile for mac arm64
        if (sys.platform == 'darwin' and platform.machine() == 'arm64'):
            extra_compile_args['cxx'] += ['-arch', 'arm64']
            extra_link_args += ['-arch', 'arm64']

        if suffix == 'cuda':
            define_macros += [('WITH_CUDA', None)]
            nvcc_flags = os.getenv('NVCC_FLAGS', '')
            nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ')
            nvcc_flags += ['-O3']
            if torch.version.hip:
                # USE_ROCM was added to later versions of PyTorch.
                # Define here to support older PyTorch versions as well:
                define_macros += [('USE_ROCM', None)]
                undef_macros += ['__HIP_NO_HALF_CONVERSIONS__']
            else:
                nvcc_flags += ['--expt-relaxed-constexpr']
            extra_compile_args['nvcc'] = nvcc_flags
quyuanhao123's avatar
quyuanhao123 committed
81
82
83
84
85
86
87
88

        name = main.split(os.sep)[-1][:-4]
        sources = [main]

        path = osp.join(extensions_dir, 'cpu', f'{name}_cpu.cpp')
        if osp.exists(path):
            sources += [path]

limm's avatar
limm committed
89
90
        path = osp.join(extensions_dir, 'cuda', f'{name}_cuda.cu')
        if suffix == 'cuda' and osp.exists(path):
quyuanhao123's avatar
quyuanhao123 committed
91
92
93
94
95
96
            sources += [path]

        Extension = CppExtension if suffix == 'cpu' else CUDAExtension
        extension = Extension(
            f'torch_scatter._{name}_{suffix}',
            sources,
limm's avatar
limm committed
97
            include_dirs=[extensions_dir],
quyuanhao123's avatar
quyuanhao123 committed
98
            define_macros=define_macros,
limm's avatar
limm committed
99
            undef_macros=undef_macros,
quyuanhao123's avatar
quyuanhao123 committed
100
101
102
103
104
105
106
107
108
            extra_compile_args=extra_compile_args,
            extra_link_args=extra_link_args,
        )
        extensions += [extension]

    return extensions


install_requires = []
limm's avatar
limm committed
109
110
111
112
113
114
115
116
117
118

test_requires = [
    'pytest',
    'pytest-cov',
]

# work-around hipify abs paths
include_package_data = True
if torch.cuda.is_available() and torch.version.hip:
    include_package_data = False

setup(
    name='torch_scatter',
limm's avatar
limm committed
122
123
    version=__version__,
    description='PyTorch Extension Library of Optimized Scatter Operations',
quyuanhao123's avatar
quyuanhao123 committed
124
125
    author='Matthias Fey',
    author_email='matthias.fey@tu-dortmund.de',
limm's avatar
limm committed
126
127
    url=URL,
    download_url=f'{URL}/archive/{__version__}.tar.gz',
quyuanhao123's avatar
quyuanhao123 committed
128
    keywords=['pytorch', 'scatter', 'segment', 'gather'],
limm's avatar
limm committed
129
    python_requires='>=3.7',
quyuanhao123's avatar
quyuanhao123 committed
130
    install_requires=install_requires,
limm's avatar
limm committed
131
132
133
    extras_require={
        'test': test_requires,
    },
quyuanhao123's avatar
quyuanhao123 committed
134
135
136
137
138
139
    ext_modules=get_extensions() if not BUILD_DOCS else [],
    cmdclass={
        'build_ext':
        BuildExtension.with_options(no_python_abi_suffix=True, use_ninja=False)
    },
    packages=find_packages(),
limm's avatar
limm committed
140
    include_package_data=include_package_data,
quyuanhao123's avatar
quyuanhao123 committed
141
)