"torchvision/transforms/v2/functional/_deprecated.py" did not exist on "b4b246a52ebbe29bff9099c6fbabff0b82d7edd3"
setup.py 5.4 KB
Newer Older
quyuanhao123's avatar
quyuanhao123 committed
1
2
3
4
5
6
7
8
9
10
import glob
import os
import os.path as osp
import platform
import sys
from itertools import product

import torch
from setuptools import find_packages, setup
from torch.__config__ import parallel_info
from torch.utils.cpp_extension import (
    CUDA_HOME,
    BuildExtension,
    CppExtension,
    CUDAExtension,
)
quyuanhao123's avatar
quyuanhao123 committed
17

limm's avatar
limm committed
18
__version__ = '0.6.16'
quyuanhao123's avatar
quyuanhao123 committed
19
20
URL = 'https://github.com/rusty1s/pytorch_sparse'

limm's avatar
limm committed
21
22
23
WITH_CUDA = False
if torch.cuda.is_available():
    WITH_CUDA = CUDA_HOME is not None or torch.version.hip
limm's avatar
limm committed
24
suffices = ['cpu', 'cuda'] if WITH_CUDA else ['cpu']
quyuanhao123's avatar
quyuanhao123 committed
25
26
if os.getenv('FORCE_CUDA', '0') == '1':
    suffices = ['cuda', 'cpu']
limm's avatar
limm committed
27
28
if os.getenv('FORCE_ONLY_CUDA', '0') == '1':
    suffices = ['cuda']
quyuanhao123's avatar
quyuanhao123 committed
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
if os.getenv('FORCE_ONLY_CPU', '0') == '1':
    suffices = ['cpu']

BUILD_DOCS = os.getenv('BUILD_DOCS', '0') == '1'

WITH_METIS = True if os.getenv('WITH_METIS', '0') == '1' else False
WITH_MTMETIS = True if os.getenv('WITH_MTMETIS', '0') == '1' else False

WITH_SYMBOLS = True if os.getenv('WITH_SYMBOLS', '0') == '1' else False


def get_extensions():
    extensions = []

    extensions_dir = osp.join('csrc')
    main_files = glob.glob(osp.join(extensions_dir, '*.cpp'))
limm's avatar
limm committed
45
46
    # remove generated 'hip' files, in case of rebuilds
    main_files = [path for path in main_files if 'hip' not in path]
quyuanhao123's avatar
quyuanhao123 committed
47
48
49

    for main, suffix in product(main_files, suffices):
        define_macros = [('WITH_PYTHON', None)]
limm's avatar
limm committed
50
        undef_macros = []
quyuanhao123's avatar
quyuanhao123 committed
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66

        if sys.platform == 'win32':
            define_macros += [('torchsparse_EXPORTS', None)]

        libraries = []
        if WITH_METIS:
            define_macros += [('WITH_METIS', None)]
            libraries += ['metis']
        if WITH_MTMETIS:
            define_macros += [('WITH_MTMETIS', None)]
            define_macros += [('MTMETIS_64BIT_VERTICES', None)]
            define_macros += [('MTMETIS_64BIT_EDGES', None)]
            define_macros += [('MTMETIS_64BIT_WEIGHTS', None)]
            define_macros += [('MTMETIS_64BIT_PARTITIONS', None)]
            libraries += ['mtmetis', 'wildriver']

limm's avatar
limm committed
67
        extra_compile_args = {'cxx': ['-O3']}
quyuanhao123's avatar
quyuanhao123 committed
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
        if not os.name == 'nt':  # Not on Windows:
            extra_compile_args['cxx'] += ['-Wno-sign-compare']
        extra_link_args = [] if WITH_SYMBOLS else ['-s']

        info = parallel_info()
        if ('backend: OpenMP' in info and 'OpenMP not found' not in info
                and sys.platform != 'darwin'):
            extra_compile_args['cxx'] += ['-DAT_PARALLEL_OPENMP']
            if sys.platform == 'win32':
                extra_compile_args['cxx'] += ['/openmp']
            else:
                extra_compile_args['cxx'] += ['-fopenmp']
        else:
            print('Compiling without OpenMP...')

        # Compile for mac arm64
        if (sys.platform == 'darwin' and platform.machine() == 'arm64'):
            extra_compile_args['cxx'] += ['-arch', 'arm64']
            extra_link_args += ['-arch', 'arm64']

limm's avatar
limm committed
88
89
90
91
        if suffix == 'cuda':
            define_macros += [('WITH_CUDA', None)]
            nvcc_flags = os.getenv('NVCC_FLAGS', '')
            nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ')
limm's avatar
limm committed
92
93
94
95
96
97
            nvcc_flags += ['-O3']
            if torch.version.hip:
                # USE_ROCM was added to later versions of PyTorch
                # Define here to support older PyTorch versions as well:
                define_macros += [('USE_ROCM', None)]
                undef_macros += ['__HIP_NO_HALF_CONVERSIONS__']
quyuanhao123's avatar
quyuanhao123 committed
98
            else:
limm's avatar
limm committed
99
100
                nvcc_flags += ['--expt-relaxed-constexpr']
            extra_compile_args['nvcc'] = nvcc_flags
quyuanhao123's avatar
quyuanhao123 committed
101
102
103
104
105
106
107
108

        name = main.split(os.sep)[-1][:-4]
        sources = [main]

        path = osp.join(extensions_dir, 'cpu', f'{name}_cpu.cpp')
        if osp.exists(path):
            sources += [path]

limm's avatar
limm committed
109
110
        path = osp.join(extensions_dir, 'cuda', f'{name}_cuda.cu')
        if suffix == 'cuda' and osp.exists(path):
quyuanhao123's avatar
quyuanhao123 committed
111
112
            sources += [path]

limm's avatar
limm committed
113
114
        phmap_dir = "third_party/parallel-hashmap"

quyuanhao123's avatar
quyuanhao123 committed
115
116
117
118
        Extension = CppExtension if suffix == 'cpu' else CUDAExtension
        extension = Extension(
            f'torch_sparse._{name}_{suffix}',
            sources,
limm's avatar
limm committed
119
            include_dirs=[extensions_dir, phmap_dir],
quyuanhao123's avatar
quyuanhao123 committed
120
            define_macros=define_macros,
limm's avatar
limm committed
121
            undef_macros=undef_macros,
quyuanhao123's avatar
quyuanhao123 committed
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
            extra_compile_args=extra_compile_args,
            extra_link_args=extra_link_args,
            libraries=libraries,
        )
        extensions += [extension]

    return extensions


install_requires = [
    'scipy',
]

test_requires = [
    'pytest',
    'pytest-cov',
]

limm's avatar
limm committed
140
141
142
143
144
# work-around hipify abs paths
include_package_data = True
if torch.cuda.is_available() and torch.version.hip:
    include_package_data = False

setup(
    name='torch_sparse',
    version=__version__,
    description=('PyTorch Extension Library of Optimized Autograd Sparse '
                 'Matrix Operations'),
    author='Matthias Fey',
    author_email='matthias.fey@tu-dortmund.de',
    url=URL,
    download_url=f'{URL}/archive/{__version__}.tar.gz',
    keywords=[
        'pytorch',
        'sparse',
        'sparse-matrices',
        'autograd',
    ],
    python_requires='>=3.7',
    install_requires=install_requires,
    extras_require={
        'test': test_requires,
    },
    ext_modules=get_extensions() if not BUILD_DOCS else [],
    cmdclass={
        'build_ext':
        BuildExtension.with_options(no_python_abi_suffix=True, use_ninja=False)
    },
    packages=find_packages(),
limm's avatar
limm committed
171
    include_package_data=include_package_data,
quyuanhao123's avatar
quyuanhao123 committed
172
)