setup.py 9.78 KB
Newer Older
1
2
#!/usr/bin/env python
# -*- coding: utf-8 -*-
Kai Chen's avatar
Kai Chen committed
3
import os
4
import platform
Kai Chen's avatar
Kai Chen committed
5
6
import subprocess
import time
Kai Chen's avatar
Kai Chen committed
7
from setuptools import Extension, dist, find_packages, setup
8

9
import torch
10
from torch.utils.cpp_extension import BuildExtension, CUDAExtension
Kai Chen's avatar
Kai Chen committed
11

Kai Chen's avatar
Kai Chen committed
12
dist.Distribution().fetch_build_eggs(['Cython', 'numpy>=1.11.1'])
Kai Chen's avatar
Kai Chen committed
13
14
import numpy as np  # noqa: E402, isort:skip
from Cython.Build import cythonize  # noqa: E402, isort:skip
Kai Chen's avatar
Kai Chen committed
15

Kai Chen's avatar
Kai Chen committed
16
17

def readme():
    """Return the contents of README.md for use as the long description."""
    with open('README.md', encoding='utf-8') as f:
        return f.read()


Kai Chen's avatar
Kai Chen committed
23
24
25
# Version scheme: MAJOR.MINOR[.PATCH]SUFFIX (e.g. '1.0rc1').
MAJOR = 1
MINOR = 0
PATCH = ''
SUFFIX = 'rc1'

# PATCH is only part of the dotted version when it is non-empty.
_version_fields = [str(MAJOR), str(MINOR)] + ([str(PATCH)] if PATCH else [])
SHORT_VERSION = '.'.join(_version_fields) + SUFFIX
Kai Chen's avatar
Kai Chen committed
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76

version_file = 'mmdet/version.py'


def get_git_hash():
    """Return the current git commit SHA, or ``'unknown'`` if git fails.

    The git process runs with a minimal, C-locale environment so its
    output is stable regardless of the user's locale settings.
    """

    def _run_git(cmd):
        # Build a minimal environment: keep only the variables git needs.
        env = {
            key: os.environ[key]
            for key in ('SYSTEMROOT', 'PATH', 'HOME')
            if os.environ.get(key) is not None
        }
        # LANGUAGE is used on win32; force C locale everywhere.
        env.update(LANGUAGE='C', LANG='C', LC_ALL='C')
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=env)
        return proc.communicate()[0]

    try:
        raw = _run_git(['git', 'rev-parse', 'HEAD'])
        sha = raw.strip().decode('ascii')
    except OSError:
        # git binary not found (or not executable).
        sha = 'unknown'

    return sha


def get_hash():
    """Return a short version hash for the build.

    Prefers the live git checkout; falls back to the hash recorded in the
    previously generated version file, then to ``'unknown'``.
    """
    if os.path.exists('.git'):
        # Working from a git checkout: use the first 7 chars of HEAD.
        return get_git_hash()[:7]
    if os.path.exists(version_file):
        try:
            from mmdet.version import __version__
        except ImportError:
            raise ImportError('Unable to get git version')
        # The stored version is '<short_version>+<sha>'; take the sha part.
        return __version__.split('+')[-1]
    return 'unknown'


def write_version_py():
    """Generate ``mmdet/version.py`` recording the full and short version."""
    template = """# GENERATED VERSION FILE
# TIME: {}

__version__ = '{}'
short_version = '{}'
"""
    # Full version is '<short_version>+<git sha>'.
    full_version = SHORT_VERSION + '+' + get_hash()

    with open(version_file, 'w') as f:
        f.write(template.format(time.asctime(), full_version, SHORT_VERSION))


Kai Chen's avatar
Kai Chen committed
90
91
92
93
94
95
def get_version():
    """Read ``__version__`` back from the generated version file.

    The file is executed in an explicit, isolated namespace. The previous
    implementation exec'd into the function's implicit locals and read the
    result back via ``locals()``, which only works as a CPython
    implementation detail — the language reference does not guarantee that
    ``exec`` mutations are visible through ``locals()`` inside a function.

    Returns:
        str: the full version string, e.g. ``'1.0rc1+abc1234'``.
    """
    version_ns = {}
    with open(version_file, 'r') as f:
        exec(compile(f.read(), version_file, 'exec'), version_ns)
    return version_ns['__version__']


96
97
def make_cuda_ext(name, module, sources):
    """Build a :class:`CUDAExtension` named ``<module>.<name>``.

    Source paths in *sources* are taken relative to the package directory
    of *module*. Raises ``EnvironmentError`` when CUDA is unavailable and
    the ``FORCE_CUDA=1`` escape hatch is not set.
    """
    cuda_ok = torch.cuda.is_available() or os.getenv('FORCE_CUDA', '0') == '1'
    if not cuda_ok:
        raise EnvironmentError('CUDA is required to compile MMDetection!')

    # Disable half-precision operator overloads; they conflict with the
    # extension sources on some toolchains.
    nvcc_flags = [
        '-D__CUDA_NO_HALF_OPERATORS__',
        '-D__CUDA_NO_HALF_CONVERSIONS__',
        '-D__CUDA_NO_HALF2_OPERATORS__',
    ]

    return CUDAExtension(
        name='{}.{}'.format(module, name),
        sources=[os.path.join(*module.split('.'), src) for src in sources],
        define_macros=[('WITH_CUDA', None)],
        extra_compile_args={'cxx': [], 'nvcc': nvcc_flags})
117
118
119


def make_cython_ext(name, module, sources):
    """Cythonize a C++ extension named ``<module>.<name>``.

    Source paths in *sources* are taken relative to the package directory
    of *module*. On Windows the default compiler flags are used.
    """
    on_windows = platform.system() == 'Windows'
    # The warning-suppression flags are GCC/Clang specific.
    compile_args = None if on_windows else {
        'cxx': ['-Wno-unused-function', '-Wno-write-strings']
    }

    ext = Extension(
        '{}.{}'.format(module, name),
        [os.path.join(*module.split('.'), src) for src in sources],
        include_dirs=[np.get_include()],
        language='c++',
        extra_compile_args=compile_args)
    # cythonize returns a one-element list; unpack it.
    ext, = cythonize(ext)
    return ext


136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
def parse_requirements(fname='requirements.txt', with_version=True):
    """
    Parse the package dependencies listed in a requirements file but strips
    specific versioning information.

    Args:
        fname (str): path to requirements file
        with_version (bool, default=True): if True include version specs

    Returns:
        List[str]: list of requirements items

    CommandLine:
        python -c "import setup; print(setup.parse_requirements())"
    """
    import sys
    from os.path import exists
    import re
    require_fpath = fname

    def parse_line(line):
        """
        Parse information from a line in a requirements text file
        """
        if line.startswith('-r '):
            # Allow specifying requirements in other files
            target = line.split(' ')[1]
            for info in parse_require_file(target):
                yield info
        else:
            info = {'line': line}
            if line.startswith('-e '):
                # Editable install: the package name follows '#egg='.
                info['package'] = line.split('#egg=')[1]
            else:
                # Split the package name from its version specifier.
                # Longer operators must precede their prefixes
                # ('>=' before '>', '<=' before '<') so re.split matches
                # the full operator.
                ops = ['>=', '==', '>', '<=', '<', '!=', '~=']
                pat = '(' + '|'.join(ops) + ')'
                parts = re.split(pat, line, maxsplit=1)
                parts = [p.strip() for p in parts]

                info['package'] = parts[0]
                if len(parts) > 1:
                    op, rest = parts[1:]
                    if ';' in rest:
                        # Handle platform specific dependencies
                        # http://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-platform-specific-dependencies
                        version, platform_deps = map(str.strip,
                                                     rest.split(';'))
                        info['platform_deps'] = platform_deps
                    else:
                        version = rest  # NOQA
                    info['version'] = (op, version)
            yield info

    def parse_require_file(fpath):
        # Yield parsed info dicts for every non-comment line in the file.
        with open(fpath, 'r') as f:
            for line in f.readlines():
                line = line.strip()
                if line and not line.startswith('#'):
                    for info in parse_line(line):
                        yield info

    def gen_packages_items():
        # Reassemble each parsed dict into a requirement string.
        if exists(require_fpath):
            for info in parse_require_file(require_fpath):
                parts = [info['package']]
                if with_version and 'version' in info:
                    parts.extend(info['version'])
                if not sys.version.startswith('3.4'):
                    # apparently package_deps are broken in 3.4
                    platform_deps = info.get('platform_deps')
                    if platform_deps is not None:
                        parts.append(';' + platform_deps)
                item = ''.join(parts)
                yield item

    packages = list(gen_packages_items())
    return packages
213
214


Kai Chen's avatar
Kai Chen committed
215
216
217
218
219
if __name__ == '__main__':
    # Regenerate mmdet/version.py first so the packaged metadata and the
    # installed module report the current git hash.
    write_version_py()
    setup(
        name='mmdet',
        version=get_version(),
        description='Open MMLab Detection Toolbox and Benchmark',
        long_description=readme(),
        author='OpenMMLab',
        author_email='chenkaidev@gmail.com',
        keywords='computer vision, object detection',
        url='https://github.com/open-mmlab/mmdetection',
        packages=find_packages(exclude=('configs', 'tools', 'demo')),
        # Ship the compiled extension binaries alongside the ops packages.
        package_data={'mmdet.ops': ['*/*.so']},
        classifiers=[
            'Development Status :: 4 - Beta',
            'License :: OSI Approved :: Apache Software License',
            'Operating System :: OS Independent',
            'Programming Language :: Python :: 3',
            'Programming Language :: Python :: 3.5',
            'Programming Language :: Python :: 3.6',
            'Programming Language :: Python :: 3.7',
        ],
        license='Apache License 2.0',
        # Dependency lists live in separate files under requirements/ so
        # they can be shared with CI and documented independently.
        setup_requires=parse_requirements('requirements/build.txt'),
        tests_require=parse_requirements('requirements/tests.txt'),
        install_requires=parse_requirements('requirements/runtime.txt'),
        extras_require={
            'all': parse_requirements('requirements.txt'),
            'tests': parse_requirements('requirements/tests.txt'),
            'build': parse_requirements('requirements/build.txt'),
            'optional': parse_requirements('requirements/optional.txt'),
        },
        # Custom C++/CUDA/Cython ops compiled at install time.
        ext_modules=[
            make_cuda_ext(
                name='compiling_info',
                module='mmdet.ops.utils',
                sources=['src/compiling_info.cpp']),
            make_cython_ext(
                name='soft_nms_cpu',
                module='mmdet.ops.nms',
                sources=['src/soft_nms_cpu.pyx']),
            make_cuda_ext(
                name='nms_cpu',
                module='mmdet.ops.nms',
                sources=['src/nms_cpu.cpp']),
            make_cuda_ext(
                name='nms_cuda',
                module='mmdet.ops.nms',
                sources=['src/nms_cuda.cpp', 'src/nms_kernel.cu']),
            make_cuda_ext(
                name='roi_align_cuda',
                module='mmdet.ops.roi_align',
                sources=['src/roi_align_cuda.cpp', 'src/roi_align_kernel.cu']),
            make_cuda_ext(
                name='roi_pool_cuda',
                module='mmdet.ops.roi_pool',
                sources=['src/roi_pool_cuda.cpp', 'src/roi_pool_kernel.cu']),
            make_cuda_ext(
                name='deform_conv_cuda',
                module='mmdet.ops.dcn',
                sources=[
                    'src/deform_conv_cuda.cpp',
                    'src/deform_conv_cuda_kernel.cu'
                ]),
            make_cuda_ext(
                name='deform_pool_cuda',
                module='mmdet.ops.dcn',
                sources=[
                    'src/deform_pool_cuda.cpp',
                    'src/deform_pool_cuda_kernel.cu'
                ]),
            make_cuda_ext(
                name='sigmoid_focal_loss_cuda',
                module='mmdet.ops.sigmoid_focal_loss',
                sources=[
                    'src/sigmoid_focal_loss.cpp',
                    'src/sigmoid_focal_loss_cuda.cu'
                ]),
            make_cuda_ext(
                name='masked_conv2d_cuda',
                module='mmdet.ops.masked_conv',
                sources=[
                    'src/masked_conv2d_cuda.cpp', 'src/masked_conv2d_kernel.cu'
                ]),
        ],
        # BuildExtension (from torch) drives mixed C++/nvcc compilation.
        cmdclass={'build_ext': BuildExtension},
        zip_safe=False)