setup.py
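"""Setup script that builds CacheFlow's custom CUDA kernels as PyTorch
C++/CUDA extensions (cache, attention, positional-encoding, layer-norm,
and activation ops)."""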
import setuptools
from torch.utils import cpp_extension

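# Compiler flags shared by all extensions: debug symbols for the C++ host
# code, -O2 optimization for the CUDA kernels.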
CXX_FLAGS = ['-g']
NVCC_FLAGS = ['-O2']


ext_modules = []

# Cache operations.
cache_extension = cpp_extension.CUDAExtension(
    name='cacheflow.cache_ops',
    sources=['csrc/cache.cpp', 'csrc/cache_kernels.cu'],
    extra_compile_args={'cxx': CXX_FLAGS, 'nvcc': NVCC_FLAGS},
)
ext_modules.append(cache_extension)

# Attention kernels.
attention_extension = cpp_extension.CUDAExtension(
    name='cacheflow.attention_ops',
    sources=['csrc/attention.cpp', 'csrc/attention_kernels.cu'],
    extra_compile_args={'cxx': CXX_FLAGS, 'nvcc': NVCC_FLAGS},
)
ext_modules.append(attention_extension)

# Positional encodings.
positional_encoding_extension = cpp_extension.CUDAExtension(
    name='cacheflow.pos_encoding_ops',
    sources=['csrc/pos_encoding.cpp', 'csrc/pos_encoding_kernels.cu'],
    extra_compile_args={'cxx': CXX_FLAGS, 'nvcc': NVCC_FLAGS},
)
ext_modules.append(positional_encoding_extension)

# Layer normalization kernels.
layernorm_extension = cpp_extension.CUDAExtension(
    name='cacheflow.layernorm_ops',
    sources=['csrc/layernorm.cpp', 'csrc/layernorm_kernels.cu'],
    extra_compile_args={'cxx': CXX_FLAGS, 'nvcc': NVCC_FLAGS},
)
ext_modules.append(layernorm_extension)

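# Activation functions.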
activation_extension = cpp_extension.CUDAExtension(
    name='cacheflow.activation_ops',
    sources=['csrc/activation.cpp', 'csrc/activation_kernels.cu'],
    extra_compile_args={'cxx': CXX_FLAGS, 'nvcc': NVCC_FLAGS},
)
ext_modules.append(activation_extension)

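# cpp_extension.BuildExtension replaces setuptools' default build_ext so that
# .cu sources are compiled with nvcc and .cpp sources with the host compiler.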
setuptools.setup(
    name='cacheflow',
    ext_modules=ext_modules,
    cmdclass={'build_ext': cpp_extension.BuildExtension},
)
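
# Typical usage (standard setuptools invocations, not prescribed by this file):
#   python setup.py build_ext --inplace   # build the extensions in place
#   pip install -e .                      # editable install into the current env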