CMakeLists.txt

set(TARGET ext_server)
option(LLAMA_SERVER_VERBOSE "Build verbose logging option for Server" ON)
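
# Build ext_server as a shared library on Windows and as a static library elsewhere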
if (WIN32)
    add_library(${TARGET} SHARED ext_server.cpp ../llama.cpp/llama.cpp)
else()
    add_library(${TARGET} STATIC ext_server.cpp ../llama.cpp/llama.cpp)
endif()
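
# Require C++11; LLAMA_SERVER_LIBRARY=1 flags that the server code is built as a library
# rather than as the standalone server binary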
target_compile_features(${TARGET} PRIVATE cxx_std_11)
target_compile_definitions(${TARGET} PUBLIC LLAMA_SERVER_LIBRARY=1)
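
# Link the llama.cpp components and install the result; SERVER_VERBOSE follows the
# LLAMA_SERVER_VERBOSE option defined above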
target_link_libraries(${TARGET} PRIVATE ggml llava common)
set_target_properties(${TARGET} PROPERTIES POSITION_INDEPENDENT_CODE ON)
target_compile_definitions(${TARGET} PRIVATE SERVER_VERBOSE=$<BOOL:${LLAMA_SERVER_VERBOSE}>)
install(TARGETS ext_server LIBRARY)
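
# When the CUDA toolkit is found, add its include directories; on Windows, also link
# NVML (the NVIDIA Management Library)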
if (CUDAToolkit_FOUND)
    target_include_directories(${TARGET} PRIVATE ${CMAKE_CUDA_TOOLKIT_INCLUDE_DIRECTORIES})
    if (WIN32)
        target_link_libraries(${TARGET} PRIVATE nvml)
    endif()
endif()