|
1 | 1 | cmake_minimum_required(VERSION 3.10) |
2 | 2 | project(lite.ai.toolkit) |
3 | 3 |
|
4 | | -set(CMAKE_CXX_STANDARD 11) |
5 | | -set(VERSION_STRING 0.1.1) |
6 | | -set(SOVERSION_STRING 0.1.1) |
7 | | -include(cmake/platform.cmake) # checking platform |
| 4 | +set(CMAKE_CXX_STANDARD 17) |
| 5 | +set(VERSION_STRING 0.2.0) |
| 6 | +set(SOVERSION_STRING 0.2.0) |
| 7 | +include(cmake/utils.cmake) |
8 | 8 |
|
9 | | -message(STATUS "Lite.AI.ToolKit ${VERSION_STRING}") |
10 | | -message(STATUS "Project: lite.ai.toolkit") |
11 | | -message(STATUS "Version: ${VERSION_STRING}") |
12 | | -message(STATUS "SO Version: ${SOVERSION_STRING}") |
13 | | -message(STATUS "Build Type: ${CMAKE_BUILD_TYPE}") |
14 | | -message(STATUS "Platform Name: ${PLATFORM_NAME}") |
15 | | -message(STATUS "Root Path: ${CMAKE_SOURCE_DIR}") |
| 9 | +if (NOT (UNIX AND NOT APPLE)) |
| 10 | +  message(FATAL_ERROR "lite.ai.toolkit>=0.2 does not support windows/mac yet!") |
| 11 | +endif() |
| 12 | + |
| 13 | +set(THIRD_PARTY_PATH "${CMAKE_SOURCE_DIR}/third_party") |
| 14 | +if(NOT EXISTS ${THIRD_PARTY_PATH}) |
| 15 | + file(MAKE_DIRECTORY ${THIRD_PARTY_PATH}) |
| 16 | +endif() |
16 | 17 |
|
17 | 18 | # Linux GCC Compiler Options |
18 | 19 | if (CMAKE_COMPILER_IS_GNUCXX) |
19 | | - set(CMAKE_CXX_FLAGS "-std=c++11 -Wno-deprecated ${CMAKE_CXX_FLAGS} ") |
20 | | - message(STATUS "[Linux GCC Compiler Options]+:-std=c++11 -Wno-deprecated") |
| 20 | + set(CMAKE_CXX_FLAGS "-std=c++17 -Wno-deprecated ${CMAKE_CXX_FLAGS} ") |
| 21 | + message(STATUS "[Linux GCC Compiler Options]+:-std=c++17 -Wno-deprecated") |
21 | 22 | endif () |
22 | 23 | # message(STATUS "CMAKE_CXX_COMPILER: [${CMAKE_CXX_COMPILER}]") |
23 | 24 |
|
24 | 25 | # root dir |
25 | 26 | set(LITE_AI_ROOT_DIR ${CMAKE_SOURCE_DIR}) |
26 | | -# set default build dir for lite.ai.toolkit |
27 | | -if (NOT DEFINED BUILD_LITE_AI_DIR) |
28 | | - set(BUILD_LITE_AI_DIR ${LITE_AI_ROOT_DIR}/build/lite.ai.toolkit) |
29 | | -endif () |
30 | | -set(LIBRARY_OUTPUT_PATH ${BUILD_LITE_AI_DIR}/lib) |
31 | | -set(EXECUTABLE_OUTPUT_PATH ${BUILD_LITE_AI_DIR}/bin) |
32 | 27 |
|
33 | 28 | # compile options for lite.ai.toolkit |
34 | | -option(LITE_AI_BUILD_LIB "build shared libraries." ON) # now, ON only |
35 | 29 | option(LITE_AI_BUILD_TEST "build test examples." ON) |
36 | | -option(INCLUDE_OPENCV "package OpenCV into lite.ai.toolkit." ON) |
37 | 30 | # inference engines setups: config.h.in -> config.h |
38 | 31 | option(ENABLE_DEBUG_STRING "enable DEBUG string or not" ON) |
39 | 32 | option(ENABLE_ONNXRUNTIME "enable ONNXRuntime engine" ON) |
40 | | -option(ENABLE_MNN "enable MNN engine" ON) # unstable now, DON'T use |
41 | | -option(ENABLE_NCNN "enable NCNN engine" ON) # unstable now, DON'T use |
42 | | -option(ENABLE_TNN "enable TNN engine" ON) # unstable now, DON'T use |
| 33 | +option(ENABLE_MNN "enable MNN engine" OFF) # unstable now, DON'T use |
| 34 | +option(ENABLE_NCNN "enable NCNN engine" OFF) # unstable now, DON'T use |
| 35 | +option(ENABLE_TNN "enable TNN engine" OFF) # unstable now, DON'T use |
43 | 36 | # cuda provider setups: config.h.in -> config.h (only for onnxruntime) |
44 | 37 | option(ENABLE_ONNXRUNTIME_CUDA "enable ONNXRuntime engine with CUDA provider" OFF) |
45 | | -# openmp/opengl/vulkan/cuda setups: config.h.in -> config.h (for future use) |
46 | | -option(ENABLE_LITE_OPENMP "enable OPENMP accelerate for some post processes" OFF) # for future use, DON'T use NOW! |
47 | | -option(ENABLE_LITE_OPENGL "enable OPENGL accelerate for some post processes" OFF) # for future use, DON'T use NOW! |
48 | | -option(ENABLE_LITE_VULKAN "enable VULKAN accelerate for some post processes" OFF) # for future use, DON'T use NOW! |
49 | | -option(ENABLE_LITE_CUDA "enable CUDA accelerate for some post processes" OFF) # for future use, DON'T use NOW! |
50 | | -# videoio interface setups, for future use |
51 | 38 | option(ENABLE_OPENCV_VIDEOIO "enable opencv videoio modules for detect_video apis" ON) # now, ON only |
52 | | -# inference engines backend setups for lite.ai.toolkit |
53 | | -option(BACKEND_ONNXRUNTIME "set ONNXRuntime as the main backend of lite.ai.toolkit" ON) |
54 | | -option(BACKEND_MNN "set MNN as the main backend of lite.ai.toolkit" OFF) # now, OFF only |
55 | | -option(BACKEND_NCNN "set NCNN as the main backend of lite.ai.toolkit" OFF) # now, OFF only |
56 | | -option(BACKEND_TNN "set TNN as the main backend of lite.ai.toolkit" OFF) # now, OFF only |
57 | 39 |
|
58 | | -message(STATUS "Engines Enable Details ... ") |
59 | | -message(STATUS "INCLUDE_OPENCV: ${INCLUDE_OPENCV}") |
60 | | -message(STATUS "ENABLE_ONNXRUNTIME: ${ENABLE_ONNXRUNTIME}") |
61 | | -message(STATUS "ENABLE_MNN: ${ENABLE_MNN}") |
62 | | -message(STATUS "ENABLE_NCNN: ${ENABLE_NCNN}") |
63 | | -message(STATUS "ENABLE_TNN: ${ENABLE_TNN}") |
| 40 | +if (NOT ENABLE_ONNXRUNTIME) |
| 41 | +  message(FATAL_ERROR "ENABLE_ONNXRUNTIME must be enabled now!") |
| 42 | +endif() |
64 | 43 |
|
65 | 44 | # setup include dir and lib dir |
66 | 45 | include_directories(${LITE_AI_ROOT_DIR}) |
67 | | -link_directories(${LITE_AI_ROOT_DIR}/lib/${PLATFORM_NAME}) |
| 46 | +configure_file(cmake/lite.ai.toolkit.cmake.in ${CMAKE_SOURCE_DIR}/cmake/lite.ai.toolkit.cmake @ONLY) |
68 | 47 |
|
69 | 48 | # include custom cmake files. |
70 | 49 | include(cmake/opencv.cmake) |
71 | | -include(cmake/command.cmake) |
72 | 50 |
|
73 | | -# configuration for lite.ai shared lib. |
74 | | -if (LITE_AI_BUILD_LIB) |
75 | | - include(cmake/lite.ai.toolkit.cmake) |
76 | | -endif () |
| 51 | +add_lite_ai_toolkit_shared_library(${VERSION_STRING} ${SOVERSION_STRING}) |
| 52 | +install(TARGETS lite.ai.toolkit LIBRARY DESTINATION lib) |
| 53 | +install(DIRECTORY ${CMAKE_SOURCE_DIR}/lite |
| 54 | + DESTINATION ${CMAKE_INSTALL_PREFIX}/include |
| 55 | + FILES_MATCHING |
| 56 | + PATTERN "*.h") |
| 57 | +install(DIRECTORY ${THIRD_PARTY_PATH} DESTINATION ${CMAKE_INSTALL_PREFIX}/include/) |
| 58 | +install(FILES ${CMAKE_SOURCE_DIR}/cmake/lite.ai.toolkit.cmake |
| 59 | + DESTINATION ${CMAKE_INSTALL_PREFIX}) |
| 60 | +install(FILES ${CMAKE_SOURCE_DIR}/cmake/lite.ai.toolkit-config.cmake |
| 61 | + DESTINATION ${CMAKE_INSTALL_PREFIX}) |
77 | 62 |
|
78 | 63 | # configuration for test examples. |
79 | | -if (LITE_AI_BUILD_LIB AND LITE_AI_BUILD_TEST) |
| 64 | +if (LITE_AI_BUILD_TEST) |
| 65 | + set(EXECUTABLE_OUTPUT_PATH ${CMAKE_INSTALL_PREFIX}/bin) |
80 | 66 | add_subdirectory(examples/lite) |
| 67 | + if ((UNIX AND NOT APPLE)) |
| 68 | + file(GLOB_RECURSE ALL_THIRD_LIBS ${THIRD_PARTY_PATH} FOLLOW_SYMLINKS *.so*) |
| 69 | + install(FILES ${ALL_THIRD_LIBS} DESTINATION ${EXECUTABLE_OUTPUT_PATH}) |
| 70 | + endif() |
81 | 71 | endif () |
| 72 | + |
| 73 | +message(STATUS "-------------------------- lite.ai.toolkit Configuration Summary --------------------------") |
| 74 | +message(STATUS " Version: ${VERSION_STRING}") |
| 75 | +message(STATUS " SO Version: ${SOVERSION_STRING}") |
| 76 | +message(STATUS " Build Type: ${CMAKE_BUILD_TYPE}") |
| 77 | +message(STATUS " Root Path: ${CMAKE_SOURCE_DIR}") |
| 78 | +message(STATUS " OpenCV: ON Version: ${OpenCV_Version}") |
| 79 | +message(STATUS " ONNXRUNTIME: ${ENABLE_ONNXRUNTIME} Version: ${OnnxRuntime_Version}") |
| 80 | +message(STATUS " MNN: ${ENABLE_MNN} Version: ${MNN_Version}") |
| 81 | +message(STATUS " NCNN: ${ENABLE_NCNN} Version: ${NCNN_Version}") |
| 82 | +message(STATUS " TNN: ${ENABLE_TNN} Version: ${TNN_Version}") |
| 83 | +message(STATUS "-------------------------- lite.ai.toolkit Configuration Summary --------------------------") |
0 commit comments