@@ -1,21 +1,29 @@
cmake_minimum_required(VERSION 3.0)
project(PaddleDeploy CXX C)

-option(WITH_MKL "Compile demo with MKL/OpenBlas support,defaultuseMKL." ON)
-option(WITH_GPU "Compile demo with GPU/CPU, default use CPU." OFF)
-if (NOT WIN32)
- option(WITH_STATIC_LIB "Compile demo with static/shared library, default use static." OFF)
+if (WIN32)
+ option(WITH_STATIC_LIB "Compile demo with static/shared library, default use static." ON)
else()
- option(WITH_STATIC_LIB "Compile demo with static/shared library, default use static." ON)
+ option(WITH_STATIC_LIB "Compile demo with static/shared library, default use static." OFF)
endif()
-option(WITH_TENSORRT "Compile demo with TensorRT." OFF)
-option(WITH_ENCRYPTION "Compile demo with ENCRYPTION." OFF)
+# Paddle
+option(WITH_MKL "Compile demo with MKL/OpenBlas support, default use MKL." ON)
+option(WITH_GPU "Compile demo with GPU/CPU, default use CPU." OFF)
+option(WITH_PADDLE_TENSORRT "Compile demo with Paddle Inference TensorRT." OFF)
+# other engines
+option(WITH_OPENVINO "Compile demo with OpenVINO." OFF)
+option(WITH_ONNX_TENSORRT "Compile demo with ONNX-TensorRT." OFF)

+SET(OPENCV_DIR "" CACHE PATH "Location of libraries")
+SET(DEPS "" CACHE PATH "Location of libraries")
+# Paddle
SET(TENSORRT_DIR "" CACHE PATH "Location of libraries")
SET(PADDLE_DIR "" CACHE PATH "Location of libraries")
-SET(OPENCV_DIR "" CACHE PATH "Location of libraries")
SET(CUDA_LIB "" CACHE PATH "Location of libraries")
-SET(OPENSSL_DIR "" CACHE PATH "Location of libraries")
+#OpenVINO
+SET(GFLAGS_DIR "" CACHE PATH "Location of libraries")
+SET(OPENVINO_DIR "" CACHE PATH "Location of libraries")
+SET(NGRAPH_LIB "" CACHE PATH "Location of libraries")

SET(PROJECT_ROOT_DIR "." CACHE PATH "root directory of project.")

@@ -30,9 +38,17 @@ else()
add_definitions(-DPADDLEX_DEPLOY)
endif()

-#source
-include_directories("${CMAKE_SOURCE_DIR}/")
-link_directories("${CMAKE_CURRENT_BINARY_DIR}")
+#project
+include_directories("${PROJECT_SOURCE_DIR}")
+
+# common
+aux_source_directory(${PROJECT_SOURCE_DIR}/model_deploy/common/src SRC)
+
+# det seg clas pdx src
+aux_source_directory(${PROJECT_SOURCE_DIR}/model_deploy/ppdet/src DETECTOR_SRC)
+aux_source_directory(${PROJECT_SOURCE_DIR}/model_deploy/ppseg/src DETECTOR_SRC)
+aux_source_directory(${PROJECT_SOURCE_DIR}/model_deploy/ppclas/src DETECTOR_SRC)
+aux_source_directory(${PROJECT_SOURCE_DIR}/model_deploy/paddlex/src DETECTOR_SRC)

#yaml-cpp
if(WIN32)
@@ -40,68 +56,10 @@ if(WIN32)
else()
SET(YAML_BUILD_SHARED_LIBS ON CACHE BOOL "yaml build shared library.")
endif(WIN32)
-include(${PROJECT_ROOT_DIR}/cmake/yaml-cpp.cmake)
+include(${PROJECT_SOURCE_DIR}/cmake/yaml-cpp.cmake)
include_directories("${CMAKE_CURRENT_BINARY_DIR}/ext/yaml-cpp/src/ext-yaml-cpp/include")
link_directories("${CMAKE_CURRENT_BINARY_DIR}/ext/yaml-cpp/lib")

-#paddle inference
-if (NOT DEFINED PADDLE_DIR OR ${PADDLE_DIR} STREQUAL "")
- message(FATAL_ERROR "please set PADDLE_DIR with -DPADDLE_DIR=/path/paddle_influence_dir")
-endif()
-
-#paddle inference third party
-include_directories("${PADDLE_DIR}")
-include_directories("${PADDLE_DIR}/third_party/install/protobuf/include")
-include_directories("${PADDLE_DIR}/third_party/install/glog/include")
-include_directories("${PADDLE_DIR}/third_party/install/gflags/include")
-include_directories("${PADDLE_DIR}/third_party/install/xxhash/include")
-include_directories("${PADDLE_DIR}/third_party/install/cryptopp/include")
-
-link_directories("${PADDLE_DIR}/paddle/lib/")
-link_directories("${PADDLE_DIR}/third_party/install/protobuf/lib")
-link_directories("${PADDLE_DIR}/third_party/install/glog/lib")
-link_directories("${PADDLE_DIR}/third_party/install/gflags/lib")
-link_directories("${PADDLE_DIR}/third_party/install/xxhash/lib")
-link_directories("${PADDLE_DIR}/third_party/install/cryptopp/lib")
-
-if (WIN32)
- set(DEPS ${DEPS} ${PADDLE_DIR}/paddle/lib/paddle_inference.lib)
- set(DEPS ${DEPS} glog gflags_static libprotobuf xxhash cryptopp-static libyaml-cppmt shlwapi)
-else()
- if (WITH_STATIC_LIB)
- set(DEPS ${PADDLE_DIR}/paddle/lib/libpaddle_inference${CMAKE_STATIC_LIBRARY_SUFFIX})
- else()
- set(DEPS ${PADDLE_DIR}/paddle/lib/libpaddle_inference${CMAKE_SHARED_LIBRARY_SUFFIX})
- endif()
- set(DEPS ${DEPS} glog gflags protobuf xxhash cryptopp yaml-cpp)
-endif(WIN32)
-
-#MKL
-if(WITH_MKL)
- ADD_DEFINITIONS(-DUSE_MKL)
- set(MKLML_PATH "${PADDLE_DIR}/third_party/install/mklml")
- include_directories("${MKLML_PATH}/include")
- if (WIN32)
- set(MATH_LIB ${MKLML_PATH}/lib/mklml.lib ${MKLML_PATH}/lib/libiomp5md.lib)
- else ()
- set(MATH_LIB ${MKLML_PATH}/lib/libmklml_intel${CMAKE_SHARED_LIBRARY_SUFFIX} ${MKLML_PATH}/lib/libiomp5${CMAKE_SHARED_LIBRARY_SUFFIX})
- execute_process(COMMAND cp -r ${MKLML_PATH}/lib/libmklml_intel${CMAKE_SHARED_LIBRARY_SUFFIX} /usr/lib)
- endif ()
- set(MKLDNN_PATH "${PADDLE_DIR}/third_party/install/mkldnn")
- if(EXISTS ${MKLDNN_PATH})
- include_directories("${MKLDNN_PATH}/include")
- if (WIN32)
- set(MKLDNN_LIB ${MKLDNN_PATH}/lib/mkldnn.lib)
- else ()
- set(MKLDNN_LIB ${MKLDNN_PATH}/lib/libmkldnn.so.0)
- endif ()
- endif()
-else()
- set(MATH_LIB ${PADDLE_DIR}/third_party/install/openblas/lib/libopenblas${CMAKE_STATIC_LIBRARY_SUFFIX})
-endif()
-
-set(DEPS ${DEPS} ${MATH_LIB} ${MKLDNN_LIB})
-
#OPENCV
if (NOT (${CMAKE_SYSTEM_PROCESSOR} STREQUAL "aarch64"))
if (NOT DEFINED OPENCV_DIR OR ${OPENCV_DIR} STREQUAL "")
@@ -125,62 +83,7 @@ endif ()
set(DEPS ${DEPS} ${OpenCV_LIBS})
include_directories(${OpenCV_INCLUDE_DIRS})

-
-if (WITH_TENSORRT AND WITH_GPU)
- include_directories("${TENSORRT_DIR}/include")
- link_directories("${TENSORRT_DIR}/lib")
-
- file(READ ${TENSORRT_DIR}/include/NvInfer.h TENSORRT_VERSION_FILE_CONTENTS)
- string(REGEX MATCH "define NV_TENSORRT_MAJOR +([0-9]+)" TENSORRT_MAJOR_VERSION
- "${TENSORRT_VERSION_FILE_CONTENTS}")
- if("${TENSORRT_MAJOR_VERSION}" STREQUAL "")
- file(READ ${TENSORRT_DIR}/include/NvInferVersion.h TENSORRT_VERSION_FILE_CONTENTS)
- string(REGEX MATCH "define NV_TENSORRT_MAJOR +([0-9]+)" TENSORRT_MAJOR_VERSION
- "${TENSORRT_VERSION_FILE_CONTENTS}")
- endif()
- if("${TENSORRT_MAJOR_VERSION}" STREQUAL "")
- message(SEND_ERROR "Failed to detect TensorRT version.")
- endif()
- string(REGEX REPLACE "define NV_TENSORRT_MAJOR +([0-9]+)" "\\1"
- TENSORRT_MAJOR_VERSION "${TENSORRT_MAJOR_VERSION}")
- message(STATUS "Current TensorRT header is ${TENSORRT_INCLUDE_DIR}/NvInfer.h. "
- "Current TensorRT version is v${TENSORRT_MAJOR_VERSION}. ")
-endif()
-
-#set GPU
-if(WITH_GPU)
- if (NOT DEFINED CUDA_LIB OR ${CUDA_LIB} STREQUAL "")
- message(FATAL_ERROR "please set CUDA_LIB with -DCUDA_LIB=/path/cuda/lib64")
- endif()
-
- if(NOT WIN32)
- if (NOT DEFINED CUDNN_LIB)
- message(FATAL_ERROR "please set CUDNN_LIB with -DCUDNN_LIB=/path/cudnn/")
- endif()
-
- set(DEPS ${DEPS} ${CUDA_LIB}/libcudart${CMAKE_SHARED_LIBRARY_SUFFIX})
- set(DEPS ${DEPS} ${CUDNN_LIB}/libcudnn${CMAKE_SHARED_LIBRARY_SUFFIX})
-
- if (WITH_TENSORRT)
- set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/libnvinfer${CMAKE_SHARED_LIBRARY_SUFFIX})
- set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/libnvinfer_plugin${CMAKE_SHARED_LIBRARY_SUFFIX})
- endif()
-
- else()
- set(DEPS ${DEPS} ${CUDA_LIB}/cudart${CMAKE_STATIC_LIBRARY_SUFFIX} )
- set(DEPS ${DEPS} ${CUDA_LIB}/cublas${CMAKE_STATIC_LIBRARY_SUFFIX} )
- set(DEPS ${DEPS} ${CUDA_LIB}/cudnn${CMAKE_STATIC_LIBRARY_SUFFIX})
-
- if (WITH_TENSORRT)
- set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/nvinfer${CMAKE_STATIC_LIBRARY_SUFFIX})
- set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/nvinfer_plugin${CMAKE_STATIC_LIBRARY_SUFFIX})
- if(${TENSORRT_MAJOR_VERSION} GREATER_EQUAL 7)
- set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/myelin64_1${CMAKE_STATIC_LIBRARY_SUFFIX})
- endif()
- endif()
- endif()
-endif()
-
+# MSVC runtime flags (/MD -> /MT)
macro(safe_set_static_flag)
foreach(flag_var
CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
@@ -216,20 +119,7 @@ else()
set(DEPS ${DEPS} ${EXTERNAL_LIB})
endif()

-message("-----DEPS = ${DEPS}")
-
-#project
-include_directories("${PROJECT_ROOT_DIR}")
-
-aux_source_directory(${PROJECT_ROOT_DIR}/model_deploy/common/src SRC)
-set(ENGINE_SRC ${PROJECT_ROOT_DIR}/model_deploy/engine/src/ppinference_engine.cpp)
-
-#detector seg
-aux_source_directory(${PROJECT_ROOT_DIR}/model_deploy/ppdet/src DETECTOR_SRC)
-aux_source_directory(${PROJECT_ROOT_DIR}/model_deploy/ppseg/src DETECTOR_SRC)
-aux_source_directory(${PROJECT_ROOT_DIR}/model_deploy/ppclas/src DETECTOR_SRC)
-aux_source_directory(${PROJECT_ROOT_DIR}/model_deploy/paddlex/src DETECTOR_SRC)
-
+# encryption
set(ENCRYPTION_SRC "")
if (WITH_ENCRYPTION)
add_definitions(-DPADDLEX_DEPLOY_ENCRYPTION)
@@ -248,40 +138,9 @@ if (WITH_ENCRYPTION)
aux_source_directory(${PROJECT_ROOT_DIR}/encryption/util/src/crypto ENCRYPTION_SRC)
endif()

-add_executable(model_infer ${PROJECT_ROOT_DIR}/demo/model_infer.cpp ${SRC} ${ENGINE_SRC} ${DETECTOR_SRC} ${ENCRYPTION_SRC})
-ADD_DEPENDENCIES(model_infer ext-yaml-cpp)
-target_link_libraries(model_infer ${DEPS})
-
-add_executable(batch_infer ${PROJECT_ROOT_DIR}/demo/batch_infer.cpp ${SRC} ${ENGINE_SRC} ${DETECTOR_SRC} ${ENCRYPTION_SRC})
-ADD_DEPENDENCIES(batch_infer ext-yaml-cpp)
-target_link_libraries(batch_infer ${DEPS})
-
-add_executable(multi_gpu_model_infer ${PROJECT_ROOT_DIR}/demo/multi_gpu_model_infer.cpp ${SRC} ${ENGINE_SRC} ${DETECTOR_SRC} ${ENCRYPTION_SRC})
-ADD_DEPENDENCIES(multi_gpu_model_infer ext-yaml-cpp)
-target_link_libraries(multi_gpu_model_infer ${DEPS})
-
-if (WITH_TENSORRT)
- add_executable(tensorrt_infer ${PROJECT_ROOT_DIR}/demo/tensorrt_infer.cpp ${SRC} ${ENGINE_SRC} ${DETECTOR_SRC} ${ENCRYPTION_SRC})
- ADD_DEPENDENCIES(tensorrt_infer ext-yaml-cpp)
- target_link_libraries(tensorrt_infer ${DEPS})
-endif()
-
-if(WIN32)
- add_custom_command(TARGET model_infer POST_BUILD
- COMMAND ${CMAKE_COMMAND} -E copy ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ${CMAKE_BINARY_DIR}/paddle_deploy
- COMMAND ${CMAKE_COMMAND} -E copy ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ${CMAKE_BINARY_DIR}/paddle_deploy
- COMMAND ${CMAKE_COMMAND} -E copy ${PADDLE_DIR}/third_party/install/mkldnn/lib/mkldnn.dll ${CMAKE_BINARY_DIR}/paddle_deploy
- COMMAND ${CMAKE_COMMAND} -E copy ${PADDLE_DIR}/paddle/lib/paddle_inference.dll ${CMAKE_BINARY_DIR}/paddle_deploy
- )
- if (WITH_TENSORRT)
- add_custom_command(TARGET model_infer POST_BUILD
- COMMAND ${CMAKE_COMMAND} -E copy ${TENSORRT_DIR}/lib/nvinfer.dll ${CMAKE_BINARY_DIR}/paddle_deploy
- COMMAND ${CMAKE_COMMAND} -E copy ${TENSORRT_DIR}/lib/nvinfer_plugin.dll ${CMAKE_BINARY_DIR}/paddle_deploy
- )
- if(${TENSORRT_MAJOR_VERSION} GREATER_EQUAL 7)
- add_custom_command(TARGET model_infer POST_BUILD
- COMMAND ${CMAKE_COMMAND} -E copy ${TENSORRT_DIR}/lib/myelin64_1.dll ${CMAKE_BINARY_DIR}/paddle_deploy
- )
- endif()
- endif()
+# select engine
+if(WITH_OPENVINO)
+ add_subdirectory(demo/onnx_openvino)
+else ()
+ add_subdirectory(demo)
endif()
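
For reference, a minimal sketch of how this restructured CMakeLists might be configured, assuming the prebuilt Paddle Inference / OpenVINO packages are unpacked at the placeholder paths shown (all directory paths below are illustrative, not part of the diff); the actual inference targets are now defined in the demo/ and demo/onnx_openvino/ subdirectories selected above:

# Paddle Inference backend (default), GPU with TensorRT through Paddle:
cmake .. -DWITH_GPU=ON -DWITH_MKL=ON -DWITH_PADDLE_TENSORRT=ON \
    -DPADDLE_DIR=/path/to/paddle_inference \
    -DTENSORRT_DIR=/path/to/TensorRT \
    -DCUDA_LIB=/path/to/cuda/lib64 \
    -DOPENCV_DIR=/path/to/opencv

# OpenVINO backend (builds demo/onnx_openvino instead of demo):
cmake .. -DWITH_OPENVINO=ON \
    -DOPENVINO_DIR=/path/to/openvino \
    -DNGRAPH_LIB=/path/to/ngraph/lib \
    -DGFLAGS_DIR=/path/to/gflags \
    -DOPENCV_DIR=/path/to/opencv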
|