# CMakeLists.txt — Paddle Inference deployment build configuration.
  1. #paddle inference
  2. if (NOT DEFINED PADDLE_DIR OR ${PADDLE_DIR} STREQUAL "")
  3. message(FATAL_ERROR "please set PADDLE_DIR with -DPADDLE_DIR=/path/paddle_influence_dir")
  4. endif()
  5. #paddle inference third party
  6. include_directories("${PADDLE_DIR}")
  7. include_directories("${PADDLE_DIR}/third_party/install/protobuf/include")
  8. include_directories("${PADDLE_DIR}/third_party/install/glog/include")
  9. include_directories("${PADDLE_DIR}/third_party/install/gflags/include")
  10. include_directories("${PADDLE_DIR}/third_party/install/xxhash/include")
  11. include_directories("${PADDLE_DIR}/third_party/install/cryptopp/include")
  12. link_directories("${PADDLE_DIR}/paddle/lib/")
  13. link_directories("${PADDLE_DIR}/third_party/install/protobuf/lib")
  14. link_directories("${PADDLE_DIR}/third_party/install/glog/lib")
  15. link_directories("${PADDLE_DIR}/third_party/install/gflags/lib")
  16. link_directories("${PADDLE_DIR}/third_party/install/xxhash/lib")
  17. link_directories("${PADDLE_DIR}/third_party/install/cryptopp/lib")
  18. if (WIN32)
  19. set(DEPS ${DEPS} ${PADDLE_DIR}/paddle/lib/paddle_inference.lib)
  20. set(DEPS ${DEPS} glog gflags_static libprotobuf xxhash cryptopp-static libyaml-cppmt shlwapi)
  21. else()
  22. if (WITH_STATIC_LIB)
  23. set(DEPS ${DEPS} ${PADDLE_DIR}/paddle/lib/libpaddle_inference${CMAKE_STATIC_LIBRARY_SUFFIX})
  24. else()
  25. set(DEPS ${DEPS} ${PADDLE_DIR}/paddle/lib/libpaddle_inference${CMAKE_SHARED_LIBRARY_SUFFIX})
  26. endif()
  27. set(DEPS ${DEPS} glog gflags protobuf xxhash cryptopp yaml-cpp)
  28. endif(WIN32)
  29. #MKL
  30. if(WITH_MKL)
  31. ADD_DEFINITIONS(-DUSE_MKL)
  32. set(MKLML_PATH "${PADDLE_DIR}/third_party/install/mklml")
  33. include_directories("${MKLML_PATH}/include")
  34. if (WIN32)
  35. set(MATH_LIB ${MKLML_PATH}/lib/mklml.lib ${MKLML_PATH}/lib/libiomp5md.lib)
  36. else ()
  37. set(MATH_LIB ${MKLML_PATH}/lib/libmklml_intel${CMAKE_SHARED_LIBRARY_SUFFIX} ${MKLML_PATH}/lib/libiomp5${CMAKE_SHARED_LIBRARY_SUFFIX})
  38. execute_process(COMMAND cp -r ${MKLML_PATH}/lib/libmklml_intel${CMAKE_SHARED_LIBRARY_SUFFIX} /usr/lib)
  39. endif ()
  40. set(MKLDNN_PATH "${PADDLE_DIR}/third_party/install/mkldnn")
  41. if(EXISTS ${MKLDNN_PATH})
  42. include_directories("${MKLDNN_PATH}/include")
  43. if (WIN32)
  44. set(MKLDNN_LIB ${MKLDNN_PATH}/lib/mkldnn.lib)
  45. else ()
  46. set(MKLDNN_LIB ${MKLDNN_PATH}/lib/libmkldnn.so.0)
  47. endif ()
  48. endif()
  49. else()
  50. set(MATH_LIB ${PADDLE_DIR}/third_party/install/openblas/lib/libopenblas${CMAKE_STATIC_LIBRARY_SUFFIX})
  51. endif()
  52. set(DEPS ${DEPS} ${MATH_LIB} ${MKLDNN_LIB})
  53. #set GPU
  54. if (WITH_PADDLE_TENSORRT AND WITH_GPU)
  55. include_directories("${TENSORRT_DIR}/include")
  56. link_directories("${TENSORRT_DIR}/lib")
  57. file(READ ${TENSORRT_DIR}/include/NvInfer.h TENSORRT_VERSION_FILE_CONTENTS)
  58. string(REGEX MATCH "define NV_TENSORRT_MAJOR +([0-9]+)" TENSORRT_MAJOR_VERSION
  59. "${TENSORRT_VERSION_FILE_CONTENTS}")
  60. if("${TENSORRT_MAJOR_VERSION}" STREQUAL "")
  61. file(READ ${TENSORRT_DIR}/include/NvInferVersion.h TENSORRT_VERSION_FILE_CONTENTS)
  62. string(REGEX MATCH "define NV_TENSORRT_MAJOR +([0-9]+)" TENSORRT_MAJOR_VERSION
  63. "${TENSORRT_VERSION_FILE_CONTENTS}")
  64. endif()
  65. if("${TENSORRT_MAJOR_VERSION}" STREQUAL "")
  66. message(SEND_ERROR "Failed to detect TensorRT version.")
  67. endif()
  68. string(REGEX REPLACE "define NV_TENSORRT_MAJOR +([0-9]+)" "\\1"
  69. TENSORRT_MAJOR_VERSION "${TENSORRT_MAJOR_VERSION}")
  70. message(STATUS "Current TensorRT header is ${TENSORRT_INCLUDE_DIR}/NvInfer.h. "
  71. "Current TensorRT version is v${TENSORRT_MAJOR_VERSION}. ")
  72. endif()
  73. if(WITH_GPU)
  74. if (NOT DEFINED CUDA_LIB OR ${CUDA_LIB} STREQUAL "")
  75. message(FATAL_ERROR "please set CUDA_LIB with -DCUDA_LIB=/path/cuda/lib64")
  76. endif()
  77. if(NOT WIN32)
  78. if (NOT DEFINED CUDNN_LIB)
  79. message(FATAL_ERROR "please set CUDNN_LIB with -DCUDNN_LIB=/path/cudnn/")
  80. endif()
  81. set(DEPS ${DEPS} ${CUDA_LIB}/libcudart${CMAKE_SHARED_LIBRARY_SUFFIX})
  82. set(DEPS ${DEPS} ${CUDNN_LIB}/libcudnn${CMAKE_SHARED_LIBRARY_SUFFIX})
  83. if (WITH_PADDLE_TENSORRT)
  84. set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/libnvinfer${CMAKE_SHARED_LIBRARY_SUFFIX})
  85. set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/libnvinfer_plugin${CMAKE_SHARED_LIBRARY_SUFFIX})
  86. endif()
  87. else()
  88. set(DEPS ${DEPS} ${CUDA_LIB}/cudart${CMAKE_STATIC_LIBRARY_SUFFIX} )
  89. set(DEPS ${DEPS} ${CUDA_LIB}/cublas${CMAKE_STATIC_LIBRARY_SUFFIX} )
  90. set(DEPS ${DEPS} ${CUDA_LIB}/cudnn${CMAKE_STATIC_LIBRARY_SUFFIX})
  91. if (WITH_PADDLE_TENSORRT)
  92. set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/nvinfer${CMAKE_STATIC_LIBRARY_SUFFIX})
  93. set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/nvinfer_plugin${CMAKE_STATIC_LIBRARY_SUFFIX})
  94. if(${TENSORRT_MAJOR_VERSION} GREATER_EQUAL 7)
  95. set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/myelin64_1${CMAKE_STATIC_LIBRARY_SUFFIX})
  96. endif()
  97. endif()
  98. endif()
  99. endif()
  100. message("-----DEPS = ${DEPS}")
  101. # engine src
  102. set(ENGINE_SRC ${PROJECT_SOURCE_DIR}/model_deploy/engine/src/ppinference_engine.cpp)
  103. ADD_library(model_infer SHARED ${PROJECT_SOURCE_DIR}/demo/model_infer.cpp ${SRC} ${ENGINE_SRC} ${DETECTOR_SRC} ${ENCRYPTION_SRC})
  104. # add_executable(model_infer model_infer.cpp ${SRC} ${ENGINE_SRC} ${DETECTOR_SRC} ${ENCRYPTION_SRC})
  105. ADD_DEPENDENCIES(model_infer ext-yaml-cpp)
  106. target_link_libraries(model_infer ${DEPS})
  107. add_executable(batch_infer batch_infer.cpp ${SRC} ${ENGINE_SRC} ${DETECTOR_SRC} ${ENCRYPTION_SRC})
  108. ADD_DEPENDENCIES(batch_infer ext-yaml-cpp)
  109. target_link_libraries(batch_infer ${DEPS})
  110. add_executable(multi_gpu_model_infer multi_gpu_model_infer.cpp ${SRC} ${ENGINE_SRC} ${DETECTOR_SRC} ${ENCRYPTION_SRC})
  111. ADD_DEPENDENCIES(multi_gpu_model_infer ext-yaml-cpp)
  112. target_link_libraries(multi_gpu_model_infer ${DEPS})
  113. if (WITH_PADDLE_TENSORRT)
  114. add_executable(tensorrt_infer tensorrt_infer.cpp ${SRC} ${ENGINE_SRC} ${DETECTOR_SRC} ${ENCRYPTION_SRC})
  115. ADD_DEPENDENCIES(tensorrt_infer ext-yaml-cpp)
  116. target_link_libraries(tensorrt_infer ${DEPS})
  117. endif()
  118. if(WIN32)
  119. add_custom_command(TARGET model_infer POST_BUILD
  120. COMMAND ${CMAKE_COMMAND} -E copy ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ${CMAKE_BINARY_DIR}/paddle_deploy
  121. COMMAND ${CMAKE_COMMAND} -E copy ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ${CMAKE_BINARY_DIR}/paddle_deploy
  122. COMMAND ${CMAKE_COMMAND} -E copy ${PADDLE_DIR}/third_party/install/mkldnn/lib/mkldnn.dll ${CMAKE_BINARY_DIR}/paddle_deploy
  123. COMMAND ${CMAKE_COMMAND} -E copy ${PADDLE_DIR}/paddle/lib/paddle_inference.dll ${CMAKE_BINARY_DIR}/paddle_deploy
  124. )
  125. if (WITH_PADDLE_TENSORRT)
  126. add_custom_command(TARGET model_infer POST_BUILD
  127. COMMAND ${CMAKE_COMMAND} -E copy ${TENSORRT_DIR}/lib/nvinfer.dll ${CMAKE_BINARY_DIR}/paddle_deploy
  128. COMMAND ${CMAKE_COMMAND} -E copy ${TENSORRT_DIR}/lib/nvinfer_plugin.dll ${CMAKE_BINARY_DIR}/paddle_deploy
  129. )
  130. if(${TENSORRT_MAJOR_VERSION} GREATER_EQUAL 7)
  131. add_custom_command(TARGET model_infer POST_BUILD
  132. COMMAND ${CMAKE_COMMAND} -E copy ${TENSORRT_DIR}/lib/myelin64_1.dll ${CMAKE_BINARY_DIR}/paddle_deploy
  133. )
  134. endif()
  135. endif()
  136. endif()