# paddle_inference.cmake
  1. # Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. include(ExternalProject)
  15. # The priority strategy for Paddle inference is as follows:
  16. # PADDLEINFERENCE_DIRECTORY > custom PADDLEINFERENCE_URL > default PADDLEINFERENCE_URL.
  17. if(WITH_GPU AND WITH_IPU)
  18. message(FATAL_ERROR "Cannot build with WITH_GPU=ON and WITH_IPU=ON on the same time.")
  19. endif()
  20. # Custom options for Paddle Inference backend
  21. option(PADDLEINFERENCE_DIRECTORY "Directory of custom Paddle Inference library" OFF)
  22. option(PADDLEINFERENCE_API_CUSTOM_OP "Whether building with custom paddle ops" OFF)
  23. option(PADDLEINFERENCE_API_COMPAT_2_4_x "Whether using Paddle Inference 2.4.x" OFF)
  24. option(PADDLEINFERENCE_API_COMPAT_2_5_x "Whether using Paddle Inference 2.5.x" OFF)
  25. option(PADDLEINFERENCE_API_COMPAT_2_6_x "Whether using Paddle Inference 2.6.x" OFF)
  26. option(PADDLEINFERENCE_API_COMPAT_DEV "Whether using Paddle Inference latest dev" OFF)
  27. option(PADDLEINFERENCE_API_COMPAT_CUDA_SM_80 "Whether using Paddle Inference with CUDA sm_80(A100)" OFF)
  28. set(PADDLEINFERENCE_URL "" CACHE STRING "URL of the custom Paddle Inference library")
  29. set(PADDLEINFERENCE_VERSION "" CACHE STRING "Paddle Inference version")
  30. set(PADDLEINFERENCE_PROJECT "extern_paddle_inference")
  31. set(PADDLEINFERENCE_PREFIX_DIR ${THIRD_PARTY_PATH}/paddle_inference)
  32. set(PADDLEINFERENCE_SOURCE_DIR
  33. ${THIRD_PARTY_PATH}/paddle_inference/src/${PADDLEINFERENCE_PROJECT})
  34. set(PADDLEINFERENCE_INSTALL_DIR ${THIRD_PARTY_PATH}/install/paddle_inference)
  35. set(PADDLEINFERENCE_INC_DIR "${PADDLEINFERENCE_INSTALL_DIR}"
  36. CACHE PATH "paddle_inference include directory." FORCE)
  37. set(PADDLEINFERENCE_LIB_DIR
  38. "${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/"
  39. CACHE PATH "paddle_inference lib directory." FORCE)
  40. set(CMAKE_BUILD_RPATH "${CMAKE_BUILD_RPATH}"
  41. "${PADDLEINFERENCE_LIB_DIR}")
  42. if(PADDLEINFERENCE_DIRECTORY)
  43. set(PADDLEINFERENCE_INC_DIR ${PADDLEINFERENCE_DIRECTORY})
  44. endif()
  45. include_directories(${PADDLEINFERENCE_INC_DIR})
  46. if(PADDLEINFERENCE_DIRECTORY)
  47. # Use custom Paddle Inference libs.
  48. if(EXISTS "${THIRD_PARTY_PATH}/install/paddle_inference")
  49. file(REMOVE_RECURSE "${THIRD_PARTY_PATH}/install/paddle_inference")
  50. endif()
  51. if(NOT Python_EXECUTABLE)
  52. find_package(Python COMPONENTS Interpreter Development REQUIRED)
  53. endif()
  54. message(STATUS "Copying ${PADDLEINFERENCE_DIRECTORY} to ${THIRD_PARTY_PATH}/install/paddle_inference ...")
  55. if(WIN32)
  56. execute_process(COMMAND mkdir -p ${THIRD_PARTY_PATH}/install)
  57. execute_process(COMMAND cp -r ${PADDLEINFERENCE_DIRECTORY} ${THIRD_PARTY_PATH}/install/paddle_inference)
  58. else()
  59. execute_process(COMMAND mkdir -p ${THIRD_PARTY_PATH}/install)
  60. execute_process(COMMAND cp -r ${PADDLEINFERENCE_DIRECTORY} ${THIRD_PARTY_PATH}/install/paddle_inference)
  61. execute_process(COMMAND rm -rf ${THIRD_PARTY_PATH}/install/paddle_inference/paddle/lib/*.a)
  62. endif()
  63. else()
  64. # Custom Paddle Inference URL
  65. if (NOT PADDLEINFERENCE_URL)
  66. # Use default Paddle Inference libs.
  67. set(PADDLEINFERENCE_URL_BASE "https://bj.bcebos.com/fastdeploy/third_libs/")
  68. if(WIN32)
  69. if (WITH_GPU)
  70. set(PADDLEINFERENCE_FILE "paddle_inference-win-x64-gpu-trt8.5.2.2-mkl-2.5.0.281761089e.zip")
  71. set(PADDLEINFERENCE_VERSION "2.5.0.281761089e")
  72. else()
  73. set(PADDLEINFERENCE_FILE "paddle_inference-win-x64-mkl-2.5.0.281761089e.zip")
  74. set(PADDLEINFERENCE_VERSION "2.5.0.281761089e")
  75. endif()
  76. elseif(APPLE)
  77. if(CURRENT_OSX_ARCH MATCHES "arm64")
  78. message(FATAL_ERROR "Paddle Backend doesn't support Mac OSX with Arm64 now.")
  79. set(PADDLEINFERENCE_FILE "paddle_inference-osx-arm64-openblas-0.0.0.660f781b77.tgz")
  80. else()
  81. # TODO(qiuyanjun): Should remove this old paddle inference lib
  82. # set(PADDLEINFERENCE_FILE "paddle_inference-osx-x86_64-2.4-dev3.tgz")
  83. set(PADDLEINFERENCE_FILE "paddle_inference-osx-x86_64-openblas-0.0.0.660f781b77.tgz")
  84. endif()
  85. set(PADDLEINFERENCE_VERSION "0.0.0.660f781b77")
  86. else()
  87. # Linux with x86/aarch64 CPU/Arm CPU/GPU/IPU ...
  88. if(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "aarch64")
  89. message(FATAL_ERROR "Paddle Backend doesn't support linux aarch64 now.")
  90. else()
  91. # x86_64
  92. if(WITH_GPU)
  93. if(PADDLEINFERENCE_API_COMPAT_CUDA_SM_80)
  94. set(PADDLEINFERENCE_FILE "paddle_inference-linux-x64-gpu-trt8.5.2.2-mkl-sm70.sm75.sm80.sm86.nodist-2.5.1.tgz")
  95. set(PADDLEINFERENCE_VERSION "2.5.1")
  96. else()
  97. set(PADDLEINFERENCE_FILE "paddle_inference-linux-x64-gpu-trt8.5.2.2-mkl-sm61.sm70.sm75.sm86.nodist-2.5.1.tgz")
  98. set(PADDLEINFERENCE_VERSION "2.5.1")
  99. endif()
  100. else()
  101. set(PADDLEINFERENCE_FILE "paddle_inference-linux-x64-mkl-2.5.1.tgz")
  102. set(PADDLEINFERENCE_VERSION "2.5.1")
  103. endif()
  104. if(WITH_IPU)
  105. set(PADDLEINFERENCE_FILE "paddle_inference-linux-x64-ipu-2.4-dev1.tgz")
  106. # TODO(qiuyanjun): Should use the commit id to tag the version
  107. set(PADDLEINFERENCE_VERSION "2.4-dev1")
  108. endif()
  109. if(WITH_KUNLUNXIN)
  110. set(PADDLEINFERENCE_FILE "paddle_inference-linux-x64-xpu-openblas-0.0.0.021fd73536.tgz")
  111. set(PADDLEINFERENCE_VERSION "0.0.0.021fd73536")
  112. endif()
  113. if(NEED_ABI0)
  114. if(WITH_GPU OR WITH_IPU OR WITH_KUNLUNXIN)
  115. message(WARNING "While NEED_ABI0=ON, only support CPU now, will fallback to CPU.")
  116. endif()
  117. set(PADDLEINFERENCE_FILE "paddle_inference-linux-x64-2.4.0-abi0.tgz")
  118. set(PADDLEINFERENCE_VERSION "2.4.0-abi0")
  119. endif()
  120. endif()
  121. endif()
  122. set(PADDLEINFERENCE_URL "${PADDLEINFERENCE_URL_BASE}${PADDLEINFERENCE_FILE}")
  123. endif(PADDLEINFERENCE_URL)
  124. ExternalProject_Add(
  125. ${PADDLEINFERENCE_PROJECT}
  126. ${EXTERNAL_PROJECT_LOG_ARGS}
  127. URL ${PADDLEINFERENCE_URL}
  128. PREFIX ${PADDLEINFERENCE_PREFIX_DIR}
  129. DOWNLOAD_NO_PROGRESS 1
  130. CONFIGURE_COMMAND ""
  131. BUILD_COMMAND ""
  132. UPDATE_COMMAND ""
  133. INSTALL_COMMAND
  134. ${CMAKE_COMMAND} -E copy_directory ${PADDLEINFERENCE_SOURCE_DIR} ${PADDLEINFERENCE_INSTALL_DIR}
  135. BUILD_BYPRODUCTS ${PADDLEINFERENCE_COMPILE_LIB})
  136. endif(PADDLEINFERENCE_DIRECTORY)
  137. if (PADDLEINFERENCE_VERSION STREQUAL "")
  138. message(FATAL_ERROR "The Paddle Inference version is unspecified and cannot be determined.")
  139. endif()
  140. string(REGEX MATCH "([0-9]+)\\.([0-9]+)\\.([0-9]+)" _ "${PADDLEINFERENCE_VERSION}")
  141. set(PADDLEINFERENCE_VERSION_MAJOR "${CMAKE_MATCH_1}")
  142. set(PADDLEINFERENCE_VERSION_MINOR "${CMAKE_MATCH_2}")
  143. set(PADDLEINFERENCE_VERSION_PATCH "${CMAKE_MATCH_3}")
  144. add_definitions("-DPADDLEINFERENCE_VERSION_MAJOR=${PADDLEINFERENCE_VERSION_MAJOR}")
  145. add_definitions("-DPADDLEINFERENCE_VERSION_MINOR=${PADDLEINFERENCE_VERSION_MINOR}")
  146. add_definitions("-DPADDLEINFERENCE_VERSION_PATCH=${PADDLEINFERENCE_VERSION_PATCH}")
  147. # check libs
  148. if(WIN32)
  149. set(PADDLEINFERENCE_COMPILE_LIB
  150. "${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/paddle_inference.lib"
  151. CACHE FILEPATH "paddle_inference compile library." FORCE)
  152. if(PADDLEINFERENCE_VERSION_MAJOR EQUAL 2)
  153. set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mkldnn/lib/mkldnn.lib")
  154. else()
  155. set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onednn/lib/dnnl.lib")
  156. endif()
  157. set(OMP_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mklml/lib/libiomp5md.lib")
  158. set(P2O_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/paddle2onnx/lib/paddle2onnx.lib")
  159. set(ORT_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onnxruntime/lib/onnxruntime.lib")
  160. elseif(APPLE)
  161. set(PADDLEINFERENCE_COMPILE_LIB
  162. "${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/libpaddle_inference.dylib"
  163. CACHE FILEPATH "paddle_inference compile library." FORCE)
  164. if(PADDLEINFERENCE_VERSION_MAJOR EQUAL 2)
  165. set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mkldnn/lib/libdnnl.so.2")
  166. else()
  167. set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onednn/lib/libdnnl.so.3")
  168. endif()
  169. set(OMP_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mklml/lib/libiomp5.so")
  170. set(P2O_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/paddle2onnx/lib/libpaddle2onnx.dylib")
  171. set(ORT_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onnxruntime/lib/libonnxruntime.dylib")
  172. else()
  173. set(PADDLEINFERENCE_COMPILE_LIB
  174. "${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/libpaddle_inference.so"
  175. CACHE FILEPATH "paddle_inference compile library." FORCE)
  176. if(PADDLEINFERENCE_VERSION_MAJOR EQUAL 2)
  177. set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mkldnn/lib/libdnnl.so.2")
  178. else()
  179. set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onednn/lib/libdnnl.so.3")
  180. endif()
  181. set(OMP_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mklml/lib/libiomp5.so")
  182. set(P2O_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/paddle2onnx/lib/libpaddle2onnx.so")
  183. set(ORT_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onnxruntime/lib/libonnxruntime.so")
  184. endif(WIN32)
  185. # Path Paddle Inference ELF lib file
  186. if(UNIX AND (NOT APPLE))
  187. set(PATCHELF_SCRIPT ${PROJECT_SOURCE_DIR}/scripts/patch_paddle_inference.py)
  188. set(PATCHELF_TARGET ${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/libpaddle_inference.so)
  189. add_custom_target(
  190. patchelf_paddle_inference ALL COMMAND bash -c
  191. "PATCHELF_EXE=${PATCHELF_EXE} python ${PATCHELF_SCRIPT} ${PATCHELF_TARGET} ${PADDLEINFERENCE_VERSION}"
  192. DEPENDS ${LIBRARY_NAME}
  193. )
  194. unset(PATCHELF_SCRIPT)
  195. unset(PATCHELF_TARGET)
  196. endif()
  197. add_library(external_paddle_inference STATIC IMPORTED GLOBAL)
  198. set_property(TARGET external_paddle_inference PROPERTY IMPORTED_LOCATION
  199. ${PADDLEINFERENCE_COMPILE_LIB})
  200. add_dependencies(external_paddle_inference ${PADDLEINFERENCE_PROJECT})
  201. add_library(external_p2o STATIC IMPORTED GLOBAL)
  202. set_property(TARGET external_p2o PROPERTY IMPORTED_LOCATION
  203. ${P2O_LIB})
  204. add_dependencies(external_p2o ${PADDLEINFERENCE_PROJECT})
  205. add_library(external_ort STATIC IMPORTED GLOBAL)
  206. set_property(TARGET external_ort PROPERTY IMPORTED_LOCATION
  207. ${ORT_LIB})
  208. add_dependencies(external_ort ${PADDLEINFERENCE_PROJECT})
  209. add_library(external_dnnl STATIC IMPORTED GLOBAL)
  210. set_property(TARGET external_dnnl PROPERTY IMPORTED_LOCATION
  211. ${DNNL_LIB})
  212. add_dependencies(external_dnnl ${PADDLEINFERENCE_PROJECT})
  213. add_library(external_omp STATIC IMPORTED GLOBAL)
  214. set_property(TARGET external_omp PROPERTY IMPORTED_LOCATION
  215. ${OMP_LIB})
  216. add_dependencies(external_omp ${PADDLEINFERENCE_PROJECT})
  217. # Compatible policy for 2.4.x/2.5.x/2.6.x and latest dev.
  218. if (NOT WITH_KUNLUNXIN)
  219. string(REGEX MATCH "0.0.0" PADDLEINFERENCE_USE_DEV ${PADDLEINFERENCE_VERSION})
  220. string(REGEX MATCH "2.4|post24|post2.4" PADDLEINFERENCE_USE_2_4_x ${PADDLEINFERENCE_VERSION})
  221. string(REGEX MATCH "2.5|post25|post2.5" PADDLEINFERENCE_USE_2_5_x ${PADDLEINFERENCE_VERSION})
  222. string(REGEX MATCH "2.6|post26|post2.6" PADDLEINFERENCE_USE_2_6_x ${PADDLEINFERENCE_VERSION})
  223. endif()
  224. if(PADDLEINFERENCE_USE_DEV)
  225. set(PADDLEINFERENCE_API_COMPAT_DEV ON CACHE BOOL "" FORCE)
  226. endif()
  227. if(PADDLEINFERENCE_USE_2_6_x)
  228. set(PADDLEINFERENCE_API_COMPAT_2_6_x ON CACHE BOOL "" FORCE)
  229. endif()
  230. if(PADDLEINFERENCE_USE_2_5_x)
  231. set(PADDLEINFERENCE_API_COMPAT_2_5_x ON CACHE BOOL "" FORCE)
  232. endif()
  233. if(PADDLEINFERENCE_USE_2_4_x AND (NOT PADDLEINFERENCE_API_COMPAT_2_5_x) AND (NOT PADDLEINFERENCE_API_COMPAT_2_6_x) AND (NOT PADDLEINFERENCE_API_COMPAT_DEV))
  234. set(PADDLEINFERENCE_API_COMPAT_2_4_x ON CACHE BOOL "" FORCE)
  235. message(WARNING "You are using PADDLEINFERENCE_USE_2_4_x:${PADDLEINFERENCE_VERSION}, force PADDLEINFERENCE_API_COMPAT_2_4_x=ON")
  236. endif()
  237. if(PADDLEINFERENCE_API_COMPAT_2_4_x)
  238. add_definitions(-DPADDLEINFERENCE_API_COMPAT_2_4_x)
  239. endif()
  240. if(PADDLEINFERENCE_API_COMPAT_2_5_x)
  241. add_definitions(-DPADDLEINFERENCE_API_COMPAT_2_5_x)
  242. endif()
  243. if(PADDLEINFERENCE_API_COMPAT_2_6_x)
  244. add_definitions(-DPADDLEINFERENCE_API_COMPAT_2_6_x)
  245. endif()
  246. if(PADDLEINFERENCE_API_COMPAT_DEV)
  247. add_definitions(-DPADDLEINFERENCE_API_COMPAT_DEV)
  248. endif()
  249. # Compatible policy for custom paddle ops
  250. if(PADDLEINFERENCE_API_COMPAT_2_5_x AND (NOT WITH_KUNLUNXIN))
  251. # no c++ standard policy conflicts vs c++ 11
  252. # TODO: support custom ops for latest dev
  253. set(PADDLEINFERENCE_API_CUSTOM_OP ON CACHE BOOL "" FORCE)
  254. # add paddle_inference/paddle/include path for custom ops
  255. # the extension.h and it's deps headers are located in
  256. # paddle/include/paddle directory.
  257. include_directories(${PADDLEINFERENCE_INC_DIR}/paddle/include)
  258. message(WARNING "You are using PADDLEINFERENCE_API_COMPAT_2_5_x:${PADDLEINFERENCE_VERSION}, force PADDLEINFERENCE_API_CUSTOM_OP=${PADDLEINFERENCE_API_CUSTOM_OP}")
  259. endif()
  260. function(set_paddle_custom_ops_compatible_policy)
  261. if(PADDLEINFERENCE_API_CUSTOM_OP AND (NOT WITH_KUNLUNXIN))
  262. if(NOT MSVC)
  263. # TODO: add non c++ 14 policy for latest dev
  264. if(NOT PADDLEINFERENCE_API_COMPAT_2_5_x)
  265. # gcc c++ 14 policy for 2.4.x
  266. if(NOT DEFINED CMAKE_CXX_STANDARD)
  267. set(CMAKE_CXX_STANDARD 14 PARENT_SCOPE)
  268. message(WARNING "Found PADDLEINFERENCE_API_CUSTOM_OP=ON, but CMAKE_CXX_STANDARD is not defined, use c++ 14 by default!")
  269. elseif(NOT (CMAKE_CXX_STANDARD EQUAL 14))
  270. set(CMAKE_CXX_STANDARD 14 PARENT_SCOPE)
  271. message(WARNING "Found PADDLEINFERENCE_API_CUSTOM_OP=ON, force use c++ 14!")
  272. endif()
  273. endif()
  274. if(WITH_GPU)
  275. # cuda c++ 14 policy for 2.4.x
  276. if(NOT PADDLEINFERENCE_API_COMPAT_2_5_x)
  277. if(NOT DEFINED CMAKE_CUDA_STANDARD)
  278. set(CMAKE_CUDA_STANDARD 14 PARENT_SCOPE)
  279. message(WARNING "Found PADDLEINFERENCE_API_CUSTOM_OP=ON and WITH_GPU=ON, but CMAKE_CUDA_STANDARD is not defined, use c++ 14 by default!")
  280. elseif(NOT (CMAKE_CUDA_STANDARD EQUAL 14))
  281. set(CMAKE_CUDA_STANDARD 14 PARENT_SCOPE)
  282. message(WARNING "Found PADDLEINFERENCE_API_CUSTOM_OP=ON and WITH_GPU=ON, force use c++ 14!")
  283. endif()
  284. endif()
  285. endif()
  286. endif()
  287. # common compile flags for paddle custom ops
  288. add_definitions(-DPADDLE_ON_INFERENCE)
  289. add_definitions(-DPADDLE_NO_PYTHON)
  290. if(WITH_GPU)
  291. add_definitions(-DPADDLE_WITH_CUDA)
  292. endif()
  293. endif()
  294. endfunction()