_build_cpp.sh

#!/bin/bash
set -e
# Parse command-line flags; each flag takes an explicit value (e.g. --with-gpu ON).
while [[ "$#" -gt 0 ]]; do
  case "$1" in
    --with-gpu) WITH_GPU="$2"; shift ;;
    --enable-benchmark) ENABLE_BENCHMARK="$2"; shift ;;
    --paddleinference-url) PADDLEINFERENCE_URL="$2"; shift ;;
    --paddleinference-version) PADDLEINFERENCE_VERSION="$2"; shift ;;
    --enable-paddle-backend) ENABLE_PADDLE_BACKEND="$2"; shift ;;
    --enable-ort-backend) ENABLE_ORT_BACKEND="$2"; shift ;;
    --enable-openvino-backend) ENABLE_OPENVINO_BACKEND="$2"; shift ;;
    --enable-trt-backend) ENABLE_TRT_BACKEND="$2"; shift ;;
    --trt-directory) TRT_DIRECTORY="$2"; shift ;;
    --enable-vision) ENABLE_VISION="$2"; shift ;;
    --enable-text) ENABLE_TEXT="$2"; shift ;;
    *) echo "Unknown parameter passed: $1"; exit 1 ;;
  esac
  shift
done
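# Example invocation (values and the paddle_inference URL are illustrative,
# not a recommended configuration):
#   bash _build_cpp.sh --with-gpu ON --enable-trt-backend ON --trt-directory Default \
#     --enable-ort-backend ON --enable-paddle-backend ON \
#     --paddleinference-url https://example.com/paddle_inference.tgz \
#     --paddleinference-version 0.0.0 \
#     --enable-openvino-backend OFF --enable-vision ON --enable-text OFF \
#     --enable-benchmark OFF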
export DEBIAN_FRONTEND='noninteractive'
export TZ='Asia/Shanghai'
# Build with the GCC 8.2 toolchain bundled in the image.
export CC=/usr/local/gcc-8.2/bin/gcc
export CXX=/usr/local/gcc-8.2/bin/g++
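# Optional sanity check (a sketch; assumes the gcc-8.2 toolchain ships with
# the build image at the paths above):
#   [ -x "$CC" ] && [ -x "$CXX" ] || { echo "gcc-8.2 toolchain not found" >&2; exit 1; }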
cd /workspace/ultra-infer
# Point yum at the Tencent Cloud CentOS 7 mirror and install build dependencies.
wget -O /etc/yum.repos.d/CentOS-Base.repo http://mirrors.cloud.tencent.com/repo/centos7_base.repo
yum clean all
yum makecache
yum install -y patchelf rapidjson-devel
# Make the image's CPython 3.10 the default python/pip.
ln -sf /opt/_internal/cpython-3.10.0/bin/python3.10 /usr/bin/python
ln -sf /opt/_internal/cpython-3.10.0/bin/pip3.10 /usr/bin/pip
export LD_LIBRARY_PATH=/opt/_internal/cpython-3.10.0/lib:${LD_LIBRARY_PATH}
export PATH=/opt/_internal/cpython-3.10.0/bin:${PATH}
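# Quick check that the intended interpreter is now first on PATH (illustrative):
#   python --version   # expected to report Python 3.10.x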
# If the TRT backend is requested but no directory was given, fetch a prebuilt TensorRT.
if [ "$ENABLE_TRT_BACKEND" = "ON" ] && [ "$TRT_DIRECTORY" = "Default" ]; then
  TRT_VERSION='8.5.2.2'
  CUDA_VERSION='11.8'
  CUDNN_VERSION='8.6'
  rm -rf "TensorRT-${TRT_VERSION}" "TensorRT-${TRT_VERSION}.Linux.x86_64-gnu.cuda-${CUDA_VERSION}.cudnn${CUDNN_VERSION}.tar.gz"
  # Bypass any configured proxy for the BOS download.
  http_proxy= https_proxy= wget "https://fastdeploy.bj.bcebos.com/resource/TensorRT/TensorRT-${TRT_VERSION}.Linux.x86_64-gnu.cuda-${CUDA_VERSION}.cudnn${CUDNN_VERSION}.tar.gz"
  tar -xzvf "TensorRT-${TRT_VERSION}.Linux.x86_64-gnu.cuda-${CUDA_VERSION}.cudnn${CUDNN_VERSION}.tar.gz"
  TRT_DIRECTORY="/workspace/ultra-infer/TensorRT-${TRT_VERSION}"
fi
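# Optional integrity check (a sketch, not part of the original flow): verify
# the TRT tree looks sane before handing it to cmake.
#   [ "$ENABLE_TRT_BACKEND" != "ON" ] || [ -d "${TRT_DIRECTORY}/lib" ] || \
#     { echo "TensorRT not found at ${TRT_DIRECTORY}" >&2; exit 1; }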
# Configure and build in a subshell so the directory changes don't leak.
(
  cd /workspace/ultra-infer
  rm -rf build && mkdir build && cd build
  unset http_proxy https_proxy
  cmake \
    -DLIBRARY_NAME='ultra_infer_runtime' \
    -DCMAKE_INSTALL_PREFIX="${PWD}/ultra_infer_install" \
    -DWITH_GPU="${WITH_GPU}" \
    -DENABLE_TRT_BACKEND="${ENABLE_TRT_BACKEND}" \
    -DTRT_DIRECTORY="${TRT_DIRECTORY}" \
    -DENABLE_ORT_BACKEND="${ENABLE_ORT_BACKEND}" \
    -DENABLE_PADDLE_BACKEND="${ENABLE_PADDLE_BACKEND}" \
    -DPADDLEINFERENCE_URL="${PADDLEINFERENCE_URL}" \
    -DPADDLEINFERENCE_VERSION="${PADDLEINFERENCE_VERSION}" \
    -DENABLE_OPENVINO_BACKEND="${ENABLE_OPENVINO_BACKEND}" \
    -DENABLE_VISION="${ENABLE_VISION}" \
    -DENABLE_TEXT="${ENABLE_TEXT}" \
    -DBUILD_ULTRAINFER_PYTHON=OFF \
    -DBUILD_FD_TRITON_BACKEND=ON \
    -DENABLE_BENCHMARK="${ENABLE_BENCHMARK}" \
    ..
  make -j"$(nproc)"
  make install
)
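# On success the C++ SDK is staged under /workspace/ultra-infer/build/ultra_infer_install
# (per CMAKE_INSTALL_PREFIX above). Packaging it for distribution could look
# like this (illustrative, not part of the original flow):
#   tar -czf ultra_infer_install.tar.gz -C build ultra_infer_install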