_build_py.sh

#!/bin/bash
set -e
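
# Builds the ultra_infer Python wheel inside a CentOS 7 build container.
# Backend support (Paddle Inference, ONNX Runtime, OpenVINO, TensorRT) and the
# optional vision / text / benchmark features are selected via the flags parsed below.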
while [[ "$#" -gt 0 ]]; do
    case "$1" in
        --with-gpu) WITH_GPU="$2"; shift ;;
        --enable-benchmark) ENABLE_BENCHMARK="$2"; shift ;;
        --python) PYTHON_VERSION="$2"; shift ;;
        --paddleinference-url) PADDLEINFERENCE_URL="$2"; shift ;;
        --paddleinference-version) PADDLEINFERENCE_VERSION="$2"; shift ;;
        --enable-paddle-backend) ENABLE_PADDLE_BACKEND="$2"; shift ;;
        --enable-ort-backend) ENABLE_ORT_BACKEND="$2"; shift ;;
        --enable-openvino-backend) ENABLE_OPENVINO_BACKEND="$2"; shift ;;
        --enable-trt-backend) ENABLE_TRT_BACKEND="$2"; shift ;;
        --trt-directory) TRT_DIRECTORY="$2"; shift ;;
        --enable-vision) ENABLE_VISION="$2"; shift ;;
        --enable-text) ENABLE_TEXT="$2"; shift ;;
        *) echo "Unknown parameter passed: $1"; exit 1 ;;
    esac
    shift
done
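
# Example invocation (illustrative only; the flag values and the PaddleInference
# URL/version are placeholders, not tested defaults):
#   bash _build_py.sh \
#     --with-gpu ON --python 3.10.0 \
#     --enable-ort-backend ON --enable-openvino-backend ON \
#     --enable-paddle-backend ON \
#     --paddleinference-url <paddle_inference_tarball_url> \
#     --paddleinference-version <paddle_inference_version> \
#     --enable-trt-backend ON --trt-directory Default \
#     --enable-vision ON --enable-text ON --enable-benchmark OFF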

export DEBIAN_FRONTEND='noninteractive'
export TZ='Asia/Shanghai'
cd /workspace
wget -O /etc/yum.repos.d/CentOS-Base.repo http://mirrors.cloud.tencent.com/repo/centos7_base.repo
yum clean all
yum makecache
yum --disablerepo=epel -y update ca-certificates
yum install -y wget bzip2
yum install -y epel-release
yum install -y patchelf rapidjson-devel
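
# Select the requested CPython build from the interpreter set shipped with the
# image (the /opt/_internal/cpython-<version> layout used by manylinux-style images).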
PYTHON_DIR="/opt/_internal/cpython-${PYTHON_VERSION}"
if [ -d "$PYTHON_DIR" ]; then
    ln -sf "${PYTHON_DIR}/bin/python${PYTHON_VERSION}" /usr/bin/python
    ln -sf "${PYTHON_DIR}/bin/pip${PYTHON_VERSION}" /usr/bin/pip
    export LD_LIBRARY_PATH="${PYTHON_DIR}/lib:${LD_LIBRARY_PATH}"
    export PATH="${PYTHON_DIR}/bin:${PATH}"
else
    echo "Python version ${PYTHON_VERSION} not found in ${PYTHON_DIR}."
    exit 1
fi

python -m pip install numpy pandas
cd /workspace/ultra-infer
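
# If the TensorRT backend is enabled but no install directory was supplied,
# download a prebuilt TensorRT 8.5.2.2 bundle (built against CUDA 11.8 / cuDNN 8.6)
# and point TRT_DIRECTORY at the extracted tree.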
  43. if [ "$ENABLE_TRT_BACKEND" = "ON" ] && [ "$TRT_DIRECTORY" = "Default" ]; then
  44. TRT_VERSION='8.5.2.2'
  45. CUDA_VERSION='11.8'
  46. CUDNN_VERSION='8.6'
  47. rm -rf "TensorRT-${TRT_VERSION}" "TensorRT-${TRT_VERSION}.Linux.x86_64-gnu.cuda-${CUDA_VERSION}.cudnn${CUDNN_VERSION}.tar.gz"
  48. http_proxy= https_proxy= wget "https://fastdeploy.bj.bcebos.com/resource/TensorRT/TensorRT-${TRT_VERSION}.Linux.x86_64-gnu.cuda-${CUDA_VERSION}.cudnn${CUDNN_VERSION}.tar.gz"
  49. tar -xzvf "TensorRT-${TRT_VERSION}.Linux.x86_64-gnu.cuda-${CUDA_VERSION}.cudnn${CUDNN_VERSION}.tar.gz"
  50. TRT_DIRECTORY="/workspace/ultra-infer/TensorRT-${TRT_VERSION}"
  51. fi
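
# Export the build configuration; these variables are presumably read by the
# Python build (setup.py / CMake) to decide which backends and features to compile.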
export WITH_GPU="${WITH_GPU}"
export ENABLE_TRT_BACKEND="${ENABLE_TRT_BACKEND}"
export TRT_DIRECTORY="${TRT_DIRECTORY}"
export ENABLE_ORT_BACKEND="${ENABLE_ORT_BACKEND}"
export ENABLE_PADDLE_BACKEND="${ENABLE_PADDLE_BACKEND}"
export PADDLEINFERENCE_URL="${PADDLEINFERENCE_URL}"
export PADDLEINFERENCE_VERSION="${PADDLEINFERENCE_VERSION}"
export ENABLE_OPENVINO_BACKEND="${ENABLE_OPENVINO_BACKEND}"
export ENABLE_VISION="${ENABLE_VISION}"
export ENABLE_TEXT="${ENABLE_TEXT}"
export ENABLE_BENCHMARK="${ENABLE_BENCHMARK}"
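
# Compile with the GCC 8.2 toolchain (presumably to match the ABI of the
# prebuilt inference libraries bundled into the wheel).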
export CC=/usr/local/gcc-8.2/bin/gcc
export CXX=/usr/local/gcc-8.2/bin/g++

cd /workspace/ultra-infer/python
python -m pip install wheel
unset http_proxy https_proxy
rm -rf .setuptools-cmake-build build ultra_infer/libs/third_libs dist
python setup.py build

# HACK: set an $ORIGIN-relative RPATH on the built extension so the third-party
# libraries bundled under ultra_infer/libs/third_libs are resolved at runtime.
patchelf \
    --set-rpath '$ORIGIN/libs/third_libs/onnxruntime/lib:$ORIGIN/libs/third_libs/paddle2onnx/lib:$ORIGIN/libs/third_libs/paddle_inference/paddle/lib:$ORIGIN/libs/third_libs/paddle_inference/third_party/install/cryptopp/lib:$ORIGIN/libs/third_libs/paddle_inference/third_party/install/mklml/lib:$ORIGIN/libs/third_libs/paddle_inference/third_party/install/glog/lib:$ORIGIN/libs/third_libs/paddle_inference/third_party/install/protobuf/lib:$ORIGIN/libs/third_libs/paddle_inference/third_party/install/utf8proc/lib:$ORIGIN/libs/third_libs/paddle_inference/third_party/install/xxhash/lib:$ORIGIN/libs/third_libs/paddle_inference/third_party/install/gflags/lib:$ORIGIN/libs/third_libs/paddle_inference/third_party/install/onednn/lib:$ORIGIN/libs/third_libs/tensorrt/lib:$ORIGIN/libs/third_libs/opencv/lib64:$ORIGIN/libs/third_libs/openvino/runtime/lib:$ORIGIN/libs/third_libs/openvino/runtime/3rdparty/omp/lib' \
    build/lib.*/ultra_infer/ultra_infer_main*.so

python setup.py bdist_wheel
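
# The finished wheel ends up under dist/ (the default bdist_wheel output directory),
# and can then be installed with, e.g.:
#   python -m pip install dist/*.whl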