_build_py.sh

#!/bin/bash
set -e
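
# Example invocation (flag values are illustrative, not defaults of this script):
#   bash _build_py.sh --with-gpu ON --python 3.10 --enable-ort-backend ON \
#       --enable-openvino-backend ON --enable-trt-backend ON \
#       --trt-directory Default --enable-benchmark OFF

# Parse build options passed as "--flag VALUE" pairs; the shift inside each
# case plus the trailing shift consume the flag and its value together.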
while [[ "$#" -gt 0 ]]; do
    case "$1" in
        --with-gpu) WITH_GPU="$2"; shift ;;
        --enable-benchmark) ENABLE_BENCHMARK="$2"; shift ;;
        --python) PYTHON_VERSION="$2"; shift ;;
        --enable-ort-backend) ENABLE_ORT_BACKEND="$2"; shift ;;
        --enable-openvino-backend) ENABLE_OPENVINO_BACKEND="$2"; shift ;;
        --enable-trt-backend) ENABLE_TRT_BACKEND="$2"; shift ;;
        --trt-directory) TRT_DIRECTORY="$2"; shift ;;
        *) echo "Unknown parameter passed: $1"; exit 1 ;;
    esac
    shift
done
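
# Non-interactive package installs and a fixed timezone for the build environment.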
export DEBIAN_FRONTEND='noninteractive'
export TZ='Asia/Shanghai'
cd /workspace
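
# Point yum at the Tencent Cloud CentOS 7 mirror so package installs still
# resolve, refresh the cache, and install the build prerequisites (patchelf
# is used for the rpath fix below).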
wget -O /etc/yum.repos.d/CentOS-Base.repo http://mirrors.cloud.tencent.com/repo/centos7_base.repo
yum clean all
yum makecache
yum --disablerepo=epel -y update ca-certificates
yum install -y wget bzip2
yum install -y epel-release
yum install -y patchelf rapidjson-devel
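
# Select the requested CPython from the manylinux-style /opt/_internal layout
# and put it first on PATH so "python" and "pip" resolve to it.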
PYTHON_DIR="/opt/_internal/cpython-${PYTHON_VERSION}"
if [ -d "$PYTHON_DIR" ]; then
    ln -sf "${PYTHON_DIR}/bin/python${PYTHON_VERSION}" /usr/bin/python
    ln -sf "${PYTHON_DIR}/bin/pip${PYTHON_VERSION}" /usr/bin/pip
    export LD_LIBRARY_PATH="${PYTHON_DIR}/lib:${LD_LIBRARY_PATH}"
    export PATH="${PYTHON_DIR}/bin:${PATH}"
else
    echo "Python version ${PYTHON_VERSION} not found in ${PYTHON_DIR}."
    exit 1
fi
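
# Build-time Python dependencies.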
python -m pip install numpy pandas
cd /workspace/ultra-infer
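
# If the TensorRT backend is enabled but no local install is supplied
# (--trt-directory Default), download a prebuilt TensorRT matching CUDA 11.8
# from BOS (with proxies bypassed) and point TRT_DIRECTORY at the extracted tree.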
if [ "$ENABLE_TRT_BACKEND" = "ON" ] && [ "$TRT_DIRECTORY" = "Default" ]; then
    TRT_VERSION='8.6.1.6'
    CUDA_VERSION='11.8'
    rm -rf "TensorRT-${TRT_VERSION}" "TensorRT-${TRT_VERSION}.Linux.x86_64-gnu.cuda-${CUDA_VERSION}.tar.gz"
    http_proxy= https_proxy= wget "https://fastdeploy.bj.bcebos.com/resource/TensorRT/TensorRT-${TRT_VERSION}.Linux.x86_64-gnu.cuda-${CUDA_VERSION}.tar.gz"
    tar -xzvf "TensorRT-${TRT_VERSION}.Linux.x86_64-gnu.cuda-${CUDA_VERSION}.tar.gz"
    TRT_DIRECTORY="/workspace/ultra-infer/TensorRT-${TRT_VERSION}"
fi
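
# Export the feature toggles consumed by the Python build; the Paddle backend
# and the vision/text components are hard-disabled here, and the build uses
# GCC 8.2.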
export WITH_GPU="${WITH_GPU}"
export ENABLE_TRT_BACKEND="${ENABLE_TRT_BACKEND}"
export TRT_DIRECTORY="${TRT_DIRECTORY}"
export ENABLE_ORT_BACKEND="${ENABLE_ORT_BACKEND}"
export ENABLE_OPENVINO_BACKEND="${ENABLE_OPENVINO_BACKEND}"
export ENABLE_BENCHMARK="${ENABLE_BENCHMARK}"
export ENABLE_PADDLE_BACKEND=OFF
export ENABLE_VISION=OFF
export ENABLE_TEXT=OFF
export CC=/usr/local/gcc-8.2/bin/gcc
export CXX=/usr/local/gcc-8.2/bin/g++
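
# Build the ultra_infer extension from a clean tree, with proxies unset.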
cd /workspace/ultra-infer/python
python -m pip install wheel
unset http_proxy https_proxy
rm -rf .setuptools-cmake-build build ultra_infer/libs/third_libs dist
python setup.py build
# HACK: rewrite the main extension module's rpath so it resolves the bundled
# third-party shared libraries relative to $ORIGIN inside the installed package.
patchelf \
    --set-rpath '$ORIGIN/libs/third_libs/onnxruntime/lib:$ORIGIN/libs/third_libs/paddle2onnx/lib:$ORIGIN/libs/third_libs/paddle_inference/paddle/lib:$ORIGIN/libs/third_libs/paddle_inference/third_party/install/cryptopp/lib:$ORIGIN/libs/third_libs/paddle_inference/third_party/install/mklml/lib:$ORIGIN/libs/third_libs/paddle_inference/third_party/install/glog/lib:$ORIGIN/libs/third_libs/paddle_inference/third_party/install/protobuf/lib:$ORIGIN/libs/third_libs/paddle_inference/third_party/install/utf8proc/lib:$ORIGIN/libs/third_libs/paddle_inference/third_party/install/xxhash/lib:$ORIGIN/libs/third_libs/paddle_inference/third_party/install/gflags/lib:$ORIGIN/libs/third_libs/paddle_inference/third_party/install/onednn/lib:$ORIGIN/libs/third_libs/tensorrt/lib:$ORIGIN/libs/third_libs/opencv/lib64:$ORIGIN/libs/third_libs/openvino/runtime/lib:$ORIGIN/libs/third_libs/openvino/runtime/3rdparty/tbb/lib' \
    build/lib.*/ultra_infer/ultra_infer_main*.so
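
# Package the patched build into a wheel under dist/.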
python setup.py bdist_wheel