
Merge pull request #495 from syyxsxx/develop

jetson build fix
Jason 4 years ago
parent
commit
cfa7fe9f8d

+ 14 - 4
deploy/cpp/CMakeLists.txt

@@ -164,8 +164,13 @@ endif()
 
 if (NOT WIN32)
   if (WITH_TENSORRT AND WITH_GPU)
-      include_directories("${TENSORRT_DIR}/include")
-      link_directories("${TENSORRT_DIR}/lib")
+      if (${CMAKE_SYSTEM_PROCESSOR} STREQUAL "aarch64") # x86_64 aarch64
+          include_directories("/usr/include/aarch64-linux-gnu")
+	  link_directories("/usr/lib/aarch64-linux-gnu")
+      else()
+          include_directories("${TENSORRT_DIR}/include")
+          link_directories("${TENSORRT_DIR}/lib")
+      endif()
   endif()
 endif(NOT WIN32)
 
@@ -256,8 +261,13 @@ endif(NOT WIN32)
 if(WITH_GPU)
   if(NOT WIN32)
     if (WITH_TENSORRT)
-      set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/libnvinfer${CMAKE_SHARED_LIBRARY_SUFFIX})
-      set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/libnvinfer_plugin${CMAKE_SHARED_LIBRARY_SUFFIX})
+      if (${CMAKE_SYSTEM_PROCESSOR} STREQUAL "aarch64") # x86_64 aarch64
+	set(DEPS ${DEPS} /usr/lib/aarch64-linux-gnu/libnvinfer${CMAKE_SHARED_LIBRARY_SUFFIX})
+        set(DEPS ${DEPS} /usr/lib/aarch64-linux-gnu/libnvinfer_plugin${CMAKE_SHARED_LIBRARY_SUFFIX})
+      else()
+	set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/libnvinfer${CMAKE_SHARED_LIBRARY_SUFFIX})
+        set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/libnvinfer_plugin${CMAKE_SHARED_LIBRARY_SUFFIX})
+      endif()
     endif()
     set(DEPS ${DEPS} ${CUDA_LIB}/libcudart${CMAKE_SHARED_LIBRARY_SUFFIX})
     set(DEPS ${DEPS} ${CUDNN_LIB}/libcudnn${CMAKE_SHARED_LIBRARY_SUFFIX})
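With this change, an aarch64 (Jetson) build resolves the TensorRT headers and libraries from the JetPack system locations (/usr/include/aarch64-linux-gnu and /usr/lib/aarch64-linux-gnu) instead of TENSORRT_DIR, so TENSORRT_DIR no longer has to be passed on Jetson. A minimal configure sketch, assuming the CMake options named above are passed on the command line the same way the build script does (the CUDA/cuDNN paths are examples, not verified defaults):

```
# Hypothetical Jetson (aarch64) configure; TensorRT is taken from the JetPack
# system paths, so no -DTENSORRT_DIR is needed. Adjust the paths to your setup.
cd deploy/cpp && mkdir -p build && cd build
cmake .. \
    -DWITH_GPU=ON \
    -DWITH_TENSORRT=ON \
    -DWITH_MKL=OFF \
    -DPADDLE_DIR=/root/projects/paddle_inference \
    -DCUDA_LIB=/usr/local/cuda/lib64 \
    -DCUDNN_LIB=/usr/lib/aarch64-linux-gnu
make -j$(nproc)
```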

+ 5 - 0
deploy/cpp/CMakeSettings.json

@@ -50,6 +50,11 @@
                     "name": "ENCRYPTION_DIR",
                     "value": "",
                     "type": "PATH"
+                },
+                {
+                    "name": "WITH_TENSORRT",
+                    "value": "False",
+                    "type": "BOOL"
                 }
             ]
         }

+ 1 - 3
deploy/cpp/scripts/jetson_build.sh

@@ -3,9 +3,7 @@ WITH_GPU=ON
 # Use MKL or openblas
 WITH_MKL=OFF
 # Whether to integrate TensorRT (only effective when WITH_GPU=ON)
-WITH_TENSORRT=OFF
-# TensorRT installation path; if integrating TensorRT, change this to where TensorRT is actually installed
-TENSORRT_DIR=/root/projects/TensorRT/
+WITH_TENSORRT=ON
 # Paddle inference library path; change this to the path of the inference library you actually installed
 PADDLE_DIR=/root/projects/paddle_inference
 # Whether the Paddle inference library is compiled as a static library
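With TensorRT enabled by default and TENSORRT_DIR dropped from the script, the Jetson build should only need PADDLE_DIR to point at a Jetson-compatible Paddle inference library before running it. A usage sketch, assuming the script is invoked from deploy/cpp like the other build scripts:

```
# Hypothetical invocation; edit PADDLE_DIR in scripts/jetson_build.sh first.
cd deploy/cpp
sh scripts/jetson_build.sh
```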

+ 4 - 1
docs/gui/restful/introduction.md

@@ -2,8 +2,11 @@
 PaddleX RESTful is a RESTful API developed on top of PaddleX.  
 
 Developers can start the PaddleX RESTful service with the following command  
-**paddlex --start_restful --port [port] --workspace_dir [workspace path]**  
 
+```
+paddlex --start_restful --port [port] --workspace_dir [workspace path]   
+```
+  
 For a workspace set to the wk folder under the HOME directory and the RESTful service on port 8080, refer to the following command:
 ![](./img/start_restful.png)
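A concrete instance of the command described above (assuming the wk workspace folder sits directly under HOME), matching the screenshot:

```
paddlex --start_restful --port 8080 --workspace_dir ~/wk
```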