
Merge remote-tracking branch 'paddle/restful' into restful

wangsiyuan06 committed 5 years ago · parent commit 6ef324c58c
3 changed files with 20 additions and 7 deletions
  1. README.md (+6 -1)
  2. deploy/openvino/src/paddlex.cpp (+7 -2)
  3. deploy/raspberry/src/paddlex.cpp (+7 -4)

+ 6 - 1  README.md

@@ -100,7 +100,12 @@ pip install paddlex -i https://mirror.baidu.com/pypi/simple
  * [Industrial meter reading](https://paddlex.readthedocs.io/zh_CN/develop/examples/meter_reader.html)
 
 * Industrial quality inspection:
-  * Battery separator defect detection (Coming Soon)
+  * [Battery separator defect detection](https://paddlex.readthedocs.io/zh_CN/develop/examples/industrial_quality_inspection/README.html)
+
+* Satellite remote sensing:
+  * [RGB remote sensing image segmentation](https://paddlex.readthedocs.io/zh_CN/develop/examples/remote_sensing.html)
+  * [Multi-channel remote sensing image segmentation](https://paddlex.readthedocs.io/zh_CN/develop/examples/multi-channel_remote_sensing/README.html)
+  * [Land parcel change detection](https://paddlex.readthedocs.io/zh_CN/develop/examples/multi-channel_remote_sensing/README.html)
 
 * [Portrait segmentation](https://paddlex.readthedocs.io/zh_CN/develop/examples/human_segmentation.html)
 

+ 7 - 2  deploy/openvino/src/paddlex.cpp

@@ -116,11 +116,16 @@ bool Model::predict(const cv::Mat& im, ClsResult* result) {
   output_ = infer_request.GetBlob(output_name);
   InferenceEngine::MemoryBlob::CPtr moutput =
     InferenceEngine::as<InferenceEngine::MemoryBlob>(output_);
+  InferenceEngine::TensorDesc blob_output = moutput->getTensorDesc();
+  std::vector<size_t> output_shape = blob_output.getDims();
   auto moutputHolder = moutput->rmap();
   float* outputs_data = moutputHolder.as<float *>();
-
+  int size = 1;
+  for (auto& i : output_shape) {
+    size *= static_cast<int>(i);
+  }
   // post process
-  auto ptr = std::max_element(outputs_data, outputs_data+sizeof(outputs_data));
+  auto ptr = std::max_element(outputs_data, outputs_data + size);
   result->category_id = std::distance(outputs_data, ptr);
   result->score = *ptr;
   result->category = labels[result->category_id];
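
The hunk above fixes a classic bug: `sizeof(outputs_data)` evaluates to the size of a `float*` (typically 8 bytes), not the number of scores in the output blob, so `std::max_element` only ever scanned the first few elements. The fix derives the element count from the blob's tensor shape instead. A minimal sketch of the corrected pattern, assuming a flat float score buffer and its dimension vector (the helper name is illustrative, not part of PaddleX):

#include <algorithm>
#include <iterator>
#include <vector>

// Illustrative helper: the element count is the product of the output
// dims; sizeof on the pointer would only yield the pointer size.
int ArgMaxScore(const float* outputs_data,
                const std::vector<size_t>& output_shape) {
  int size = 1;
  for (const auto& d : output_shape) {
    size *= static_cast<int>(d);
  }
  const float* ptr = std::max_element(outputs_data, outputs_data + size);
  return static_cast<int>(std::distance(outputs_data, ptr));
}

The returned index is then used as `category_id`, and `*ptr` as the score, exactly as the predict method does after the fix.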

+ 7 - 4  deploy/raspberry/src/paddlex.cpp

@@ -90,10 +90,13 @@ bool Model::predict(const cv::Mat& im, ClsResult* result) {
   std::unique_ptr<const paddle::lite_api::Tensor> output_tensor(
     std::move(predictor_->GetOutput(0)));
   const float *outputs_data = output_tensor->mutable_data<float>();
-
-
+  auto output_shape = output_tensor->shape();
+  int64_t size = 1;
+  for (const auto& i : output_shape) {
+    size *= i;
+  }
   // postprocess
-  auto ptr = std::max_element(outputs_data, outputs_data+sizeof(outputs_data));
+  auto ptr = std::max_element(outputs_data, outputs_data + size);
   result->category_id = std::distance(outputs_data, ptr);
   result->score = *ptr;
   result->category = labels[result->category_id];
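
The Paddle-Lite (Raspberry) predictor had the same `sizeof(outputs_data)` bug and gets the same fix; the only differences are that the dims come from `output_tensor->shape()` and are accumulated in an `int64_t`. A hedged sketch of that variant (helper name illustrative):

#include <algorithm>
#include <cstdint>
#include <iterator>
#include <vector>

// Illustrative helper for the Lite path: shape() yields int64_t dims.
int ArgMaxScore(const float* outputs_data,
                const std::vector<int64_t>& output_shape) {
  int64_t size = 1;
  for (const auto& d : output_shape) {
    size *= d;
  }
  const float* ptr = std::max_element(outputs_data, outputs_data + size);
  return static_cast<int>(std::distance(outputs_data, ptr));
}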
@@ -121,7 +124,7 @@ bool Model::predict(const cv::Mat& im, DetResult* result) {
   if (name == "YOLOv3") {
     std::unique_ptr<paddle::lite_api::Tensor> im_size_tensor(
       std::move(predictor_->GetInput(1)));
-    im_size_tensor->Resize({1,2});
+    im_size_tensor->Resize({1, 2});
     auto *im_size_data = im_size_tensor->mutable_data<int>();
     memcpy(im_size_data, inputs_.ori_im_size_.data(), 1*2*sizeof(int));
   }