
add namespace InferenceEngine::

syyxsxx 5 years ago
parent
commit
be8c86a10d

+ 1 - 1
deploy/openvino/src/paddlex.cpp

@@ -222,7 +222,7 @@ bool Model::predict(const cv::Mat& im, SegResult* result) {
   //
   infer_request.Infer();
 
-  OInferenceEngine::utputsDataMap out_map = network_.getOutputsInfo();
+  InferenceEngine::OutputsDataMap out_map = network_.getOutputsInfo();
   auto iter = out_map.begin();
   iter++;
   std::string output_name_score = iter->first;
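
For context, a minimal standalone sketch (not taken from the repository; the model path is a placeholder) of where the qualified type comes from: CNNNetwork::getOutputsInfo() returns InferenceEngine::OutputsDataMap, so without a using-directive the declaration must carry the InferenceEngine:: prefix, which is exactly what the change above adds.

  #include <inference_engine.hpp>
  #include <iostream>
  #include <string>

  int main() {
    InferenceEngine::Core core;
    // Hypothetical model path, for illustration only.
    InferenceEngine::CNNNetwork network = core.ReadNetwork("model.xml");

    // getOutputsInfo() returns InferenceEngine::OutputsDataMap,
    // i.e. std::map<std::string, InferenceEngine::DataPtr>.
    InferenceEngine::OutputsDataMap out_map = network.getOutputsInfo();
    for (const auto& item : out_map) {
      std::cout << "output: " << item.first << std::endl;
    }
    return 0;
  }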

+ 1 - 1
deploy/raspberry/include/paddlex/transforms.h

@@ -49,7 +49,7 @@ class ImageBlob {
   // Resize scale
   float scale = 1.0;
   // Buffer for image data after preprocessing
-  std::unique_ptr<Tensor> input_tensor_;
+  std::unique_ptr<paddle::lite_api::Tensor> input_tensor_;
 
   void clear() {
     im_size_before_resize_.clear();
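
A minimal sketch of why the member needs the full qualifier (assumptions: Paddle Lite's public header is paddle_api.h, PaddlePredictor::GetInput returns std::unique_ptr<Tensor>, and the helper below is hypothetical): ImageBlob is declared outside the paddle::lite_api namespace, so an unqualified Tensor does not resolve there.

  #include <memory>
  #include "paddle_api.h"  // Paddle Lite C++ API (assumed header name)

  struct Blob {
    // Buffer handle for preprocessed image data; fully qualified because this
    // struct lives outside the paddle::lite_api namespace.
    std::unique_ptr<paddle::lite_api::Tensor> input_tensor_;
  };

  // Hypothetical helper showing how such a member is typically filled.
  inline void bind_input(Blob* blob,
                         paddle::lite_api::PaddlePredictor* predictor) {
    blob->input_tensor_ = predictor->GetInput(0);
  }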

+ 3 - 1
deploy/raspberry/src/paddlex.cpp

@@ -26,7 +26,9 @@ void Model::create_predictor(const std::string& model_dir,
   config.set_model_from_file(model_dir);
   config.set_threads(thread_num);
   load_config(cfg_dir);
-  predictor_ = CreatePaddlePredictor<paddle::lite_api::MobileConfig>(config);
+  predictor_ =
+    paddle::lite_api::CreatePaddlePredictor<paddle::lite_api::MobileConfig>(
+      config);
 }
 
 bool Model::load_config(const std::string& cfg_dir) {
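
A minimal standalone sketch (not the repository's code; the header name, model path, and thread count are assumptions) of the fully qualified factory call added above: without a using-directive for paddle::lite_api, both CreatePaddlePredictor and the MobileConfig template argument need the namespace prefix.

  #include <memory>
  #include <string>
  #include "paddle_api.h"  // Paddle Lite C++ API (assumed header name)

  std::shared_ptr<paddle::lite_api::PaddlePredictor> build_predictor(
      const std::string& model_file, int thread_num) {
    paddle::lite_api::MobileConfig config;
    config.set_model_from_file(model_file);
    config.set_threads(thread_num);
    // CreatePaddlePredictor returns std::shared_ptr<PaddlePredictor>.
    return paddle::lite_api::CreatePaddlePredictor<
        paddle::lite_api::MobileConfig>(config);
  }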