// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
  14. #pragma once
  15. #include "ultra_infer/ultra_infer_model.h"
  16. #include "ultra_infer/vision/common/processors/transform.h"
  17. #include "ultra_infer/vision/common/result.h"
  18. #include "ultra_infer/vision/utils/utils.h"
  19. namespace ultra_infer {
  20. namespace vision {
  21. namespace segmentation {
  22. /*! @brief Postprocessor object for PaddleSeg serials model.
  23. */
  24. class ULTRAINFER_DECL PaddleSegPostprocessor {
  25. public:
  26. /** \brief Create a postprocessor instance for PaddleSeg serials model
  27. *
  28. * \param[in] config_file Path of configuration file for deployment, e.g
  29. * ppliteseg/deploy.yaml
  30. */
  31. explicit PaddleSegPostprocessor(const std::string &config_file);
  32. /** \brief Process the result of runtime and fill to SegmentationResult
  33. * structure
  34. *
  35. * \param[in] tensors The inference result from runtime
  36. * \param[in] result The output result of detection
  37. * \param[in] imgs_info The original input images shape info map, key is
  38. * "shape_info", value is vector<array<int, 2>> a{{height, width}} \return
  39. * true if the postprocess succeeded, otherwise false
  40. */
  41. virtual bool
  42. Run(const std::vector<FDTensor> &infer_results,
  43. std::vector<SegmentationResult> *results,
  44. const std::map<std::string, std::vector<std::array<int, 2>>> &imgs_info);
  45. /** \brief Get apply_softmax property of PaddleSeg model, default is false
  46. */
  47. bool GetApplySoftmax() const { return apply_softmax_; }
  48. /// Set apply_softmax value, bool type required
  49. void SetApplySoftmax(bool value) { apply_softmax_ = value; }
  50. /// Get store_score_map property of PaddleSeg model, default is false
  51. bool GetStoreScoreMap() const { return store_score_map_; }
  52. /// Set store_score_map value, bool type required
  53. void SetStoreScoreMap(bool value) { store_score_map_ = value; }
  54. private:
  55. virtual bool ReadFromConfig(const std::string &config_file);
  56. virtual bool SliceOneResultFromBatchInferResults(
  57. const FDTensor &infer_results, FDTensor *infer_result,
  58. const std::vector<int64_t> &infer_result_shape, const int64_t &start_idx);
  59. virtual bool ProcessWithScoreResult(const FDTensor &infer_result,
  60. const int64_t &out_num,
  61. SegmentationResult *result);
  62. virtual bool ProcessWithLabelResult(const FDTensor &infer_result,
  63. const int64_t &out_num,
  64. SegmentationResult *result);
  65. bool is_with_softmax_ = false;
  66. bool is_with_argmax_ = true;
  67. bool apply_softmax_ = false;
  68. bool store_score_map_ = false;
  69. bool initialized_ = false;
  70. };
  71. } // namespace segmentation
  72. } // namespace vision
  73. } // namespace ultra_infer