// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "ultra_infer/vision/classification/contrib/resnet.h"

#include "ultra_infer/utils/perf.h"
#include "ultra_infer/vision/utils/utils.h"

namespace ultra_infer {
namespace vision {
namespace classification {
ResNet::ResNet(const std::string &model_file, const std::string &params_file,
               const RuntimeOption &custom_option,
               const ModelFormat &model_format) {
  // In the constructor, the three steps below are necessary:
  // 1. set the valid backends 2. set the RuntimeOption 3. call Initialize().
  if (model_format == ModelFormat::ONNX) {
    valid_cpu_backends = {Backend::ORT, Backend::OPENVINO};
    valid_gpu_backends = {Backend::ORT, Backend::TRT};
  } else {
    valid_cpu_backends = {Backend::PDINFER};
    valid_gpu_backends = {Backend::PDINFER};
  }
  runtime_option = custom_option;
  runtime_option.model_format = model_format;
  runtime_option.model_file = model_file;
  runtime_option.params_file = params_file;
  initialized = Initialize();
}
bool ResNet::Initialize() {
  // In this function, the two steps below are necessary:
  // 1. assign values to the member variables 2. call InitRuntime().
  size = {224, 224};
  // Standard ImageNet per-channel mean/std, applied after BGR2RGB.
  mean_vals = {0.485f, 0.456f, 0.406f};
  std_vals = {0.229f, 0.224f, 0.225f};
  if (!InitRuntime()) {
    FDERROR << "Failed to initialize ultra_infer backend." << std::endl;
    return false;
  }
  return true;
}
bool ResNet::Preprocess(Mat *mat, FDTensor *output) {
  // The preprocessing here must be implemented to match the original repo.
  // Its result has to be saved in an FDTensor, because the input of Infer()
  // needs to be a std::vector<FDTensor>.
  // 1. Resize 2. BGR2RGB 3. Normalize 4. HWC2CHW 5. Put the result into the
  // output FDTensor.
  if (mat->Height() != size[0] || mat->Width() != size[1]) {
    int interp = cv::INTER_LINEAR;
    Resize::Run(mat, size[1], size[0], -1, -1, interp);  // size = {h, w}
  }
  BGR2RGB::Run(mat);
  Normalize::Run(mat, mean_vals, std_vals);
  HWC2CHW::Run(mat);
  Cast::Run(mat, "float");
  mat->ShareWithTensor(output);
  output->shape.insert(output->shape.begin(), 1);  // reshape to n, c, h, w
  return true;
}
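
// A note on the result of Preprocess() (derived from the pipeline above, not
// from any extra documentation): `output` shares the Mat's buffer rather than
// copying it, and after the leading batch dimension is inserted its shape is
// {1, 3, size[0], size[1]} for a 3-channel input, i.e. a float32 NCHW tensor
// holding a normalized RGB image.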
bool ResNet::Postprocess(FDTensor &infer_result, ClassifyResult *result,
                         int topk) {
  // The postprocessing here must be implemented to match the original repo.
  // Finally, the result has to be saved in a ClassifyResult variable.
  // 1. Softmax 2. choose the top-k labels 3. put the result into the
  // ClassifyResult variable.
  int num_classes = infer_result.shape[1];
  function::Softmax(infer_result, &infer_result);
  const float *infer_result_buffer =
      reinterpret_cast<float *>(infer_result.Data());
  topk = std::min(num_classes, topk);
  result->label_ids =
      utils::TopKIndices(infer_result_buffer, num_classes, topk);
  result->scores.resize(topk);
  for (int i = 0; i < topk; ++i) {
    result->scores[i] = *(infer_result_buffer + result->label_ids[i]);
  }
  return true;
}
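
// Worked example (assuming utils::TopKIndices returns indices sorted by
// descending score, which is how the loop above uses it): for softmax
// probabilities {0.1f, 0.7f, 0.2f} and topk = 2, result->label_ids becomes
// {1, 2} and result->scores becomes {0.7f, 0.2f}.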
bool ResNet::Predict(cv::Mat *im, ClassifyResult *result, int topk) {
  // In this function, Preprocess(), Infer(), and Postprocess() are called
  // sequentially.
  Mat mat(*im);
  std::vector<FDTensor> processed_data(1);
  if (!Preprocess(&mat, &(processed_data[0]))) {
    FDERROR << "Failed to preprocess input data while using model:"
            << ModelName() << "." << std::endl;
    return false;
  }
  processed_data[0].name = InputInfoOfRuntime(0).name;
  std::vector<FDTensor> output_tensors;
  if (!Infer(processed_data, &output_tensors)) {
    FDERROR << "Failed to run inference while using model:" << ModelName()
            << "." << std::endl;
    return false;
  }
  if (!Postprocess(output_tensors[0], result, topk)) {
    FDERROR << "Failed to postprocess while using model:" << ModelName()
            << "." << std::endl;
    return false;
  }
  return true;
}
} // namespace classification
} // namespace vision
} // namespace ultra_infer
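
// Usage sketch (illustrative only: the file paths below are hypothetical, and
// Initialized() is assumed to be the base-class accessor for the
// `initialized` flag set in the constructor):
//
//   #include "ultra_infer/vision/classification/contrib/resnet.h"
//   #include <opencv2/opencv.hpp>
//
//   int main() {
//     ultra_infer::RuntimeOption option;
//     ultra_infer::vision::classification::ResNet model(
//         "resnet50.onnx", "", option, ultra_infer::ModelFormat::ONNX);
//     if (!model.Initialized()) return -1;
//     cv::Mat im = cv::imread("test.jpg");
//     ultra_infer::vision::ClassifyResult result;
//     if (model.Predict(&im, &result, 5)) {
//       // result.label_ids / result.scores hold the top-5 predictions.
//     }
//     return 0;
//   }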