// ov_backend.h
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once

#include <iostream>
#include <map>
#include <memory>
#include <string>
#include <vector>

#include "openvino/openvino.hpp"
#include "ultra_infer/runtime/backends/backend.h"
#include "ultra_infer/runtime/backends/openvino/option.h"
#include "ultra_infer/utils/unique_ptr.h"
  23. namespace ultra_infer {
  24. class OpenVINOBackend : public BaseBackend {
  25. public:
  26. static ov::Core core_;
  27. OpenVINOBackend() {}
  28. virtual ~OpenVINOBackend() = default;
  29. bool Init(const RuntimeOption &option);
  30. bool Infer(std::vector<FDTensor> &inputs, std::vector<FDTensor> *outputs,
  31. bool copy_to_fd = true) override;
  32. int NumInputs() const override;
  33. int NumOutputs() const override;
  34. TensorInfo GetInputInfo(int index) override;
  35. TensorInfo GetOutputInfo(int index) override;
  36. std::vector<TensorInfo> GetInputInfos() override;
  37. std::vector<TensorInfo> GetOutputInfos() override;
  38. std::unique_ptr<BaseBackend> Clone(RuntimeOption &runtime_option,
  39. void *stream = nullptr,
  40. int device_id = -1) override;
  41. private:
  42. bool
  43. InitFromPaddle(const std::string &model_file, const std::string &params_file,
  44. const OpenVINOBackendOption &option = OpenVINOBackendOption());
  45. bool
  46. InitFromOnnx(const std::string &model_file,
  47. const OpenVINOBackendOption &option = OpenVINOBackendOption());
  48. void InitTensorInfo(const std::vector<ov::Output<ov::Node>> &ov_outputs,
  49. std::map<std::string, TensorInfo> *tensor_infos);
  50. ov::CompiledModel compiled_model_;
  51. ov::InferRequest request_;
  52. OpenVINOBackendOption option_;
  53. std::vector<TensorInfo> input_infos_;
  54. std::vector<TensorInfo> output_infos_;
  55. };
  56. } // namespace ultra_infer