// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include "ultra_infer/vision/detection/ppdet/base.h"
#include "ultra_infer/vision/detection/ppdet/multiclass_nms.h"
#include "ultra_infer/vision/detection/ppdet/multiclass_nms_rotated.h"

namespace ultra_infer {
namespace vision {
namespace detection {

class ULTRAINFER_DECL PicoDet : public PPDetBase {
 public:
  /** \brief Set path of model file and configuration file, and the
   * configuration of runtime
   *
   * \param[in] model_file Path of model file, e.g. picodet/model.pdmodel
   * \param[in] params_file Path of parameter file, e.g. picodet/model.pdiparams;
   *            if the model format is ONNX, this parameter will be ignored
   * \param[in] config_file Path of configuration file for deployment, e.g.
   *            picodet/infer_cfg.yml
   * \param[in] custom_option RuntimeOption for inference; by default the model
   *            runs on CPU with a backend chosen from `valid_cpu_backends`
   * \param[in] model_format Model format of the loaded model, default is
   *            Paddle format
   */
  PicoDet(const std::string &model_file, const std::string &params_file,
          const std::string &config_file,
          const RuntimeOption &custom_option = RuntimeOption(),
          const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::OPENVINO, Backend::ORT, Backend::PDINFER,
                          Backend::LITE};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER, Backend::TRT};
    valid_rknpu_backends = {Backend::RKNPU2};
    valid_kunlunxin_backends = {Backend::LITE};
    valid_ascend_backends = {Backend::LITE};
    valid_sophgonpu_backends = {Backend::SOPHGOTPU};
    valid_timvx_backends = {Backend::LITE};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PicoDet"; }
};
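
// Usage sketch (illustrative, not part of the API declared here). The same
// construction pattern applies to every PPDetBase subclass in this header;
// the RuntimeOption setters and the Predict() call below follow the common
// UltraInfer model interface and are assumptions to adapt to your own paths
// and build:
//
//   ultra_infer::RuntimeOption option;
//   option.UseGpu();  // optional; the default runs on CPU with a backend
//                     // chosen from `valid_cpu_backends`
//   ultra_infer::vision::detection::PicoDet model(
//       "picodet/model.pdmodel", "picodet/model.pdiparams",
//       "picodet/infer_cfg.yml", option);
//   if (!model.Initialized()) {
//     // handle load/initialization failure
//   }
//   cv::Mat image = cv::imread("test.jpg");
//   ultra_infer::vision::DetectionResult result;
//   model.Predict(image, &result);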

class ULTRAINFER_DECL SOLOv2 : public PPDetBase {
 public:
  /** \brief Set path of model file and configuration file, and the
   * configuration of runtime
   *
   * \param[in] model_file Path of model file, e.g. solov2/model.pdmodel
   * \param[in] params_file Path of parameter file, e.g. solov2/model.pdiparams;
   *            if the model format is ONNX, this parameter will be ignored
   * \param[in] config_file Path of configuration file for deployment, e.g.
   *            solov2/infer_cfg.yml
   * \param[in] custom_option RuntimeOption for inference; by default the model
   *            runs on CPU with a backend chosen from `valid_cpu_backends`
   * \param[in] model_format Model format of the loaded model, default is
   *            Paddle format
   */
  SOLOv2(const std::string &model_file, const std::string &params_file,
         const std::string &config_file,
         const RuntimeOption &custom_option = RuntimeOption(),
         const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER};
    valid_gpu_backends = {Backend::PDINFER, Backend::TRT};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "SOLOv2"; }
};

class ULTRAINFER_DECL PPYOLOE : public PPDetBase {
 public:
  /** \brief Set path of model file and configuration file, and the
   * configuration of runtime
   *
   * \param[in] model_file Path of model file, e.g. ppyoloe/model.pdmodel
   * \param[in] params_file Path of parameter file, e.g. ppyoloe/model.pdiparams;
   *            if the model format is ONNX, this parameter will be ignored
   * \param[in] config_file Path of configuration file for deployment, e.g.
   *            ppyoloe/infer_cfg.yml
   * \param[in] custom_option RuntimeOption for inference; by default the model
   *            runs on CPU with a backend chosen from `valid_cpu_backends`
   * \param[in] model_format Model format of the loaded model, default is
   *            Paddle format
   */
  PPYOLOE(const std::string &model_file, const std::string &params_file,
          const std::string &config_file,
          const RuntimeOption &custom_option = RuntimeOption(),
          const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::OPENVINO, Backend::ORT, Backend::PDINFER,
                          Backend::LITE, Backend::TVM};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER, Backend::TRT};
    valid_timvx_backends = {Backend::LITE};
    valid_kunlunxin_backends = {Backend::LITE};
    valid_rknpu_backends = {Backend::RKNPU2};
    valid_ascend_backends = {Backend::LITE};
    valid_sophgonpu_backends = {Backend::SOPHGOTPU};
    valid_horizon_backends = {Backend::HORIZONNPU};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PPYOLOE"; }
};

class ULTRAINFER_DECL PPYOLO : public PPDetBase {
 public:
  /** \brief Set path of model file and configuration file, and the
   * configuration of runtime
   *
   * \param[in] model_file Path of model file, e.g. ppyolo/model.pdmodel
   * \param[in] params_file Path of parameter file, e.g. ppyolo/model.pdiparams;
   *            if the model format is ONNX, this parameter will be ignored
   * \param[in] config_file Path of configuration file for deployment, e.g.
   *            ppyolo/infer_cfg.yml
   * \param[in] custom_option RuntimeOption for inference; by default the model
   *            runs on CPU with a backend chosen from `valid_cpu_backends`
   * \param[in] model_format Model format of the loaded model, default is
   *            Paddle format
   */
  PPYOLO(const std::string &model_file, const std::string &params_file,
         const std::string &config_file,
         const RuntimeOption &custom_option = RuntimeOption(),
         const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER, Backend::LITE};
    valid_gpu_backends = {Backend::PDINFER};
    valid_kunlunxin_backends = {Backend::LITE};
    valid_ascend_backends = {Backend::LITE};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/PP-YOLO"; }
};

class ULTRAINFER_DECL YOLOv3 : public PPDetBase {
 public:
  YOLOv3(const std::string &model_file, const std::string &params_file,
         const std::string &config_file,
         const RuntimeOption &custom_option = RuntimeOption(),
         const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::OPENVINO, Backend::ORT, Backend::PDINFER,
                          Backend::LITE};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER, Backend::TRT};
    valid_kunlunxin_backends = {Backend::LITE};
    valid_ascend_backends = {Backend::LITE};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/YOLOv3"; }
};

class ULTRAINFER_DECL PaddleYOLOX : public PPDetBase {
 public:
  PaddleYOLOX(const std::string &model_file, const std::string &params_file,
              const std::string &config_file,
              const RuntimeOption &custom_option = RuntimeOption(),
              const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::OPENVINO, Backend::ORT, Backend::PDINFER,
                          Backend::LITE};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER, Backend::TRT};
    valid_kunlunxin_backends = {Backend::LITE};
    valid_ascend_backends = {Backend::LITE};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/YOLOX"; }
};

class ULTRAINFER_DECL FasterRCNN : public PPDetBase {
 public:
  FasterRCNN(const std::string &model_file, const std::string &params_file,
             const std::string &config_file,
             const RuntimeOption &custom_option = RuntimeOption(),
             const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER, Backend::LITE};
    valid_gpu_backends = {Backend::PDINFER};
    valid_kunlunxin_backends = {Backend::LITE};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/FasterRCNN"; }
};

class ULTRAINFER_DECL MaskRCNN : public PPDetBase {
 public:
  MaskRCNN(const std::string &model_file, const std::string &params_file,
           const std::string &config_file,
           const RuntimeOption &custom_option = RuntimeOption(),
           const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER, Backend::LITE};
    valid_gpu_backends = {Backend::PDINFER};
    valid_kunlunxin_backends = {Backend::LITE};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/MaskRCNN"; }
};

class ULTRAINFER_DECL SSD : public PPDetBase {
 public:
  SSD(const std::string &model_file, const std::string &params_file,
      const std::string &config_file,
      const RuntimeOption &custom_option = RuntimeOption(),
      const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER, Backend::LITE};
    valid_gpu_backends = {Backend::PDINFER};
    valid_kunlunxin_backends = {Backend::LITE};
    valid_ascend_backends = {Backend::LITE};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/SSD"; }
};

class ULTRAINFER_DECL PaddleYOLOv5 : public PPDetBase {
 public:
  PaddleYOLOv5(const std::string &model_file, const std::string &params_file,
               const std::string &config_file,
               const RuntimeOption &custom_option = RuntimeOption(),
               const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::ORT, Backend::PDINFER};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER, Backend::TRT};
    valid_kunlunxin_backends = {Backend::LITE};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/YOLOv5"; }
};

class ULTRAINFER_DECL PaddleYOLOv6 : public PPDetBase {
 public:
  PaddleYOLOv6(const std::string &model_file, const std::string &params_file,
               const std::string &config_file,
               const RuntimeOption &custom_option = RuntimeOption(),
               const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::OPENVINO, Backend::ORT, Backend::PDINFER};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER, Backend::TRT};
    valid_kunlunxin_backends = {Backend::LITE};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/YOLOv6"; }
};

class ULTRAINFER_DECL PaddleYOLOv7 : public PPDetBase {
 public:
  PaddleYOLOv7(const std::string &model_file, const std::string &params_file,
               const std::string &config_file,
               const RuntimeOption &custom_option = RuntimeOption(),
               const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::ORT, Backend::PDINFER};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER, Backend::TRT};
    valid_kunlunxin_backends = {Backend::LITE};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/YOLOv7"; }
};

class ULTRAINFER_DECL PaddleYOLOv8 : public PPDetBase {
 public:
  PaddleYOLOv8(const std::string &model_file, const std::string &params_file,
               const std::string &config_file,
               const RuntimeOption &custom_option = RuntimeOption(),
               const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::OPENVINO, Backend::ORT, Backend::PDINFER,
                          Backend::LITE};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER, Backend::TRT};
    valid_kunlunxin_backends = {Backend::LITE};
    valid_rknpu_backends = {Backend::RKNPU2};
    valid_ascend_backends = {Backend::LITE};
    valid_sophgonpu_backends = {Backend::SOPHGOTPU};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/YOLOv8"; }
};

class ULTRAINFER_DECL RTMDet : public PPDetBase {
 public:
  RTMDet(const std::string &model_file, const std::string &params_file,
         const std::string &config_file,
         const RuntimeOption &custom_option = RuntimeOption(),
         const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::OPENVINO, Backend::ORT, Backend::PDINFER};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER, Backend::TRT};
    valid_kunlunxin_backends = {Backend::LITE};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/RTMDet"; }
};

class ULTRAINFER_DECL CascadeRCNN : public PPDetBase {
 public:
  CascadeRCNN(const std::string &model_file, const std::string &params_file,
              const std::string &config_file,
              const RuntimeOption &custom_option = RuntimeOption(),
              const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER};
    valid_gpu_backends = {Backend::PDINFER};
    initialized = Initialize();
  }

  virtual std::string ModelName() const {
    return "PaddleDetection/CascadeRCNN";
  }
};

class ULTRAINFER_DECL PSSDet : public PPDetBase {
 public:
  PSSDet(const std::string &model_file, const std::string &params_file,
         const std::string &config_file,
         const RuntimeOption &custom_option = RuntimeOption(),
         const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER};
    valid_gpu_backends = {Backend::PDINFER};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/PSSDet"; }
};

class ULTRAINFER_DECL RetinaNet : public PPDetBase {
 public:
  RetinaNet(const std::string &model_file, const std::string &params_file,
            const std::string &config_file,
            const RuntimeOption &custom_option = RuntimeOption(),
            const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER};
    valid_gpu_backends = {Backend::PDINFER};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/RetinaNet"; }
};

class ULTRAINFER_DECL PPYOLOESOD : public PPDetBase {
 public:
  PPYOLOESOD(const std::string &model_file, const std::string &params_file,
             const std::string &config_file,
             const RuntimeOption &custom_option = RuntimeOption(),
             const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::ORT, Backend::PDINFER};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER, Backend::TRT};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/PPYOLOESOD"; }
};

class ULTRAINFER_DECL FCOS : public PPDetBase {
 public:
  FCOS(const std::string &model_file, const std::string &params_file,
       const std::string &config_file,
       const RuntimeOption &custom_option = RuntimeOption(),
       const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/FCOS"; }
};

class ULTRAINFER_DECL TTFNet : public PPDetBase {
 public:
  TTFNet(const std::string &model_file, const std::string &params_file,
         const std::string &config_file,
         const RuntimeOption &custom_option = RuntimeOption(),
         const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER};
    valid_gpu_backends = {Backend::PDINFER};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/TTFNet"; }
};

class ULTRAINFER_DECL TOOD : public PPDetBase {
 public:
  TOOD(const std::string &model_file, const std::string &params_file,
       const std::string &config_file,
       const RuntimeOption &custom_option = RuntimeOption(),
       const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER};
    valid_gpu_backends = {Backend::PDINFER};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/TOOD"; }
};

class ULTRAINFER_DECL GFL : public PPDetBase {
 public:
  GFL(const std::string &model_file, const std::string &params_file,
      const std::string &config_file,
      const RuntimeOption &custom_option = RuntimeOption(),
      const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::ORT, Backend::PDINFER};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/GFL"; }
};
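
/** \brief Generic deployment class for exported PaddleDetection models.
 *
 * Unlike the architecture-specific classes above, the constructor calls
 * CheckArch(), which is expected to verify that the architecture named in the
 * deployment config is supported by this generic pipeline before the backends
 * are selected (see base.h for the exact behavior).
 */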
class ULTRAINFER_DECL PaddleDetectionModel : public PPDetBase {
 public:
  PaddleDetectionModel(const std::string &model_file,
                       const std::string &params_file,
                       const std::string &config_file,
                       const RuntimeOption &custom_option = RuntimeOption(),
                       const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    CheckArch();
    valid_cpu_backends = {Backend::OPENVINO, Backend::ORT, Backend::PDINFER,
                          Backend::LITE};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER, Backend::TRT};
    valid_timvx_backends = {Backend::LITE};
    valid_kunlunxin_backends = {Backend::LITE};
    valid_rknpu_backends = {Backend::RKNPU2};
    valid_ascend_backends = {Backend::LITE};
    valid_sophgonpu_backends = {Backend::SOPHGOTPU};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetectionModel"; }
};

class ULTRAINFER_DECL PPYOLOER : public PPDetBase {
 public:
  PPYOLOER(const std::string &model_file, const std::string &params_file,
           const std::string &config_file,
           const RuntimeOption &custom_option = RuntimeOption(),
           const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER, Backend::OPENVINO, Backend::ORT,
                          Backend::LITE};
    valid_gpu_backends = {Backend::PDINFER, Backend::ORT, Backend::TRT};
    valid_timvx_backends = {Backend::LITE};
    valid_kunlunxin_backends = {Backend::LITE};
    valid_rknpu_backends = {Backend::RKNPU2};
    valid_ascend_backends = {Backend::LITE};
    valid_sophgonpu_backends = {Backend::SOPHGOTPU};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PPYOLOER"; }
};

}  // namespace detection
}  // namespace vision
}  // namespace ultra_infer