model.h

// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once

#include "ultra_infer/vision/detection/ppdet/base.h"
#include "ultra_infer/vision/detection/ppdet/multiclass_nms.h"
#include "ultra_infer/vision/detection/ppdet/multiclass_nms_rotated.h"

namespace ultra_infer {
namespace vision {
namespace detection {

class ULTRAINFER_DECL PicoDet : public PPDetBase {
 public:
  /** \brief Set the paths of the model file and the configuration file, and
   * the runtime configuration
   *
   * \param[in] model_file Path of the model file, e.g. picodet/model.pdmodel
   * \param[in] params_file Path of the parameter file, e.g.
   * picodet/model.pdiparams; if the model format is ONNX, this parameter is
   * ignored
   * \param[in] config_file Path of the configuration file for deployment,
   * e.g. picodet/infer_cfg.yml
   * \param[in] custom_option RuntimeOption for inference; the default uses
   * the CPU and chooses the backend defined in `valid_cpu_backends`
   * \param[in] model_format Format of the loaded model; the default is the
   * Paddle format
   */
  PicoDet(const std::string &model_file, const std::string &params_file,
          const std::string &config_file,
          const RuntimeOption &custom_option = RuntimeOption(),
          const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::OPENVINO, Backend::ORT, Backend::PDINFER,
                          Backend::LITE};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER, Backend::TRT};
    valid_rknpu_backends = {Backend::RKNPU2};
    valid_kunlunxin_backends = {Backend::LITE};
    valid_ascend_backends = {Backend::LITE};
    valid_sophgonpu_backends = {Backend::SOPHGOTPU};
    valid_timvx_backends = {Backend::LITE};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PicoDet"; }
};
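
// Usage sketch (illustrative only; not part of the declarations in this
// header): constructing a PicoDet detector and running prediction. The file
// names under picodet/ are example paths, and the Initialized()/Predict()
// members are assumed to be inherited from PPDetBase, mirroring the
// FastDeploy-style model API.
//
//   ultra_infer::RuntimeOption option;  // defaults to CPU + a valid_cpu_backend
//   ultra_infer::vision::detection::PicoDet model(
//       "picodet/model.pdmodel", "picodet/model.pdiparams",
//       "picodet/infer_cfg.yml", option);
//   if (!model.Initialized()) { /* handle load failure */ }
//   cv::Mat image = cv::imread("test.jpg");
//   ultra_infer::vision::DetectionResult result;
//   model.Predict(image, &result);  // exact Predict signature comes from PPDetBase
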
class ULTRAINFER_DECL SOLOv2 : public PPDetBase {
 public:
  /** \brief Set the paths of the model file and the configuration file, and
   * the runtime configuration
   *
   * \param[in] model_file Path of the model file, e.g. solov2/model.pdmodel
   * \param[in] params_file Path of the parameter file, e.g.
   * solov2/model.pdiparams; if the model format is ONNX, this parameter is
   * ignored
   * \param[in] config_file Path of the configuration file for deployment,
   * e.g. solov2/infer_cfg.yml
   * \param[in] custom_option RuntimeOption for inference; the default uses
   * the CPU and chooses the backend defined in `valid_cpu_backends`
   * \param[in] model_format Format of the loaded model; the default is the
   * Paddle format
   */
  SOLOv2(const std::string &model_file, const std::string &params_file,
         const std::string &config_file,
         const RuntimeOption &custom_option = RuntimeOption(),
         const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER};
    valid_gpu_backends = {Backend::PDINFER, Backend::TRT};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "SOLOv2"; }
};

class ULTRAINFER_DECL PPYOLOE : public PPDetBase {
 public:
  /** \brief Set the paths of the model file and the configuration file, and
   * the runtime configuration
   *
   * \param[in] model_file Path of the model file, e.g. ppyoloe/model.pdmodel
   * \param[in] params_file Path of the parameter file, e.g.
   * ppyoloe/model.pdiparams; if the model format is ONNX, this parameter is
   * ignored
   * \param[in] config_file Path of the configuration file for deployment,
   * e.g. ppyoloe/infer_cfg.yml
   * \param[in] custom_option RuntimeOption for inference; the default uses
   * the CPU and chooses the backend defined in `valid_cpu_backends`
   * \param[in] model_format Format of the loaded model; the default is the
   * Paddle format
   */
  PPYOLOE(const std::string &model_file, const std::string &params_file,
          const std::string &config_file,
          const RuntimeOption &custom_option = RuntimeOption(),
          const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::OPENVINO, Backend::ORT, Backend::PDINFER,
                          Backend::LITE, Backend::TVM};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER, Backend::TRT};
    valid_timvx_backends = {Backend::LITE};
    valid_kunlunxin_backends = {Backend::LITE};
    valid_rknpu_backends = {Backend::RKNPU2};
    valid_ascend_backends = {Backend::LITE};
    valid_sophgonpu_backends = {Backend::SOPHGOTPU};
    valid_horizon_backends = {Backend::HORIZONNPU};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PPYOLOE"; }
};
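
// Backend-selection sketch (illustrative): custom_option decides which entry
// of the valid_*_backends lists above is actually used at runtime. The setter
// names below follow the FastDeploy-style RuntimeOption API and are
// assumptions about this codebase, not declarations made in this header.
//
//   ultra_infer::RuntimeOption option;
//   option.UseGpu(0);        // select GPU device 0; CPU is the default
//   option.UseTrtBackend();  // must match an entry in valid_gpu_backends (TRT here)
//   ultra_infer::vision::detection::PPYOLOE model(
//       "ppyoloe/model.pdmodel", "ppyoloe/model.pdiparams",
//       "ppyoloe/infer_cfg.yml", option);
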
class ULTRAINFER_DECL PPYOLO : public PPDetBase {
 public:
  /** \brief Set the paths of the model file and the configuration file, and
   * the runtime configuration
   *
   * \param[in] model_file Path of the model file, e.g. ppyolo/model.pdmodel
   * \param[in] params_file Path of the parameter file, e.g.
   * ppyolo/model.pdiparams; if the model format is ONNX, this parameter is
   * ignored
   * \param[in] config_file Path of the configuration file for deployment,
   * e.g. ppyolo/infer_cfg.yml
   * \param[in] custom_option RuntimeOption for inference; the default uses
   * the CPU and chooses the backend defined in `valid_cpu_backends`
   * \param[in] model_format Format of the loaded model; the default is the
   * Paddle format
   */
  PPYOLO(const std::string &model_file, const std::string &params_file,
         const std::string &config_file,
         const RuntimeOption &custom_option = RuntimeOption(),
         const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER, Backend::LITE};
    valid_gpu_backends = {Backend::PDINFER};
    valid_kunlunxin_backends = {Backend::LITE};
    valid_ascend_backends = {Backend::LITE};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/PP-YOLO"; }
};
class ULTRAINFER_DECL YOLOv3 : public PPDetBase {
 public:
  YOLOv3(const std::string &model_file, const std::string &params_file,
         const std::string &config_file,
         const RuntimeOption &custom_option = RuntimeOption(),
         const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::OPENVINO, Backend::ORT, Backend::PDINFER,
                          Backend::LITE};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER, Backend::TRT};
    valid_kunlunxin_backends = {Backend::LITE};
    valid_ascend_backends = {Backend::LITE};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/YOLOv3"; }
};

class ULTRAINFER_DECL PaddleYOLOX : public PPDetBase {
 public:
  PaddleYOLOX(const std::string &model_file, const std::string &params_file,
              const std::string &config_file,
              const RuntimeOption &custom_option = RuntimeOption(),
              const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::OPENVINO, Backend::ORT, Backend::PDINFER,
                          Backend::LITE};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER, Backend::TRT};
    valid_kunlunxin_backends = {Backend::LITE};
    valid_ascend_backends = {Backend::LITE};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/YOLOX"; }
};

class ULTRAINFER_DECL FasterRCNN : public PPDetBase {
 public:
  FasterRCNN(const std::string &model_file, const std::string &params_file,
             const std::string &config_file,
             const RuntimeOption &custom_option = RuntimeOption(),
             const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER, Backend::LITE};
    valid_gpu_backends = {Backend::PDINFER};
    valid_kunlunxin_backends = {Backend::LITE};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/FasterRCNN"; }
};

class ULTRAINFER_DECL MaskRCNN : public PPDetBase {
 public:
  MaskRCNN(const std::string &model_file, const std::string &params_file,
           const std::string &config_file,
           const RuntimeOption &custom_option = RuntimeOption(),
           const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER, Backend::LITE};
    valid_gpu_backends = {Backend::PDINFER};
    valid_kunlunxin_backends = {Backend::LITE};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/MaskRCNN"; }
};

class ULTRAINFER_DECL SSD : public PPDetBase {
 public:
  SSD(const std::string &model_file, const std::string &params_file,
      const std::string &config_file,
      const RuntimeOption &custom_option = RuntimeOption(),
      const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER, Backend::LITE};
    valid_gpu_backends = {Backend::PDINFER};
    valid_kunlunxin_backends = {Backend::LITE};
    valid_ascend_backends = {Backend::LITE};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/SSD"; }
};

class ULTRAINFER_DECL PaddleYOLOv5 : public PPDetBase {
 public:
  PaddleYOLOv5(const std::string &model_file, const std::string &params_file,
               const std::string &config_file,
               const RuntimeOption &custom_option = RuntimeOption(),
               const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::ORT, Backend::PDINFER};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER, Backend::TRT};
    valid_kunlunxin_backends = {Backend::LITE};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/YOLOv5"; }
};

class ULTRAINFER_DECL PaddleYOLOv6 : public PPDetBase {
 public:
  PaddleYOLOv6(const std::string &model_file, const std::string &params_file,
               const std::string &config_file,
               const RuntimeOption &custom_option = RuntimeOption(),
               const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::OPENVINO, Backend::ORT, Backend::PDINFER};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER, Backend::TRT};
    valid_kunlunxin_backends = {Backend::LITE};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/YOLOv6"; }
};

class ULTRAINFER_DECL PaddleYOLOv7 : public PPDetBase {
 public:
  PaddleYOLOv7(const std::string &model_file, const std::string &params_file,
               const std::string &config_file,
               const RuntimeOption &custom_option = RuntimeOption(),
               const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::ORT, Backend::PDINFER};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER, Backend::TRT};
    valid_kunlunxin_backends = {Backend::LITE};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/YOLOv7"; }
};

class ULTRAINFER_DECL PaddleYOLOv8 : public PPDetBase {
 public:
  PaddleYOLOv8(const std::string &model_file, const std::string &params_file,
               const std::string &config_file,
               const RuntimeOption &custom_option = RuntimeOption(),
               const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::OPENVINO, Backend::ORT, Backend::PDINFER,
                          Backend::LITE};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER, Backend::TRT};
    valid_kunlunxin_backends = {Backend::LITE};
    valid_rknpu_backends = {Backend::RKNPU2};
    valid_ascend_backends = {Backend::LITE};
    valid_sophgonpu_backends = {Backend::SOPHGOTPU};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/YOLOv8"; }
};

class ULTRAINFER_DECL RTMDet : public PPDetBase {
 public:
  RTMDet(const std::string &model_file, const std::string &params_file,
         const std::string &config_file,
         const RuntimeOption &custom_option = RuntimeOption(),
         const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::OPENVINO, Backend::ORT, Backend::PDINFER};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER, Backend::TRT};
    valid_kunlunxin_backends = {Backend::LITE};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/RTMDet"; }
};
class ULTRAINFER_DECL CascadeRCNN : public PPDetBase {
 public:
  CascadeRCNN(const std::string &model_file, const std::string &params_file,
              const std::string &config_file,
              const RuntimeOption &custom_option = RuntimeOption(),
              const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER};
    valid_gpu_backends = {Backend::PDINFER};
    initialized = Initialize();
  }

  virtual std::string ModelName() const {
    return "PaddleDetection/CascadeRCNN";
  }
};

class ULTRAINFER_DECL PSSDet : public PPDetBase {
 public:
  PSSDet(const std::string &model_file, const std::string &params_file,
         const std::string &config_file,
         const RuntimeOption &custom_option = RuntimeOption(),
         const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER};
    valid_gpu_backends = {Backend::PDINFER};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/PSSDet"; }
};

class ULTRAINFER_DECL RetinaNet : public PPDetBase {
 public:
  RetinaNet(const std::string &model_file, const std::string &params_file,
            const std::string &config_file,
            const RuntimeOption &custom_option = RuntimeOption(),
            const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER};
    valid_gpu_backends = {Backend::PDINFER};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/RetinaNet"; }
};

class ULTRAINFER_DECL PPYOLOESOD : public PPDetBase {
 public:
  PPYOLOESOD(const std::string &model_file, const std::string &params_file,
             const std::string &config_file,
             const RuntimeOption &custom_option = RuntimeOption(),
             const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::ORT, Backend::PDINFER};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER, Backend::TRT};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/PPYOLOESOD"; }
};

class ULTRAINFER_DECL FCOS : public PPDetBase {
 public:
  FCOS(const std::string &model_file, const std::string &params_file,
       const std::string &config_file,
       const RuntimeOption &custom_option = RuntimeOption(),
       const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/FCOS"; }
};

class ULTRAINFER_DECL TTFNet : public PPDetBase {
 public:
  TTFNet(const std::string &model_file, const std::string &params_file,
         const std::string &config_file,
         const RuntimeOption &custom_option = RuntimeOption(),
         const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER};
    valid_gpu_backends = {Backend::PDINFER};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/TTFNet"; }
};

class ULTRAINFER_DECL TOOD : public PPDetBase {
 public:
  TOOD(const std::string &model_file, const std::string &params_file,
       const std::string &config_file,
       const RuntimeOption &custom_option = RuntimeOption(),
       const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER};
    valid_gpu_backends = {Backend::PDINFER};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/TOOD"; }
};

class ULTRAINFER_DECL GFL : public PPDetBase {
 public:
  GFL(const std::string &model_file, const std::string &params_file,
      const std::string &config_file,
      const RuntimeOption &custom_option = RuntimeOption(),
      const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::ORT, Backend::PDINFER};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetection/GFL"; }
};
class ULTRAINFER_DECL PaddleDetectionModel : public PPDetBase {
 public:
  PaddleDetectionModel(const std::string &model_file,
                       const std::string &params_file,
                       const std::string &config_file,
                       const RuntimeOption &custom_option = RuntimeOption(),
                       const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    CheckArch();
    valid_cpu_backends = {Backend::OPENVINO, Backend::ORT, Backend::PDINFER,
                          Backend::LITE};
    valid_gpu_backends = {Backend::ORT, Backend::PDINFER, Backend::TRT};
    valid_timvx_backends = {Backend::LITE};
    valid_kunlunxin_backends = {Backend::LITE};
    valid_rknpu_backends = {Backend::RKNPU2};
    valid_ascend_backends = {Backend::LITE};
    valid_sophgonpu_backends = {Backend::SOPHGOTPU};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PaddleDetectionModel"; }
};
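
// Generic-entry sketch (illustrative): PaddleDetectionModel is the catch-all
// wrapper for PaddleDetection exports that have no dedicated class in this
// header; CheckArch() is assumed to validate the "arch" field recorded in
// infer_cfg.yml before initialization. The exported_model/ paths are
// placeholders.
//
//   ultra_infer::vision::detection::PaddleDetectionModel model(
//       "exported_model/model.pdmodel", "exported_model/model.pdiparams",
//       "exported_model/infer_cfg.yml");
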
class ULTRAINFER_DECL PPYOLOER : public PPDetBase {
 public:
  PPYOLOER(const std::string &model_file, const std::string &params_file,
           const std::string &config_file,
           const RuntimeOption &custom_option = RuntimeOption(),
           const ModelFormat &model_format = ModelFormat::PADDLE)
      : PPDetBase(model_file, params_file, config_file, custom_option,
                  model_format) {
    valid_cpu_backends = {Backend::PDINFER, Backend::OPENVINO, Backend::ORT,
                          Backend::LITE};
    valid_gpu_backends = {Backend::PDINFER, Backend::ORT, Backend::TRT};
    valid_timvx_backends = {Backend::LITE};
    valid_kunlunxin_backends = {Backend::LITE};
    valid_rknpu_backends = {Backend::RKNPU2};
    valid_ascend_backends = {Backend::LITE};
    valid_sophgonpu_backends = {Backend::SOPHGOTPU};
    initialized = Initialize();
  }

  virtual std::string ModelName() const { return "PPYOLOER"; }
};

} // namespace detection
} // namespace vision
} // namespace ultra_infer