paddlex_cli.py

# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import argparse
import subprocess
import sys
import shutil
from pathlib import Path

from importlib_resources import files, as_file

from . import create_pipeline
from .inference.pipelines import create_pipeline_from_config, load_pipeline_config
from .repo_manager import setup, get_all_supported_repo_names
from .utils.flags import FLAGS_json_format_model
from .utils import logging
from .utils.interactive_get_pipeline import interactive_get_pipeline
from .utils.pipeline_arguments import PIPELINE_ARGUMENTS


def args_cfg():
    """parse cli arguments"""

    def parse_str(s):
        """convert str type value
        to None type if it is "None",
        to bool type if it means True or False.
        """
        if s in ("None", "none", "NONE"):
            return None
        elif s in ("TRUE", "True", "true", "T", "t"):
            return True
        elif s in ("FALSE", "False", "false", "F", "f"):
            return False
        return s
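
    # parse_str examples (behavior of the helper above): parse_str("None") -> None,
    # parse_str("t") -> True, parse_str("False") -> False; any other string,
    # e.g. "0.5", is returned unchanged.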

    parser = argparse.ArgumentParser(
        "Command-line interface for PaddleX. Use the options below to install plugins, run pipeline predictions, or start the serving application."
    )

    install_group = parser.add_argument_group("Install PaddleX Options")
    pipeline_group = parser.add_argument_group("Pipeline Predict Options")
    serving_group = parser.add_argument_group("Serving Options")
    paddle2onnx_group = parser.add_argument_group("Paddle2ONNX Options")

    ################# install pdx #################
    install_group.add_argument(
        "--install",
        action="store_true",
        default=False,
        help="Install specified PaddleX plugins.",
    )
    install_group.add_argument(
        "plugins",
        nargs="*",
        default=[],
        help="Names of custom development plugins to install (space-separated).",
    )
    install_group.add_argument(
        "--no_deps",
        action="store_true",
        help="Install custom development plugins without their dependencies.",
    )
    install_group.add_argument(
        "--platform",
        type=str,
        choices=["github.com", "gitee.com"],
        default="github.com",
        help="Platform to use for installation (default: github.com).",
    )
    install_group.add_argument(
        "-y",
        "--yes",
        dest="update_repos",
        action="store_true",
        help="Automatically confirm prompts and update repositories.",
    )
    install_group.add_argument(
        "--use_local_repos",
        action="store_true",
        default=False,
        help="Use local repositories if they exist.",
    )

    ################# pipeline predict #################
    pipeline_group.add_argument(
        "--pipeline", type=str, help="Name of the pipeline to execute for prediction."
    )
    pipeline_group.add_argument(
        "--input",
        type=str,
        default=None,
        help="Input data or path for the pipeline; supports a specific file or a directory.",
    )
    pipeline_group.add_argument(
        "--save_path",
        type=str,
        default=None,
        help="Path to save the prediction results.",
    )
    pipeline_group.add_argument(
        "--device",
        type=str,
        default=None,
        help="Device to run the pipeline on (e.g., 'cpu', 'gpu:0').",
    )
    pipeline_group.add_argument(
        "--use_hpip", action="store_true", help="Enable HPIP acceleration if available."
    )
    pipeline_group.add_argument(
        "--get_pipeline_config",
        type=str,
        default=None,
        help="Retrieve the configuration for a specified pipeline.",
    )

    ################# serving #################
    serving_group.add_argument(
        "--serve",
        action="store_true",
        help="Start the serving application to handle requests.",
    )
    serving_group.add_argument(
        "--host",
        type=str,
        default="0.0.0.0",
        help="Host address to serve on (default: 0.0.0.0).",
    )
    serving_group.add_argument(
        "--port",
        type=int,
        default=8080,
        help="Port number to serve on (default: 8080).",
    )

    ################# paddle2onnx #################
    paddle2onnx_group.add_argument(
        "--paddle2onnx", action="store_true", help="Convert Paddle model to ONNX format."
    )
    paddle2onnx_group.add_argument(
        "--paddle_model_dir", type=str, help="Directory containing the Paddle model."
    )
    paddle2onnx_group.add_argument(
        "--onnx_model_dir",
        type=str,
        default="onnx",
        help="Output directory for the ONNX model.",
    )
    paddle2onnx_group.add_argument(
        "--opset_version", type=int, help="Version of the ONNX opset to use."
    )

    # Parse known arguments to get the pipeline name
    args, remaining_args = parser.parse_known_args()
    pipeline_name = args.pipeline
    pipeline_args = []

    if not args.install and pipeline_name is not None:
        if pipeline_name not in PIPELINE_ARGUMENTS:
            support_pipelines = ", ".join(PIPELINE_ARGUMENTS.keys())
            logging.error(
                f"Unsupported pipeline: {pipeline_name}, CLI predict only supports these pipelines: {support_pipelines}\n"
            )
            sys.exit(1)

        pipeline_args = PIPELINE_ARGUMENTS[pipeline_name]
        if pipeline_args is None:
            pipeline_args = []
        pipeline_specific_group = parser.add_argument_group(
            f"{pipeline_name.capitalize()} Pipeline Options"
        )
        for arg in pipeline_args:
            pipeline_specific_group.add_argument(
                arg["name"],
                type=parse_str if arg["type"] is bool else arg["type"],
                help=arg.get("help", f"Argument for {pipeline_name} pipeline."),
            )

    return parser, pipeline_args
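
# Example invocations accepted by this parser (the pipeline name "OCR" is
# illustrative; the actual set comes from the PIPELINE_ARGUMENTS registry):
#   paddlex --pipeline OCR --input demo.png --device gpu:0 --save_path ./output
#   paddlex --get_pipeline_config OCR --save_path ./config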


def install(args):
    """install paddlex"""

    def _install_serving_deps():
        with as_file(files("paddlex").joinpath("serving_requirements.txt")) as req_file:
            return subprocess.check_call(
                [sys.executable, "-m", "pip", "install", "-r", str(req_file)]
            )

    def _install_paddle2onnx_deps():
        with as_file(
            files("paddlex").joinpath("paddle2onnx_requirements.txt")
        ) as req_file:
            return subprocess.check_call(
                [sys.executable, "-m", "pip", "install", "-r", str(req_file)]
            )

    def _install_hpi_deps(device_type):
        support_device_type = ["cpu", "gpu"]
        if device_type not in support_device_type:
            logging.error(
                "HPI installation failed!\n"
                "Supported device_type: %s. Your input device_type: %s.\n"
                "Please ensure the device_type is correct.",
                support_device_type,
                device_type,
            )
            sys.exit(2)

        if device_type == "cpu":
            packages = ["ultra_infer_python", "paddlex_hpi"]
        elif device_type == "gpu":
            packages = ["ultra_infer_gpu_python", "paddlex_hpi"]

        return subprocess.check_call(
            [sys.executable, "-m", "pip", "install"]
            + packages
            + [
                "--find-links",
                "https://github.com/PaddlePaddle/PaddleX/blob/develop/docs/pipeline_deploy/high_performance_inference.md",
            ]
        )

    # Enable debug info
    os.environ["PADDLE_PDX_DEBUG"] = "True"
    # Disable eager initialization
    os.environ["PADDLE_PDX_EAGER_INIT"] = "False"

    plugins = args.plugins[:]

    if "serving" in plugins:
        plugins.remove("serving")
        if plugins:
            logging.error("`serving` cannot be used together with other plugins.")
            sys.exit(2)
        _install_serving_deps()
        return

    if "paddle2onnx" in plugins:
        plugins.remove("paddle2onnx")
        if plugins:
            logging.error("`paddle2onnx` cannot be used together with other plugins.")
            sys.exit(2)
        _install_paddle2onnx_deps()
        return

    hpi_plugins = list(filter(lambda name: name.startswith("hpi-"), plugins))
    if hpi_plugins:
        for i in hpi_plugins:
            plugins.remove(i)
        if plugins:
            logging.error("`hpi` cannot be used together with other plugins.")
            sys.exit(2)
        if len(hpi_plugins) > 1 or len(hpi_plugins[0].split("-")) != 2:
            logging.error(
                "Invalid HPI plugin installation format detected.\n"
                "Correct format: paddlex --install hpi-<device_type>\n"
                "Example: paddlex --install hpi-gpu"
            )
            sys.exit(2)
        device_type = hpi_plugins[0].split("-")[1]
        _install_hpi_deps(device_type=device_type)
        return

    if plugins:
        repo_names = plugins
    elif len(plugins) == 0:
        repo_names = get_all_supported_repo_names()
    setup(
        repo_names=repo_names,
        no_deps=args.no_deps,
        platform=args.platform,
        update_repos=args.update_repos,
        use_local_repos=args.use_local_repos,
    )
    return
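
# Installation examples (flags and plugin names handled above):
#   paddlex --install                # set up all supported repos
#   paddlex --install serving        # serving dependencies only
#   paddlex --install paddle2onnx    # Paddle2ONNX dependencies only
#   paddlex --install hpi-gpu        # high-performance inference deps for GPU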


def pipeline_predict(
    pipeline,
    input,
    device,
    save_path,
    use_hpip,
    **pipeline_args,
):
    """pipeline predict"""
    pipeline = create_pipeline(pipeline, device=device, use_hpip=use_hpip)
    result = pipeline.predict(input, **pipeline_args)
    for res in result:
        res.print()
        if save_path:
            res.save_all(save_path=save_path)


def serve(pipeline, *, device, use_hpip, host, port):
    from .inference.serving.basic_serving import create_pipeline_app, run_server

    pipeline_config = load_pipeline_config(pipeline)
    pipeline = create_pipeline(config=pipeline_config, device=device, use_hpip=use_hpip)
    app = create_pipeline_app(pipeline, pipeline_config)
    run_server(app, host=host, port=port, debug=False)
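
# Serving example (the pipeline name is illustrative):
#   paddlex --serve --pipeline OCR --host 0.0.0.0 --port 8080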


# TODO: Move to another module
def paddle_to_onnx(paddle_model_dir, onnx_model_dir, *, opset_version):
    PD_MODEL_FILE_PREFIX = "inference"
    PD_PARAMS_FILENAME = "inference.pdiparams"
    ONNX_MODEL_FILENAME = "inference.onnx"
    CONFIG_FILENAME = "inference.yml"
    ADDITIONAL_FILENAMES = ["scaler.pkl"]

    def _check_input_dir(input_dir, pd_model_file_ext):
        if input_dir is None:
            sys.exit("Input directory must be specified")
        if not input_dir.exists():
            sys.exit(f"{input_dir} does not exist")
        if not input_dir.is_dir():
            sys.exit(f"{input_dir} is not a directory")
        model_path = (input_dir / PD_MODEL_FILE_PREFIX).with_suffix(pd_model_file_ext)
        if not model_path.exists():
            sys.exit(f"{model_path} does not exist")
        params_path = input_dir / PD_PARAMS_FILENAME
        if not params_path.exists():
            sys.exit(f"{params_path} does not exist")
        config_path = input_dir / CONFIG_FILENAME
        if not config_path.exists():
            sys.exit(f"{config_path} does not exist")

    def _check_paddle2onnx():
        if shutil.which("paddle2onnx") is None:
            sys.exit("Paddle2ONNX is not available. Please install the plugin first.")

    def _run_paddle2onnx(input_dir, pd_model_file_ext, output_dir, opset_version):
        logging.info("Paddle2ONNX conversion starting...")
        # XXX: To circumvent Paddle2ONNX's bug
        if opset_version is None:
            if pd_model_file_ext == ".json":
                opset_version = 19
            else:
                opset_version = 7
            logging.info("Using default ONNX opset version: %d", opset_version)
        cmd = [
            "paddle2onnx",
            "--model_dir",
            str(input_dir),
            "--model_filename",
            str(Path(PD_MODEL_FILE_PREFIX).with_suffix(pd_model_file_ext)),
            "--params_filename",
            PD_PARAMS_FILENAME,
            "--save_file",
            str(output_dir / ONNX_MODEL_FILENAME),
            "--opset_version",
            str(opset_version),
        ]
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError as e:
            sys.exit(f"Paddle2ONNX conversion failed with exit code {e.returncode}")
        logging.info("Paddle2ONNX conversion succeeded")

    def _copy_config_file(input_dir, output_dir):
        src_path = input_dir / CONFIG_FILENAME
        dst_path = output_dir / CONFIG_FILENAME
        shutil.copy(src_path, dst_path)
        logging.info(f"Copied {src_path} to {dst_path}")

    def _copy_additional_files(input_dir, output_dir):
        for filename in ADDITIONAL_FILENAMES:
            src_path = input_dir / filename
            if not src_path.exists():
                continue
            dst_path = output_dir / filename
            shutil.copy(src_path, dst_path)
            logging.info(f"Copied {src_path} to {dst_path}")

    paddle_model_dir = Path(paddle_model_dir)
    onnx_model_dir = Path(onnx_model_dir)
    logging.info(f"Input dir: {paddle_model_dir}")
    logging.info(f"Output dir: {onnx_model_dir}")

    pd_model_file_ext = ".json"
    if not FLAGS_json_format_model:
        if not (paddle_model_dir / f"{PD_MODEL_FILE_PREFIX}.json").exists():
            pd_model_file_ext = ".pdmodel"

    _check_input_dir(paddle_model_dir, pd_model_file_ext)
    _check_paddle2onnx()
    _run_paddle2onnx(paddle_model_dir, pd_model_file_ext, onnx_model_dir, opset_version)
    if not (onnx_model_dir.exists() and onnx_model_dir.samefile(paddle_model_dir)):
        _copy_config_file(paddle_model_dir, onnx_model_dir)
        _copy_additional_files(paddle_model_dir, onnx_model_dir)
    logging.info("Done")
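
# Conversion example (directory names are illustrative):
#   paddlex --paddle2onnx --paddle_model_dir ./inference --onnx_model_dir ./onnx --opset_version 19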


# for CLI
def main():
    """API for command line"""
    parser, pipeline_args = args_cfg()
    args = parser.parse_args()

    if len(sys.argv) == 1:
        logging.warning("No arguments provided. Displaying help information:")
        parser.print_help()
        sys.exit(2)

    if args.install:
        install(args)
    elif args.serve:
        serve(
            args.pipeline,
            device=args.device,
            use_hpip=args.use_hpip,
            host=args.host,
            port=args.port,
        )
    elif args.paddle2onnx:
        paddle_to_onnx(
            args.paddle_model_dir,
            args.onnx_model_dir,
            opset_version=args.opset_version,
        )
    else:
        if args.get_pipeline_config is not None:
            interactive_get_pipeline(args.get_pipeline_config, args.save_path)
        else:
            pipeline_args_dict = {}

            from .utils.flags import USE_NEW_INFERENCE

            if USE_NEW_INFERENCE:
                for arg in pipeline_args:
                    arg_name = arg["name"].lstrip("-")
                    if hasattr(args, arg_name):
                        pipeline_args_dict[arg_name] = getattr(args, arg_name)
                    else:
                        logging.warning(f"Argument {arg_name} is missing in args")
            return pipeline_predict(
                args.pipeline,
                args.input,
                args.device,
                args.save_path,
                use_hpip=args.use_hpip,
                **pipeline_args_dict,
            )