# copyright (c) 2024 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import argparse
import importlib.resources
import subprocess
import sys
import shutil
from pathlib import Path

from . import create_pipeline
from .inference.pipelines import load_pipeline_config
from .repo_manager import setup, get_all_supported_repo_names
from .utils.flags import FLAGS_json_format_model
from .utils import logging
from .utils.interactive_get_pipeline import interactive_get_pipeline
from .utils.pipeline_arguments import PIPELINE_ARGUMENTS


def args_cfg():
    """Parse CLI arguments."""

    def parse_str(s):
        """Convert a string value: return None for "None", a bool for strings
        that spell True or False, and the original string otherwise.
        """
        if s in ("None", "none", "NONE"):
            return None
        elif s in ("TRUE", "True", "true", "T", "t"):
            return True
        elif s in ("FALSE", "False", "false", "F", "f"):
            return False
        return s
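    # For example, parse_str("None") -> None, parse_str("true") -> True, and
    # parse_str("F") -> False; any other string (e.g. "0.5") is returned unchanged.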

    parser = argparse.ArgumentParser(
        "Command-line interface for PaddleX. Use the options below to install plugins, run pipeline predictions, or start the serving application."
    )

    install_group = parser.add_argument_group("Install PaddleX Options")
    pipeline_group = parser.add_argument_group("Pipeline Predict Options")
    serving_group = parser.add_argument_group("Serving Options")
    paddle2onnx_group = parser.add_argument_group("Paddle2ONNX Options")

    ################# install pdx #################
    install_group.add_argument(
        "--install",
        action="store_true",
        default=False,
        help="Install specified PaddleX plugins.",
    )
    install_group.add_argument(
        "plugins",
        nargs="*",
        default=[],
        help="Names of custom development plugins to install (space-separated).",
    )
    install_group.add_argument(
        "--no_deps",
        action="store_true",
        help="Install custom development plugins without their dependencies.",
    )
    install_group.add_argument(
        "--platform",
        type=str,
        choices=["github.com", "gitee.com"],
        default="github.com",
        help="Platform to use for installation (default: github.com).",
    )
    install_group.add_argument(
        "-y",
        "--yes",
        dest="update_repos",
        action="store_true",
        help="Automatically confirm prompts and update repositories.",
    )
    install_group.add_argument(
        "--use_local_repos",
        action="store_true",
        default=False,
        help="Use local repositories if they exist.",
    )
    install_group.add_argument(
        "--deps_to_replace",
        type=str,
        nargs="+",
        default=None,
        help="Replace dependency versions when installing from repositories.",
    )

    ################# pipeline predict #################
    pipeline_group.add_argument(
        "--pipeline", type=str, help="Name of the pipeline to execute for prediction."
    )
    pipeline_group.add_argument(
        "--input",
        type=str,
        default=None,
        help="Input data or path for the pipeline; a specific file or a directory is supported.",
    )
    pipeline_group.add_argument(
        "--save_path",
        type=str,
        default=None,
        help="Path to save the prediction results.",
    )
    pipeline_group.add_argument(
        "--device",
        type=str,
        default=None,
        help="Device to run the pipeline on (e.g., 'cpu', 'gpu:0').",
    )
    pipeline_group.add_argument(
        "--use_hpip", action="store_true", help="Enable HPIP acceleration if available."
    )
    pipeline_group.add_argument(
        "--get_pipeline_config",
        type=str,
        default=None,
        help="Retrieve the configuration for a specified pipeline.",
    )

    ################# serving #################
    serving_group.add_argument(
        "--serve",
        action="store_true",
        help="Start the serving application to handle requests.",
    )
    serving_group.add_argument(
        "--host",
        type=str,
        default="0.0.0.0",
        help="Host address to serve on (default: 0.0.0.0).",
    )
    serving_group.add_argument(
        "--port",
        type=int,
        default=8080,
        help="Port number to serve on (default: 8080).",
    )
    # Serving also uses `--pipeline`, `--device`, and `--use_hpip`

    ################# paddle2onnx #################
    paddle2onnx_group.add_argument(
        "--paddle2onnx", action="store_true", help="Convert Paddle model to ONNX format"
    )
    paddle2onnx_group.add_argument(
        "--paddle_model_dir", type=str, help="Directory containing the Paddle model"
    )
    paddle2onnx_group.add_argument(
        "--onnx_model_dir",
        type=str,
        default="onnx",
        help="Output directory for the ONNX model",
    )
    paddle2onnx_group.add_argument(
        "--opset_version", type=int, help="Version of the ONNX opset to use"
    )

    # Parse known arguments to get the pipeline name
    args, remaining_args = parser.parse_known_args()
    pipeline = args.pipeline
    pipeline_args = []

    if not (args.install or args.serve or args.paddle2onnx) and pipeline is not None:
        if os.path.isfile(pipeline):
            pipeline_name = load_pipeline_config(pipeline)["pipeline_name"]
        else:
            pipeline_name = pipeline

        if pipeline_name not in PIPELINE_ARGUMENTS:
            support_pipelines = ", ".join(PIPELINE_ARGUMENTS.keys())
            logging.error(
                f"Unsupported pipeline: {pipeline_name}, CLI predict only supports these pipelines: {support_pipelines}\n"
            )
            sys.exit(1)

        pipeline_args = PIPELINE_ARGUMENTS[pipeline_name]
        if pipeline_args is None:
            pipeline_args = []
        pipeline_specific_group = parser.add_argument_group(
            f"{pipeline_name.capitalize()} Pipeline Options"
        )
        for arg in pipeline_args:
            pipeline_specific_group.add_argument(
                arg["name"],
                type=parse_str if arg["type"] is bool else arg["type"],
                help=arg.get("help", f"Argument for {pipeline_name} pipeline."),
            )

    return parser, pipeline_args
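

# A few illustrative invocations handled by the parser above (the "OCR" pipeline
# name is only an example; CLI predict accepts any pipeline listed in
# PIPELINE_ARGUMENTS, or a path to a pipeline config file):
#   paddlex --pipeline OCR --input image.png --device gpu:0 --save_path ./output
#   paddlex --serve --pipeline OCR --host 0.0.0.0 --port 8080
#   paddlex --paddle2onnx --paddle_model_dir ./model --onnx_model_dir ./onnx
#   paddlex --install hpi-gpu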


def install(args):
    """install paddlex"""

    def _install_serving_deps():
        with importlib.resources.path(
            "paddlex", "serving_requirements.txt"
        ) as req_file:
            return subprocess.check_call(
                [sys.executable, "-m", "pip", "install", "-r", str(req_file)]
            )

    def _install_paddle2onnx_deps():
        with importlib.resources.path(
            "paddlex", "paddle2onnx_requirements.txt"
        ) as req_file:
            return subprocess.check_call(
                [sys.executable, "-m", "pip", "install", "-r", str(req_file)]
            )

    def _install_hpi_deps(device_type):
        support_device_type = ["cpu", "gpu"]
        if device_type not in support_device_type:
            logging.error(
                "HPI installation failed!\n"
                "Supported device_type: %s. Your input device_type: %s.\n"
                "Please ensure the device_type is correct.",
                support_device_type,
                device_type,
            )
            sys.exit(2)

        if device_type == "cpu":
            packages = ["ultra-infer-python", "paddlex-hpi"]
        elif device_type == "gpu":
            packages = ["ultra-infer-gpu-python", "paddlex-hpi"]

        with importlib.resources.path("paddlex", "hpip_links.html") as f:
            return subprocess.check_call(
                [
                    sys.executable,
                    "-m",
                    "pip",
                    "install",
                    "--find-links",
                    str(f),
                    *packages,
                ]
            )

    # Enable debug info
    os.environ["PADDLE_PDX_DEBUG"] = "True"
    # Disable eager initialization
    os.environ["PADDLE_PDX_EAGER_INIT"] = "False"

    plugins = args.plugins[:]

    if "serving" in plugins:
        plugins.remove("serving")
        if plugins:
            logging.error("`serving` cannot be used together with other plugins.")
            sys.exit(2)
        _install_serving_deps()
        return

    if "paddle2onnx" in plugins:
        plugins.remove("paddle2onnx")
        if plugins:
            logging.error("`paddle2onnx` cannot be used together with other plugins.")
            sys.exit(2)
        _install_paddle2onnx_deps()
        return

    hpi_plugins = list(filter(lambda name: name.startswith("hpi-"), plugins))
    if hpi_plugins:
        for i in hpi_plugins:
            plugins.remove(i)
        if plugins:
            logging.error("`hpi` cannot be used together with other plugins.")
            sys.exit(2)
        if len(hpi_plugins) > 1 or len(hpi_plugins[0].split("-")) != 2:
            logging.error(
                "Invalid HPI plugin installation format detected.\n"
                "Correct format: paddlex --install hpi-<device_type>\n"
                "Example: paddlex --install hpi-gpu"
            )
            sys.exit(2)
        device_type = hpi_plugins[0].split("-")[1]
        _install_hpi_deps(device_type=device_type)
        return

    if plugins:
        repo_names = plugins
    elif len(plugins) == 0:
        repo_names = get_all_supported_repo_names()
    setup(
        repo_names=repo_names,
        no_deps=args.no_deps,
        platform=args.platform,
        update_repos=args.update_repos,
        use_local_repos=args.use_local_repos,
        deps_to_replace=args.deps_to_replace,
    )
    return
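

# In summary: "serving" and "paddle2onnx" install their extra requirements files,
# "hpi-<device_type>" (e.g. "hpi-gpu") installs the high-performance inference
# packages, and any remaining names (or none at all) are treated as repository
# names handed to setup(). For instance:
#   paddlex --install                # set up all supported repositories
#   paddlex --install serving
#   paddlex --install hpi-cpu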


def pipeline_predict(
    pipeline,
    input,
    device,
    save_path,
    use_hpip,
    **pipeline_args,
):
    """pipeline predict"""
    pipeline = create_pipeline(pipeline, device=device, use_hpip=use_hpip)
    result = pipeline.predict(input, **pipeline_args)
    for res in result:
        res.print()
        if save_path:
            res.save_all(save_path=save_path)
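

# pipeline_predict() backs plain CLI prediction runs such as
# `paddlex --pipeline <name-or-config> --input <path>`; the pipeline-specific
# options collected in main() are forwarded here via **pipeline_args and passed
# straight through to predict().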


def serve(pipeline, *, device, use_hpip, host, port):
    from .inference.serving.basic_serving import create_pipeline_app, run_server

    pipeline_config = load_pipeline_config(pipeline)
    pipeline = create_pipeline(config=pipeline_config, device=device, use_hpip=use_hpip)
    app = create_pipeline_app(pipeline, pipeline_config)
    run_server(app, host=host, port=port)
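

# Typical serving launch (pipeline name illustrative):
#   paddlex --serve --pipeline OCR --device gpu:0 --host 0.0.0.0 --port 8080
# The serving imports are kept local to serve(), presumably so that serving
# dependencies are only needed when --serve is actually used.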


# TODO: Move to another module
def paddle_to_onnx(paddle_model_dir, onnx_model_dir, *, opset_version):
    PD_MODEL_FILE_PREFIX = "inference"
    PD_PARAMS_FILENAME = "inference.pdiparams"
    ONNX_MODEL_FILENAME = "inference.onnx"
    CONFIG_FILENAME = "inference.yml"
    ADDITIONAL_FILENAMES = ["scaler.pkl"]

    def _check_input_dir(input_dir, pd_model_file_ext):
        if input_dir is None:
            sys.exit("Input directory must be specified")
        if not input_dir.exists():
            sys.exit(f"{input_dir} does not exist")
        if not input_dir.is_dir():
            sys.exit(f"{input_dir} is not a directory")
        model_path = (input_dir / PD_MODEL_FILE_PREFIX).with_suffix(pd_model_file_ext)
        if not model_path.exists():
            sys.exit(f"{model_path} does not exist")
        params_path = input_dir / PD_PARAMS_FILENAME
        if not params_path.exists():
            sys.exit(f"{params_path} does not exist")
        config_path = input_dir / CONFIG_FILENAME
        if not config_path.exists():
            sys.exit(f"{config_path} does not exist")

    def _check_paddle2onnx():
        if shutil.which("paddle2onnx") is None:
            sys.exit("Paddle2ONNX is not available. Please install the plugin first.")

    def _run_paddle2onnx(input_dir, pd_model_file_ext, output_dir, opset_version):
        logging.info("Paddle2ONNX conversion starting...")
        # XXX: To circumvent Paddle2ONNX's bug
        if opset_version is None:
            if pd_model_file_ext == ".json":
                opset_version = 19
            else:
                opset_version = 7
            logging.info("Using default ONNX opset version: %d", opset_version)
        cmd = [
            "paddle2onnx",
            "--model_dir",
            str(input_dir),
            "--model_filename",
            str(Path(PD_MODEL_FILE_PREFIX).with_suffix(pd_model_file_ext)),
            "--params_filename",
            PD_PARAMS_FILENAME,
            "--save_file",
            str(output_dir / ONNX_MODEL_FILENAME),
            "--opset_version",
            str(opset_version),
        ]
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError as e:
            sys.exit(f"Paddle2ONNX conversion failed with exit code {e.returncode}")
        logging.info("Paddle2ONNX conversion succeeded")

    def _copy_config_file(input_dir, output_dir):
        src_path = input_dir / CONFIG_FILENAME
        dst_path = output_dir / CONFIG_FILENAME
        shutil.copy(src_path, dst_path)
        logging.info(f"Copied {src_path} to {dst_path}")

    def _copy_additional_files(input_dir, output_dir):
        for filename in ADDITIONAL_FILENAMES:
            src_path = input_dir / filename
            if not src_path.exists():
                continue
            dst_path = output_dir / filename
            shutil.copy(src_path, dst_path)
            logging.info(f"Copied {src_path} to {dst_path}")

    paddle_model_dir = Path(paddle_model_dir)
    onnx_model_dir = Path(onnx_model_dir)
    logging.info(f"Input dir: {paddle_model_dir}")
    logging.info(f"Output dir: {onnx_model_dir}")

    pd_model_file_ext = ".json"
    if not FLAGS_json_format_model:
        if not (paddle_model_dir / f"{PD_MODEL_FILE_PREFIX}.json").exists():
            pd_model_file_ext = ".pdmodel"

    _check_input_dir(paddle_model_dir, pd_model_file_ext)
    _check_paddle2onnx()
    _run_paddle2onnx(paddle_model_dir, pd_model_file_ext, onnx_model_dir, opset_version)
    if not (onnx_model_dir.exists() and onnx_model_dir.samefile(paddle_model_dir)):
        _copy_config_file(paddle_model_dir, onnx_model_dir)
        _copy_additional_files(paddle_model_dir, onnx_model_dir)
    logging.info("Done")
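

# Example conversion (paths illustrative):
#   paddlex --paddle2onnx --paddle_model_dir ./my_model --onnx_model_dir ./my_model_onnx
# On success the output directory holds inference.onnx plus a copy of
# inference.yml (and scaler.pkl when present), unless the output directory is
# the same as the input directory.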


# for CLI
def main():
    """API for command line"""
    parser, pipeline_args = args_cfg()
    args = parser.parse_args()

    if len(sys.argv) == 1:
        logging.warning("No arguments provided. Displaying help information:")
        parser.print_help()
        sys.exit(2)

    if args.install:
        install(args)
    elif args.serve:
        serve(
            args.pipeline,
            device=args.device,
            use_hpip=args.use_hpip,
            host=args.host,
            port=args.port,
        )
    elif args.paddle2onnx:
        paddle_to_onnx(
            args.paddle_model_dir,
            args.onnx_model_dir,
            opset_version=args.opset_version,
        )
    else:
        if args.get_pipeline_config is not None:
            interactive_get_pipeline(args.get_pipeline_config, args.save_path)
        else:
            pipeline_args_dict = {}
            for arg in pipeline_args:
                arg_name = arg["name"].lstrip("-")
                if hasattr(args, arg_name):
                    pipeline_args_dict[arg_name] = getattr(args, arg_name)
                else:
                    logging.warning(f"Argument {arg_name} is missing in args")
            return pipeline_predict(
                args.pipeline,
                args.input,
                args.device,
                args.save_path,
                use_hpip=args.use_hpip,
                **pipeline_args_dict,
            )
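

# Assumed direct-execution hook; the installed `paddlex` console script is
# expected to call main() as well.
if __name__ == "__main__":
    main()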