command.py (3.5 KB)
  1. # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve.
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. from six import text_type as _text_type
  15. import argparse
  16. import sys
  17. import paddlex.utils.logging as logging
  18. def arg_parser():
  19. parser = argparse.ArgumentParser()
  20. parser.add_argument(
  21. "--model_dir",
  22. "-m",
  23. type=_text_type,
  24. default=None,
  25. help="define model directory path")
  26. parser.add_argument(
  27. "--save_dir",
  28. "-s",
  29. type=_text_type,
  30. default=None,
  31. help="path to save inference model")
  32. parser.add_argument(
  33. "--version",
  34. "-v",
  35. action="store_true",
  36. default=False,
  37. help="get version of PaddleX")
  38. parser.add_argument(
  39. "--export_inference",
  40. "-e",
  41. action="store_true",
  42. default=False,
  43. help="export inference model for C++/Python deployment")
  44. parser.add_argument(
  45. "--export_onnx",
  46. "-eo",
  47. action="store_true",
  48. default=False,
  49. help="export onnx model for deployment")
  50. parser.add_argument(
  51. "--fixed_input_shape",
  52. "-fs",
  53. default=None,
  54. help="export inference model with fixed input shape:[w,h]")
  55. return parser
  56. def main():
  57. import os
  58. os.environ['CUDA_VISIBLE_DEVICES'] = ""
  59. import paddlex as pdx
  60. if len(sys.argv) < 2:
  61. print("Use command 'paddlex -h` to print the help information\n")
  62. return
  63. parser = arg_parser()
  64. args = parser.parse_args()
  65. if args.version:
  66. print("PaddleX-{}".format(pdx.__version__))
  67. print("Repo: https://github.com/PaddlePaddle/PaddleX.git")
  68. print("Email: paddlex@baidu.com")
  69. return
  70. if args.export_inference:
  71. assert args.model_dir is not None, "--model_dir should be defined while exporting inference model"
  72. assert args.save_dir is not None, "--save_dir should be defined to save inference model"
  73. fixed_input_shape = None
  74. if args.fixed_input_shape is not None:
  75. fixed_input_shape = eval(args.fixed_input_shape)
  76. assert len(
  77. fixed_input_shape
  78. ) == 2, "len of fixed input shape must == 2, such as [224,224]"
  79. else:
  80. fixed_input_shape = None
  81. model = pdx.load_model(args.model_dir, fixed_input_shape)
  82. model.export_inference_model(args.save_dir)
  83. if args.export_onnx:
  84. assert args.model_dir is not None, "--model_dir should be defined while exporting onnx model"
  85. assert args.save_dir is not None, "--save_dir should be defined to create onnx model"
  86. model = pdx.load_model(args.model_dir)
  87. if model.status == "Normal" or model.status == "Prune":
  88. logging.error(
  89. "Only support inference model, try to export model first as below,",
  90. exit=False)
  91. logging.error(
  92. "paddlex --export_inference --model_dir model_path --save_dir infer_model"
  93. )
  94. pdx.convertor.export_onnx_model(model, args.save_dir)
  95. if __name__ == "__main__":
  96. main()