# converter.py
  1. # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. import os
  15. import os.path as osp
  16. from six import text_type as _text_type
  17. import argparse
  18. import sys
  19. import yaml
  20. import paddlex as pdx
  21. assert pdx.__version__ >= '1.2.6', "paddlex >= 1.2.6 is required."
  22. def arg_parser():
  23. parser = argparse.ArgumentParser()
  24. parser.add_argument(
  25. "--model_dir",
  26. "-m",
  27. type=_text_type,
  28. default=None,
  29. help="define model directory path")
  30. parser.add_argument(
  31. "--save_dir",
  32. "-s",
  33. type=_text_type,
  34. default=None,
  35. help="path to save inference model")
  36. parser.add_argument(
  37. "--fixed_input_shape",
  38. "-fs",
  39. default=None,
  40. help="export openvino model with input shape:[w,h]")
  41. parser.add_argument(
  42. "--data_type",
  43. "-dp",
  44. default="FP32",
  45. help="option, FP32 or FP16, the data_type of openvino IR")
  46. return parser
  47. def export_openvino_model(model, args):
  48. #convert paddle inference model to onnx
  49. onnx_save_file = os.path.join(args.save_dir, 'paddle2onnx_model.onnx')
  50. if model.__class__.__name__ == "YOLOv3":
  51. pdx.converter.export_onnx_model(model, onnx_save_file)
  52. else:
  53. pdx.converter.export_onnx_model(model, onnx_save_file, 11)
  54. #convert onnx to openvino ir
  55. try:
  56. import mo.main as mo
  57. from mo.utils.cli_parser import get_onnx_cli_parser
  58. except Exception as e:
  59. print("convert failed! ", e)
  60. print(
  61. "if error is 'no module name mo',please init openvino environment first"
  62. )
  63. print(
  64. "see https://github.com/PaddlePaddle/PaddleX/blob/develop/docs/deploy/openvino/faq.md"
  65. )
  66. else:
  67. onnx_parser = get_onnx_cli_parser()
  68. onnx_parser.add_argument("--model_dir", type=_text_type)
  69. onnx_parser.add_argument("--save_dir", type=_text_type)
  70. onnx_parser.add_argument("--fixed_input_shape")
  71. onnx_parser.set_defaults(input_model=onnx_save_file)
  72. onnx_parser.set_defaults(output_dir=args.save_dir)
  73. shape_list = args.fixed_input_shape[1:-1].split(',')
  74. with open(osp.join(args.model_dir, "model.yml")) as f:
  75. info = yaml.load(f.read(), Loader=yaml.Loader)
  76. input_channel = 3
  77. if 'input_channel' in info['_init_params']:
  78. input_channel = info['_init_params']['input_channel']
  79. shape = '[1,{},' + shape_list[1] + ',' + shape_list[0] + ']'
  80. shape = shape.format(input_channel)
  81. if model.__class__.__name__ == "YOLOv3":
  82. shape = shape + ",[1,2]"
  83. inputs = "image,im_size"
  84. onnx_parser.set_defaults(input=inputs)
  85. onnx_parser.set_defaults(input_shape=shape)
  86. mo.main(onnx_parser, 'onnx')
  87. def main():
  88. parser = arg_parser()
  89. args = parser.parse_args()
  90. assert args.model_dir is not None, "--model_dir should be defined while exporting openvino model"
  91. assert args.save_dir is not None, "--save_dir should be defined to create openvino model"
  92. model = pdx.load_model(args.model_dir)
  93. if model.status == "Normal" or model.status == "Prune":
  94. print(
  95. "Only support inference model, try to export inference model first as below,"
  96. )
  97. print(
  98. "see https://github.com/PaddlePaddle/PaddleX/blob/develop/docs/deploy/openvino/faq.md"
  99. )
  100. else:
  101. prog = model.test_prog
  102. for var in prog.list_vars():
  103. if var.name == "image":
  104. shape = list(var.shape)
  105. if shape[2] == -1 and shape[3] == -1:
  106. print(
  107. "convert failed, please export paddle inference model by fixed_input_shape"
  108. )
  109. print(
  110. "see https://github.com/PaddlePaddle/PaddleX/blob/develop/docs/deploy/openvino/faq.md"
  111. )
  112. else:
  113. export_openvino_model(model, args)
  114. if __name__ == "__main__":
  115. main()