# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
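
# converter.py: converts a PaddleX inference model to OpenVINO IR. The model
# is first exported to ONNX and then converted with the OpenVINO Model
# Optimizer (mo).
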
import os
import os.path as osp
from six import text_type as _text_type
import argparse
import sys
import yaml
import paddlex as pdx

assert pdx.__version__ >= '1.2.6', "paddlex >= 1.2.6 is required."

def arg_parser():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--model_dir",
        "-m",
        type=_text_type,
        default=None,
        help="define model directory path")
    parser.add_argument(
        "--save_dir",
        "-s",
        type=_text_type,
        default=None,
        help="path to save inference model")
    parser.add_argument(
        "--fixed_input_shape",
        "-fs",
        default=None,
        help="export openvino model with fixed input shape: [w,h]")
    parser.add_argument(
        "--data_type",
        "-dp",
        default="FP32",
        help="optional, FP32 or FP16, the data_type of openvino IR")
    return parser

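
# export_openvino_model: export the loaded PaddleX model to ONNX, then run the
# OpenVINO Model Optimizer (mo) on the ONNX file to produce OpenVINO IR in
# args.save_dir. args.fixed_input_shape is expected as a "[w,h]" string.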
def export_openvino_model(model, args):
    # Step 1: convert the Paddle inference model to ONNX.
    onnx_save_file = os.path.join(args.save_dir, 'paddle2onnx_model.onnx')
    if model.__class__.__name__ == "YOLOv3":
        pdx.converter.export_onnx_model(model, onnx_save_file)
    else:
        pdx.converter.export_onnx_model(model, onnx_save_file, 11)

    # Step 2: convert the ONNX model to OpenVINO IR with the Model Optimizer.
    import mo.main as mo
    from mo.utils.cli_parser import get_onnx_cli_parser
    onnx_parser = get_onnx_cli_parser()
    onnx_parser.add_argument("--model_dir", type=_text_type)
    onnx_parser.add_argument("--save_dir", type=_text_type)
    onnx_parser.add_argument("--fixed_input_shape")
    onnx_parser.set_defaults(input_model=onnx_save_file)
    onnx_parser.set_defaults(output_dir=args.save_dir)

    # Build the IR input shape [1, C, h, w] from the "[w,h]" string and the
    # input_channel recorded in model.yml (default 3).
    shape_list = args.fixed_input_shape[1:-1].split(',')
    with open(osp.join(args.model_dir, "model.yml")) as f:
        info = yaml.load(f.read(), Loader=yaml.Loader)
    input_channel = 3
    if 'input_channel' in info['_init_params']:
        input_channel = info['_init_params']['input_channel']
    shape = '[1,{},' + shape_list[1] + ',' + shape_list[0] + ']'
    shape = shape.format(input_channel)
    if model.__class__.__name__ == "YOLOv3":
        # YOLOv3 takes a second input "im_size" of shape [1,2].
        shape = shape + ",[1,2]"
        inputs = "image,im_size"
        onnx_parser.set_defaults(input=inputs)
    onnx_parser.set_defaults(input_shape=shape)
    mo.main(onnx_parser, 'onnx')


def main():
    parser = arg_parser()
    args = parser.parse_args()
    assert args.model_dir is not None, "--model_dir should be defined while exporting openvino model"
    assert args.save_dir is not None, "--save_dir should be defined to create openvino model"
    model = pdx.load_model(args.model_dir)
    if model.status == "Normal" or model.status == "Prune":
        # A trained ("Normal") or pruned model is not an inference model; warn
        # before attempting the export.
        print("Only support inference model, please export the inference model first.")
    export_openvino_model(model, args)


if __name__ == "__main__":
    main()
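
# Example invocation (hypothetical paths and shape, adjust to your model):
#   python converter.py --model_dir ./inference_model --save_dir ./openvino_model \
#       --fixed_input_shape [608,608] --data_type FP32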