# __init__.py
  1. # Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. from __future__ import absolute_import
  15. import logging
  16. import os
  17. import sys
  18. import platform
# Create a symbol link to tensorrt library.
# The bundled TensorRT shared objects appear to ship only under their ".8"
# versioned names (TODO confirm); the dynamic loader also needs the bare
# ".so" names, so create symlinks for them at first import.
trt_directory = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), "libs/third_libs/tensorrt/lib/"
)
# Skip on Windows (no symlink setup there) and when TensorRT is not bundled.
if os.name != "nt" and os.path.exists(trt_directory):
    logging.basicConfig(level=logging.INFO)
    for trt_lib in [
        "libnvcaffe_parser.so",
        "libnvinfer_plugin.so",
        "libnvinfer.so",
        "libnvonnxparser.so",
        "libnvparsers.so",
    ]:
        dst = os.path.join(trt_directory, trt_lib)
        src = os.path.join(trt_directory, trt_lib + ".8")
        # Only create the link once; later imports find it already present.
        if not os.path.exists(dst):
            try:
                os.symlink(src, dst)
                logging.info(f"Create a symbolic link pointing to {src} named {dst}.")
            except OSError as e:
                # Presumably a permissions problem (site-packages not writable
                # by this user): warn once and stop trying the remaining libs,
                # since they would fail the same way.
                logging.warning(
                    f"Failed to create a symbolic link pointing to {src} by an unprivileged user. "
                    "It may failed when you use Paddle TensorRT backend. "
                    "Please use administrator privilege to import ultra_infer at first time."
                )
                break
# HACK: Reset the root logger config that got messed up by FD.
# basicConfig() above attached a handler and level to the root logger; undo
# that so importing this package does not alter the application's logging.
root_logger = logging.getLogger()
root_logger.level = logging.WARNING
for handler in root_logger.handlers[:]:
    root_logger.removeHandler(handler)
  50. from .code_version import version, git_version, extra_version_info
  51. from .code_version import enable_trt_backend, enable_paddle_backend, with_gpu
  52. # Note(zhoushunjie): Fix the import order of paddle and ultra_infer library.
  53. # This solution will be removed it when the conflict of paddle and
  54. # ultra_infer is fixed.
  55. # Note(qiuyanjun): Add backward compatible for paddle 2.4.x
  56. sys_platform = platform.platform().lower()
  57. def get_paddle_version():
  58. paddle_version = ""
  59. try:
  60. import pkg_resources
  61. paddle_version = pkg_resources.require("paddlepaddle-gpu")[0].version.split(
  62. ".post"
  63. )[0]
  64. except:
  65. try:
  66. paddle_version = pkg_resources.require("paddlepaddle")[0].version.split(
  67. ".post"
  68. )[0]
  69. except:
  70. pass
  71. return paddle_version
  72. def should_import_paddle():
  73. if ("paddle2.4" in extra_version_info) or ("post24" in extra_version_info):
  74. paddle_version = get_paddle_version()
  75. if (
  76. paddle_version != ""
  77. and paddle_version <= "2.4.2"
  78. and paddle_version != "0.0.0"
  79. ):
  80. return True
  81. return False
  82. def should_set_tensorrt():
  83. if (
  84. with_gpu == "ON"
  85. and enable_paddle_backend == "ON"
  86. and enable_trt_backend == "ON"
  87. ):
  88. return True
  89. return False
  90. def tensorrt_is_avaliable():
  91. # Note(qiuyanjun): Only support linux now.
  92. found_trt_lib = False
  93. if ("linux" in sys_platform) and ("LD_LIBRARY_PATH" in os.environ.keys()):
  94. for lib_path in os.environ["LD_LIBRARY_PATH"].split(":"):
  95. if os.path.exists(os.path.join(lib_path, "libnvinfer.so")):
  96. found_trt_lib = True
  97. break
  98. return found_trt_lib
  99. try:
  100. # windows: no conflict between ultra_infer and paddle.
  101. # linux: must import paddle first to solve the conflict.
  102. # macos: still can not solve the conflict between ultra_infer and paddle,
  103. # due to the global flags redefined in paddle/paddle_inference so.
  104. # we got the error (ERROR: flag 'xxx' was defined more than once).
  105. if "linux" in sys_platform:
  106. if should_import_paddle():
  107. import paddle # need import paddle first for paddle2.4.x
  108. # check whether tensorrt in LD_LIBRARY_PATH for ultra_infer
  109. if should_set_tensorrt() and (not tensorrt_is_avaliable()):
  110. if os.path.exists(trt_directory):
  111. logging.info(
  112. "\n[WARNING] Can not find TensorRT lib in LD_LIBRARY_PATH for UltraInfer! \
  113. \n[WARNING] Please export [ YOUR CUSTOM TensorRT ] lib path to LD_LIBRARY_PATH first, or run the command: \
  114. \n[WARNING] Linux: 'export LD_LIBRARY_PATH=$(python -c 'from ultra_infer import trt_directory; print(trt_directory)'):$LD_LIBRARY_PATH'"
  115. )
  116. else:
  117. logging.info(
  118. "\n[WARNING] Can not find TensorRT lib in LD_LIBRARY_PATH for UltraInfer! \
  119. \n[WARNING] Please export [YOUR CUSTOM TensorRT] lib path to LD_LIBRARY_PATH first."
  120. )
  121. except:
  122. pass
  123. from .c_lib_wrap import (
  124. ModelFormat,
  125. Backend,
  126. FDDataType,
  127. TensorInfo,
  128. Device,
  129. is_built_with_gpu,
  130. is_built_with_ort,
  131. ModelFormat,
  132. is_built_with_paddle,
  133. is_built_with_trt,
  134. get_default_cuda_directory,
  135. is_built_with_openvino,
  136. is_built_with_om,
  137. )
  138. def set_logger(enable_info=True, enable_warning=True):
  139. """Set behaviour of logger while using UltraInfer
  140. :param enable_info: (boolean)Whether to print out log level of INFO
  141. :param enable_warning: (boolean)Whether to print out log level of WARNING, recommend to set to True
  142. """
  143. from .c_lib_wrap import set_logger
  144. set_logger(enable_info, enable_warning)
  145. from .runtime import Runtime, RuntimeOption
  146. from .model import UltraInferModel
  147. from . import c_lib_wrap as C
  148. from . import vision
  149. from . import pipeline
  150. from . import text
  151. from . import ts
  152. from .download import download, download_and_decompress, download_model, get_model_list
  153. __version__ = version