basic_predictor.py

# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Dict, Any, Iterator
from abc import abstractmethod

from .....utils.subclass_register import AutoRegisterABCMetaClass
from .....utils.flags import (
    INFER_BENCHMARK,
    INFER_BENCHMARK_WARMUP,
)
from .....utils import logging
from ....utils.pp_option import PaddlePredictorOption
from ....utils.benchmark import benchmark
from .base_predictor import BasePredictor

class BasicPredictor(
    BasePredictor,
    metaclass=AutoRegisterABCMetaClass,
):
    """BasicPredictor."""

    __is_base = True

    def __init__(
        self,
        model_dir: str,
        config: Dict[str, Any] = None,
        device: str = None,
        pp_option: PaddlePredictorOption = None,
    ) -> None:
        """Initializes the BasicPredictor.

        Args:
            model_dir (str): The directory where the model files are stored.
            config (Dict[str, Any], optional): The configuration dictionary. Defaults to None.
            device (str, optional): The device to run the inference engine on. Defaults to None.
            pp_option (PaddlePredictorOption, optional): The inference engine options. Defaults to None.
        """
        super().__init__(model_dir=model_dir, config=config)
        if not pp_option:
            pp_option = PaddlePredictorOption(model_name=self.model_name)
        if device:
            pp_option.device = device
        self.pp_option = pp_option
        logging.debug(f"{self.__class__.__name__}: {self.model_dir}")
        self.benchmark = benchmark

    def __call__(
        self,
        input: Any,
        batch_size: int = None,
        device: str = None,
        pp_option: PaddlePredictorOption = None,
        **kwargs: Dict[str, Any],
    ) -> Iterator[Any]:
        """Predict with the input data.

        Args:
            input (Any): The input data to be predicted.
            batch_size (int, optional): The batch size to use. Defaults to None.
            device (str, optional): The device to run the predictor on. Defaults to None.
            pp_option (PaddlePredictorOption, optional): The predictor options to set. Defaults to None.
            **kwargs (Dict[str, Any]): Additional keyword arguments used to set up the predictor.

        Returns:
            Iterator[Any]: An iterator yielding the prediction output.
        """
        self.set_predictor(batch_size, device, pp_option)
        if self.benchmark:
            self.benchmark.start()
            if INFER_BENCHMARK_WARMUP > 0:
                # Run warmup iterations first so they are excluded from the
                # timings collected below.
                output = self.apply(input, **kwargs)
                warmup_num = 0
                for _ in range(INFER_BENCHMARK_WARMUP):
                    try:
                        next(output)
                        warmup_num += 1
                    except StopIteration:
                        logging.warning(
                            f"There are only {warmup_num} batches in input data, but `INFER_BENCHMARK_WARMUP` has been set to {INFER_BENCHMARK_WARMUP}."
                        )
                        break
                self.benchmark.warmup_stop(warmup_num)
            # Exhaust the generator so the whole run is timed, then record
            # the number of produced batches.
            output = list(self.apply(input, **kwargs))
            self.benchmark.collect(len(output))
        else:
            # Normal inference path: stream results lazily to the caller.
            yield from self.apply(input, **kwargs)

    def set_predictor(
        self,
        batch_size: int = None,
        device: str = None,
        pp_option: PaddlePredictorOption = None,
    ) -> None:
        """Sets the predictor configuration.

        Args:
            batch_size (int, optional): The batch size to use. Defaults to None.
            device (str, optional): The device to run the predictor on. Defaults to None.
            pp_option (PaddlePredictorOption, optional): The predictor options to set. Defaults to None.

        Returns:
            None
        """
        if batch_size:
            self.batch_sampler.batch_size = batch_size
            self.pp_option.batch_size = batch_size
        if device and device != self.pp_option.device:
            self.pp_option.device = device
        if pp_option and pp_option != self.pp_option:
            self.pp_option = pp_option
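

# Usage sketch (illustrative only, not part of the original module):
# BasicPredictor is a base class registered through AutoRegisterABCMetaClass,
# so inference runs through a concrete subclass. "SomeTaskPredictor" and the
# paths below are hypothetical placeholders, assuming such a subclass exists
# and a local inference model directory is available.
#
#     predictor = SomeTaskPredictor(
#         model_dir="path/to/inference_model",
#         device="gpu:0",
#     )
#     # __call__ returns a generator; in the normal (non-benchmark) path the
#     # results are yielded batch by batch.
#     for result in predictor("path/to/input", batch_size=2):
#         print(result)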