@@ -17,16 +17,27 @@ import codecs
 from pathlib import Path
 from abc import abstractmethod
 
+import GPUtil
+
 from ...utils.subclass_register import AutoRegisterABCMetaClass
+from ..utils.device import constr_device
 from ...utils import logging
 from ..components.base import BaseComponent, ComponentsEngine
 from ..components.paddle_predictor.option import PaddlePredictorOption
 from ..utils.process_hook import generatorable_method
 
 
-class BasePredictor(BaseComponent, metaclass=AutoRegisterABCMetaClass):
-    __is_base = True
-
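+# New helper: pick the first GPU that GPUtil reports as available; fall back to CPU when none are free.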
+def _get_default_device():
+    avail_gpus = GPUtil.getAvailable()
+    if not avail_gpus:
+        return "cpu"
+    else:
+        return constr_device("gpu", [avail_gpus[0]])
+
+
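+# BasePredictor is now a plain abstract base without the auto-register metaclass;
+# the Paddle-specific machinery moves to BasicPredictor below.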
+class BasePredictor(BaseComponent):
     INPUT_KEYS = "x"
     DEAULT_INPUTS = {"x": "x"}
     OUTPUT_KEYS = "result"
@@ -36,33 +48,56 @@ class BasePredictor(BaseComponent, metaclass=AutoRegisterABCMetaClass):
 
     MODEL_FILE_PREFIX = "inference"
 
-    def __init__(self, model_dir, config=None, device=None, pp_option=None, **kwargs):
+    def __init__(self, model_dir, config=None, device=None, **kwargs):
         super().__init__()
         self.model_dir = Path(model_dir)
         self.config = config if config else self.load_config(self.model_dir)
+        self.device = device if device else _get_default_device()
         self.kwargs = self._check_args(kwargs)
+        # alias predict() to the __call__()
+        self.predict = self.__call__
 
-        self.pp_option = PaddlePredictorOption() if pp_option is None else pp_option
-        if device is not None:
-            self.pp_option.set_device(device)
+    @property
+    def config_path(self):
+        return self.get_config_path(self.model_dir)
 
-        self.components = self._build_components()
-        self.engine = ComponentsEngine(self.components)
+    @property
+    def model_name(self) -> str:
+        return self.config["Global"]["model_name"]
 
-        # alias predict() to the __call__()
-        self.predict = self.__call__
+    @abstractmethod
+    def apply(self, x):
+        raise NotImplementedError
 
-        logging.debug(
-            f"-------------------- {self.__class__.__name__} --------------------\nModel: {self.model_dir}\nEnv: {self.pp_option}"
-        )
+    @classmethod
+    def get_config_path(cls, model_dir):
+        return model_dir / f"{cls.MODEL_FILE_PREFIX}.yml"
 
     @classmethod
     def load_config(cls, model_dir):
-        config_path = model_dir / f"{cls.MODEL_FILE_PREFIX}.yml"
+        config_path = cls.get_config_path(model_dir)
         with codecs.open(config_path, "r", "utf-8") as file:
             dic = yaml.load(file, Loader=yaml.FullLoader)
         return dic
 
+    def _check_args(self, kwargs):
+        return kwargs
+
+
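+# BasicPredictor keeps the registry metaclass and the Paddle inference wiring
+# (PaddlePredictorOption, components, engine) that was split out of BasePredictor.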
+class BasicPredictor(BasePredictor, metaclass=AutoRegisterABCMetaClass):
+    __is_base = True
+
+    def __init__(self, model_dir, config=None, device=None, pp_option=None, **kwargs):
+        super().__init__(model_dir=model_dir, config=config, device=device, **kwargs)
+        self.pp_option = PaddlePredictorOption() if pp_option is None else pp_option
+        self.pp_option.set_device(self.device)
+        self.components = self._build_components()
+        self.engine = ComponentsEngine(self.components)
+        logging.debug(
+            f"-------------------- {self.__class__.__name__} --------------------\nModel: {self.model_dir}\nEnv: {self.pp_option}"
+        )
+
     def apply(self, x):
         """predict"""
         yield from self._generate_res(self.engine(x))
@@ -71,9 +107,6 @@ class BasePredictor(BaseComponent, metaclass=AutoRegisterABCMetaClass):
     def _generate_res(self, data):
         return self._pack_res(data)
 
-    def _check_args(self, kwargs):
-        return kwargs
-
     @abstractmethod
     def _build_components(self):
         raise NotImplementedError