trainer.py

# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pathlib import Path

import paddle

from ..base import BaseTrainer, BaseTrainDeamon
from ...utils.config import AttrDict
from ...utils import logging
from .model_list import MODELS


class DetTrainer(BaseTrainer):
    """ Object Detection Model Trainer """
    entities = MODELS

    def build_deamon(self, config: AttrDict) -> "DetTrainDeamon":
        """build daemon thread for saving training outputs timely

        Args:
            config (AttrDict): PaddleX pipeline config, which is loaded from pipeline yaml file.

        Returns:
            DetTrainDeamon: the training daemon thread object for saving training outputs timely.
        """
        return DetTrainDeamon(config)
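
    # Note (assumption): DetTrainDeamon is defined at the bottom of this file; the
    # base class BaseTrainDeamon is assumed to run it as a background thread that
    # watches the save directory and collects checkpoint artifacts as training
    # proceeds, using the suffix/prefix helpers defined on DetTrainDeamon.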

    def _update_dataset(self):
        """update dataset settings"""
        self.pdx_config.update_dataset(self.global_config.dataset_dir,
                                       "COCODetDataset")
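
    # Assumption for illustration: global_config.dataset_dir is expected to hold a
    # COCO-format detection dataset, since it is registered above as "COCODetDataset".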

    def update_config(self):
        """update training config"""
        if self.train_config.log_interval:
            self.pdx_config.update_log_interval(self.train_config.log_interval)
        if self.train_config.eval_interval:
            self.pdx_config.update_eval_interval(
                self.train_config.eval_interval)
        self._update_dataset()
        if self.train_config.num_classes is not None:
            self.pdx_config.update_num_class(self.train_config.num_classes)
        if self.train_config.pretrain_weight_path and self.train_config.pretrain_weight_path != "":
            self.pdx_config.update_pretrained_weights(
                self.train_config.pretrain_weight_path)
        if self.train_config.batch_size is not None:
            self.pdx_config.update_batch_size(self.train_config.batch_size)
        if self.train_config.learning_rate is not None:
            self.pdx_config.update_learning_rate(
                self.train_config.learning_rate)
        if self.train_config.epochs_iters is not None:
            self.pdx_config.update_epochs(self.train_config.epochs_iters)
            epochs_iters = self.train_config.epochs_iters
        else:
            epochs_iters = self.pdx_config.get_epochs_iters()
        if self.global_config.output is not None:
            self.pdx_config.update_save_dir(self.global_config.output)

        if "PicoDet" in self.global_config.model:
            assigner_epochs = max(int(epochs_iters / 10), 1)
            try:
                self.pdx_config.update_static_assigner_epochs(assigner_epochs)
            except Exception:
                logging.info(
                    f"The model ({self.global_config.model}) does not support update_static_assigner_epochs!"
                )
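
    # Worked example of the PicoDet branch above (epoch counts assumed for
    # illustration): with epochs_iters = 80, assigner_epochs = max(int(80 / 10), 1) = 8;
    # with epochs_iters = 5, assigner_epochs = max(0, 1) = 1, so the static assigner
    # always gets at least one epoch.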

    def get_train_kwargs(self) -> dict:
        """get key-value arguments of model training function

        Returns:
            dict: the arguments of training function.
        """
        train_args = {"device": self.get_device()}
        if self.train_config.resume_path is not None and self.train_config.resume_path != "":
            train_args["resume_path"] = self.train_config.resume_path
        return train_args
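
    # Illustrative return value (device string and resume path are hypothetical):
    #   {"device": "gpu:0,1", "resume_path": "output/last_epoch.pdparams"}
    # "resume_path" is included only when train_config.resume_path is set and non-empty.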


class DetTrainDeamon(BaseTrainDeamon):
    """ DetTrainDeamon """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def get_the_pdparams_suffix(self):
        """ get the suffix of pdparams file """
        return "pdparams"

    def get_the_pdema_suffix(self):
        """ get the suffix of pdema file """
        return "pdema"

    def get_the_pdopt_suffix(self):
        """ get the suffix of pdopt file """
        return "pdopt"

    def get_the_pdstates_suffix(self):
        """ get the suffix of pdstates file """
        return "pdstates"

    def get_ith_ckp_prefix(self, epoch_id):
        """ get the prefix of the epoch_id checkpoint file """
        return f"{epoch_id}"

    def get_best_ckp_prefix(self):
        """ get the prefix of the best checkpoint file """
        return "best_model"

    def get_score(self, pdstates_path):
        """ get the score by pdstates file """
        if not Path(pdstates_path).exists():
            return 0
        return paddle.load(pdstates_path)["metric"]
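
    # Assumed pdstates structure: paddle.load() on a *.pdstates file is expected to
    # return a dict along the lines of {"metric": 0.52, ...}; only the "metric"
    # entry is read here, and a missing file scores 0.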

    def get_epoch_id_by_pdparams_prefix(self, pdparams_prefix):
        """ get the epoch_id by pdparams file """
        return int(pdparams_prefix)
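
# Checkpoint layout implied by the helpers above (illustrative; the save directory
# name and the prefix/suffix joining are assumptions about BaseTrainDeamon):
#   output/12.pdparams          -> weights of epoch 12 (get_ith_ckp_prefix + pdparams)
#   output/12.pdopt             -> optimizer state of epoch 12
#   output/best_model.pdstates  -> metrics of the best checkpoint (get_best_ckp_prefix + pdstates)
# get_epoch_id_by_pdparams_prefix() maps a prefix such as "12" back to epoch 12.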