# trainer.py

# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
  14. from pathlib import Path
  15. import lazy_paddle as paddle
  16. from ..base import BaseTrainer, BaseTrainDeamon
  17. from ...utils.config import AttrDict
  18. from ...utils import logging
  19. from .model_list import MODELS
  20. class DetTrainer(BaseTrainer):
  21. """Object Detection Model Trainer"""
  22. entities = MODELS
  23. def build_deamon(self, config: AttrDict) -> "DetTrainDeamon":
  24. """build deamon thread for saving training outputs timely
  25. Args:
  26. config (AttrDict): PaddleX pipeline config, which is loaded from pipeline yaml file.
  27. Returns:
  28. DetTrainDeamon: the training deamon thread object for saving training outputs timely.
  29. """
  30. return DetTrainDeamon(config)
  31. def _update_dataset(self):
  32. """update dataset settings"""
  33. self.pdx_config.update_dataset(self.global_config.dataset_dir, "COCODetDataset")
  34. def update_config(self):
  35. """update training config"""
  36. if self.train_config.log_interval:
  37. self.pdx_config.update_log_interval(self.train_config.log_interval)
  38. if self.train_config.eval_interval:
  39. self.pdx_config.update_eval_interval(self.train_config.eval_interval)
  40. self._update_dataset()
  41. if self.train_config.num_classes is not None:
  42. self.pdx_config.update_num_class(self.train_config.num_classes)
  43. if (
  44. self.train_config.pretrain_weight_path
  45. and self.train_config.pretrain_weight_path != ""
  46. ):
  47. self.pdx_config.update_pretrained_weights(
  48. self.train_config.pretrain_weight_path
  49. )
  50. if self.train_config.batch_size is not None:
  51. self.pdx_config.update_batch_size(self.train_config.batch_size)
  52. if self.train_config.learning_rate is not None:
  53. self.pdx_config.update_learning_rate(self.train_config.learning_rate)
  54. if self.train_config.epochs_iters is not None:
  55. self.pdx_config.update_epochs(self.train_config.epochs_iters)
  56. epochs_iters = self.train_config.epochs_iters
  57. else:
  58. epochs_iters = self.pdx_config.get_epochs_iters()
  59. if self.global_config.output is not None:
  60. self.pdx_config.update_save_dir(self.global_config.output)
  61. if "PicoDet" in self.global_config.model:
  62. assigner_epochs = max(int(epochs_iters / 10), 1)
  63. try:
  64. self.pdx_config.update_static_assigner_epochs(assigner_epochs)
  65. except Exception:
  66. logging.info(
  67. f"The model({self.global_config.model}) don't support to update_static_assigner_epochs!"
  68. )
  69. def get_train_kwargs(self) -> dict:
  70. """get key-value arguments of model training function
  71. Returns:
  72. dict: the arguments of training function.
  73. """
  74. train_args = {"device": self.get_device()}
  75. if (
  76. self.train_config.resume_path is not None
  77. and self.train_config.resume_path != ""
  78. ):
  79. train_args["resume_path"] = self.train_config.resume_path
  80. train_args["dy2st"] = self.train_config.get("dy2st", False)
  81. return train_args
  82. class DetTrainDeamon(BaseTrainDeamon):
  83. """DetTrainResultDemon"""
  84. def __init__(self, *args, **kwargs):
  85. super().__init__(*args, **kwargs)
  86. def get_the_pdparams_suffix(self):
  87. """get the suffix of pdparams file"""
  88. return "pdparams"
  89. def get_the_pdema_suffix(self):
  90. """get the suffix of pdema file"""
  91. return "pdema"
  92. def get_the_pdopt_suffix(self):
  93. """get the suffix of pdopt file"""
  94. return "pdopt"
  95. def get_the_pdstates_suffix(self):
  96. """get the suffix of pdstates file"""
  97. return "pdstates"
  98. def get_ith_ckp_prefix(self, epoch_id):
  99. """get the prefix of the epoch_id checkpoint file"""
  100. return f"{epoch_id}"
  101. def get_best_ckp_prefix(self):
  102. """get the prefix of the best checkpoint file"""
  103. return "best_model"
  104. def get_score(self, pdstates_path):
  105. """get the score by pdstates file"""
  106. if not Path(pdstates_path).exists():
  107. return 0
  108. return paddle.load(pdstates_path)["metric"]
  109. def get_epoch_id_by_pdparams_prefix(self, pdparams_prefix):
  110. """get the epoch_id by pdparams file"""
  111. return int(pdparams_prefix)