trainer.py

# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pathlib import Path

import paddle

from ..base import BaseTrainer, BaseTrainDeamon
from ...utils.config import AttrDict
from .model_list import MODELS

class SegTrainer(BaseTrainer):
    """Semantic Segmentation Model Trainer"""

    entities = MODELS

    def build_deamon(self, config: AttrDict) -> "SegTrainDeamon":
        """build the daemon thread for saving training outputs in a timely manner

        Args:
            config (AttrDict): PaddleX pipeline config, which is loaded from the pipeline yaml file.

        Returns:
            SegTrainDeamon: the training daemon thread object for saving training outputs in a timely manner.
        """
        return SegTrainDeamon(config)

    def update_config(self):
        """update training config"""
        self.pdx_config.update_dataset(self.global_config.dataset_dir,
                                       "SegDataset")
        if self.train_config.num_classes is not None:
            self.pdx_config.update_num_classes(self.train_config.num_classes)
        # A truthiness check already rules out both None and "".
        if self.train_config.pretrain_weight_path:
            self.pdx_config.update_pretrained_weights(
                self.train_config.pretrain_weight_path, is_backbone=True)
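
    # Note: judging by the flag name, is_backbone=True applies the supplied
    # pretrained weights to the model's backbone rather than to the full
    # segmentation network.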

    def get_train_kwargs(self) -> dict:
        """get key-value arguments of the model training function

        Returns:
            dict: the arguments of the training function.
        """
        train_args = {"device": self.get_device()}
        if self.train_config.batch_size is not None:
            train_args["batch_size"] = self.train_config.batch_size
        if self.train_config.learning_rate is not None:
            train_args["learning_rate"] = self.train_config.learning_rate
        if self.train_config.epochs_iters is not None:
            train_args["epochs_iters"] = self.train_config.epochs_iters
        if self.train_config.resume_path:
            train_args["resume_path"] = self.train_config.resume_path
        if self.global_config.output is not None:
            train_args["save_dir"] = self.global_config.output
        if self.train_config.log_interval:
            train_args["log_iters"] = self.train_config.log_interval
        if self.train_config.eval_interval:
            # Evaluation and checkpoint saving share the same interval.
            train_args["do_eval"] = True
            train_args["save_interval"] = self.train_config.eval_interval
        return train_args
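
# Illustrative example (hypothetical values): if the train config sets
# batch_size=2, learning_rate=0.01, epochs_iters=1000, and eval_interval=500,
# and get_device() resolves to "gpu:0", get_train_kwargs() returns:
#     {"device": "gpu:0", "batch_size": 2, "learning_rate": 0.01,
#      "epochs_iters": 1000, "do_eval": True, "save_interval": 500}
# plus "save_dir"/"log_iters"/"resume_path" when the corresponding config
# fields are set.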


class SegTrainDeamon(BaseTrainDeamon):
    """SegTrainDeamon: collects and packages segmentation training outputs."""

    last_k = 1

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def get_the_pdparams_suffix(self):
        """get the suffix of the pdparams file"""
        return "pdparams"

    def get_the_pdema_suffix(self):
        """get the suffix of the pdema file"""
        return "pdema"

    def get_the_pdopt_suffix(self):
        """get the suffix of the pdopt file"""
        return "pdopt"

    def get_the_pdstates_suffix(self):
        """get the suffix of the pdstates file"""
        return "pdstates"

    def get_ith_ckp_prefix(self, epoch_id):
        """get the prefix of the epoch_id checkpoint file"""
        return f"iter_{epoch_id}/model"

    def get_best_ckp_prefix(self):
        """get the prefix of the best checkpoint file"""
        return "best_model/model"

    def get_score(self, pdstates_path):
        """get the score from a pdstates file"""
        if not Path(pdstates_path).exists():
            return 0
        return paddle.load(pdstates_path)["mIoU"]
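
    # Note: a *.pdstates file is a pickled dict of evaluation metrics saved
    # alongside a checkpoint; the lookup above assumes it contains an "mIoU"
    # (mean intersection-over-union) entry, the standard segmentation metric.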

    def get_epoch_id_by_pdparams_prefix(self, pdparams_dir):
        """get the iteration id from the parent directory name (iter_{N}) of a pdparams file"""
        return int(pdparams_dir.parent.name.split("_")[-1])
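
    # The prefixes above imply the following layout under the training output
    # directory (a sketch inferred from get_ith_ckp_prefix/get_best_ckp_prefix):
    #     iter_1000/model.pdparams     periodic, iteration-numbered checkpoint
    #     best_model/model.pdparams    best checkpoint so far (ranked by mIoU)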

    def update_result(self, result, train_output):
        """update every result"""
        train_output = Path(train_output).resolve()
        config_path = train_output.joinpath("config.yaml").resolve()
        if not config_path.exists():
            return result

        model_name = result["model_name"]
        # If the config file changed for this model, reset the model package.
        if (model_name in self.config_recorder
                and self.config_recorder[model_name] != config_path):
            result["models"] = self.init_model_pkg()
        result["config"] = config_path
        self.config_recorder[model_name] = config_path

        result["visualdl_log"] = self.update_vdl_log(train_output)
        result["label_dict"] = self.update_label_dict(train_output)

        model = self.get_model(result["model_name"], config_path)
        # Glob for iteration-numbered checkpoints, e.g. iter_1000/model.pdparams.
        params_path_list = list(
            train_output.glob(".".join([
                self.get_ith_ckp_prefix("[0-9]*"),
                self.get_the_pdparams_suffix()
            ])))
        iter_ids = []
        for params_path in params_path_list:
            iter_id = self.get_epoch_id_by_pdparams_prefix(params_path)
            iter_ids.append(iter_id)
        iter_ids.sort()
        # TODO(gaotingquan): how to avoid picking up the latest ckpt files while
        # they are still being written
        # epoch_ids = epoch_ids[:-1]
        for i in range(1, self.last_k + 1):
            if len(iter_ids) < i:
                break
            self.update_models(result, model, train_output, f"last_{i}",
                               self.get_ith_ckp_prefix(iter_ids[-i]))
        self.update_models(result, model, train_output, "best",
                           self.get_best_ckp_prefix())
        return result
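
    # In short, update_result() registers the newest `last_k` iteration
    # checkpoints under keys "last_1" ... f"last_{last_k}" plus the "best"
    # checkpoint, delegating the per-checkpoint bookkeeping to update_models().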

    def update_models(self, result, model, train_output, model_key, ckp_prefix):
        """update info of the models to be saved"""
        pdparams = train_output.joinpath(".".join(
            [ckp_prefix, self.get_the_pdparams_suffix()]))
        if pdparams.exists():
            recorder_key = f"{train_output.name}_{model_key}"
            # Skip non-best checkpoints that have already been recorded.
            if (model_key != "best" and recorder_key in self.model_recorder
                    and self.model_recorder[recorder_key] == pdparams):
                return
            self.model_recorder[recorder_key] = pdparams

            pdema = ""
            pdema_suffix = self.get_the_pdema_suffix()
            if pdema_suffix:
                pdema = pdparams.parents[1].joinpath(".".join(
                    [ckp_prefix, pdema_suffix]))
                if not pdema.exists():
                    pdema = ""

            pdopt = ""
            pdopt_suffix = self.get_the_pdopt_suffix()
            if pdopt_suffix:
                pdopt = pdparams.parents[1].joinpath(".".join(
                    [ckp_prefix, pdopt_suffix]))
                if not pdopt.exists():
                    pdopt = ""

            pdstates = ""
            pdstates_suffix = self.get_the_pdstates_suffix()
            if pdstates_suffix:
                pdstates = pdparams.parents[1].joinpath(".".join(
                    [ckp_prefix, pdstates_suffix]))
                if not pdstates.exists():
                    pdstates = ""

            # Guard against an empty pdstates path: Path("").resolve() points at
            # the working directory, which exists, so get_score() would try to
            # paddle.load() a directory and fail.
            score = self.get_score(
                Path(pdstates).resolve().as_posix()) if pdstates else 0

            result["models"][model_key] = {
                "score": score,
                "pdparams": pdparams,
                "pdema": pdema,
                "pdopt": pdopt,
                "pdstates": pdstates
            }

            self.update_inference_model(model, pdparams,
                                        train_output.joinpath(ckp_prefix),
                                        result["models"][model_key])