trainer.py

# copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import glob
from pathlib import Path

import paddle

from ..base import BaseTrainer, BaseTrainDeamon
from ...utils.config import AttrDict
from .model_list import MODELS


class SegTrainer(BaseTrainer):
    """Semantic Segmentation Model Trainer"""

    entities = MODELS

    def build_deamon(self, config: AttrDict) -> "SegTrainDeamon":
        """Build the daemon thread that saves training outputs in a timely manner.

        Args:
            config (AttrDict): PaddleX pipeline config, which is loaded from the pipeline yaml file.

        Returns:
            SegTrainDeamon: the training daemon thread object that saves training outputs in a timely manner.
        """
        return SegTrainDeamon(config)

    def update_config(self):
        """Update the training config."""
        self.pdx_config.update_dataset(self.global_config.dataset_dir,
                                       "SegDataset")
        if self.train_config.num_classes is not None:
            self.pdx_config.update_num_classes(self.train_config.num_classes)
        if self.train_config.pretrain_weight_path and self.train_config.pretrain_weight_path != "":
            self.pdx_config.update_pretrained_weights(
                self.train_config.pretrain_weight_path, is_backbone=True)
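
    # Illustrative sketch (not from this file) of the config fields that
    # update_config() above and get_train_kwargs() below read. The section
    # names and values are assumptions about the pipeline yaml, shown only to
    # make the attribute accesses concrete:
    #
    #   Global:
    #     dataset_dir: ./dataset/seg_demo   # hypothetical path
    #     output: ./output
    #   Train:
    #     num_classes: 2
    #     pretrain_weight_path: ""
    #     batch_size: 2
    #     learning_rate: 0.01
    #     epochs_iters: 1000
    #     log_interval: 10
    #     eval_interval: 100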

    def get_train_kwargs(self) -> dict:
        """Get the key-value arguments of the model training function.

        Returns:
            dict: the arguments of the training function.
        """
        train_args = {"device": self.get_device()}
        # XXX: remove FLAGS_npu_jit_compile from the environment so that the
        # default JIT-compile behavior is used on NPU devices.
        os.environ.pop("FLAGS_npu_jit_compile", None)
        if self.train_config.batch_size is not None:
            train_args["batch_size"] = self.train_config.batch_size
        if self.train_config.learning_rate is not None:
            train_args["learning_rate"] = self.train_config.learning_rate
        if self.train_config.epochs_iters is not None:
            train_args["epochs_iters"] = self.train_config.epochs_iters
        if self.train_config.resume_path is not None and self.train_config.resume_path != "":
            train_args["resume_path"] = self.train_config.resume_path
        if self.global_config.output is not None:
            train_args["save_dir"] = self.global_config.output
        if self.train_config.log_interval:
            train_args["log_iters"] = self.train_config.log_interval
        if self.train_config.eval_interval:
            train_args["do_eval"] = True
            train_args["save_interval"] = self.train_config.eval_interval
        return train_args
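
    # For reference, a hypothetical dict returned by get_train_kwargs() when
    # every optional field above is set (all values illustrative only):
    #
    #   {
    #       "device": "gpu:0",
    #       "batch_size": 2,
    #       "learning_rate": 0.01,
    #       "epochs_iters": 1000,
    #       "save_dir": "./output",
    #       "log_iters": 10,
    #       "do_eval": True,
    #       "save_interval": 100,
    #   }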


class SegTrainDeamon(BaseTrainDeamon):
    """Daemon for collecting semantic segmentation training results."""

    last_k = 1

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def get_the_pdparams_suffix(self):
        """Get the suffix of the pdparams file."""
        return "pdparams"

    def get_the_pdema_suffix(self):
        """Get the suffix of the pdema file."""
        return "pdema"

    def get_the_pdopt_suffix(self):
        """Get the suffix of the pdopt file."""
        return "pdopt"

    def get_the_pdstates_suffix(self):
        """Get the suffix of the pdstates file."""
        return "pdstates"

    def get_ith_ckp_prefix(self, epoch_id):
        """Get the prefix of the epoch_id checkpoint file."""
        return f"iter_{epoch_id}/model"

    def get_best_ckp_prefix(self):
        """Get the prefix of the best checkpoint file."""
        return "best_model/model"

    def get_score(self, pdstates_path):
        """Get the score from a pdstates file."""
        if not Path(pdstates_path).exists():
            return 0
        return paddle.load(pdstates_path)["mIoU"]

    def get_epoch_id_by_pdparams_prefix(self, pdparams_dir):
        """Get the epoch/iteration id from a pdparams file path."""
        return int(pdparams_dir.parent.name.split("_")[-1])
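
    # Example (illustrative, not from the original file): with the prefix
    # scheme above, the checkpoint saved at iteration 1000 is
    # "<train_output>/iter_1000/model.pdparams"; its parent directory name is
    # "iter_1000", so split("_")[-1] recovers the iteration id 1000.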

    def update_result(self, result, train_output):
        """Update every result."""
        train_output = Path(train_output).resolve()
        config_path = train_output.joinpath("config.yaml").resolve()
        if not config_path.exists():
            return result

        model_name = result["model_name"]
        if model_name in self.config_recorder and self.config_recorder[
                model_name] != config_path:
            result["models"] = self.init_model_pkg()
        result["config"] = config_path
        self.config_recorder[model_name] = config_path
        result["visualdl_log"] = self.update_vdl_log(train_output)
        result["label_dict"] = self.update_label_dict(train_output)

        model = self.get_model(result["model_name"], config_path)
        params_path_list = list(
            train_output.glob(".".join([
                self.get_ith_ckp_prefix("[0-9]*"),
                self.get_the_pdparams_suffix()
            ])))
        iter_ids = []
        for params_path in params_path_list:
            iter_id = self.get_epoch_id_by_pdparams_prefix(params_path)
            iter_ids.append(iter_id)
        iter_ids.sort()
        # TODO(gaotingquan): how to avoid collecting the latest ckpt files
        # while they are still being written
        # epoch_ids = epoch_ids[:-1]
        for i in range(1, self.last_k + 1):
            if len(iter_ids) < i:
                break
            self.update_models(result, model, train_output, f"last_{i}",
                               self.get_ith_ckp_prefix(iter_ids[-i]))
        self.update_models(result, model, train_output, "best",
                           self.get_best_ckp_prefix())
        return result
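
    # Sketch of the train_output layout that update_result() assumes, with
    # hypothetical iteration ids; the glob above matches the "iter_*" entries:
    #
    #   train_output/
    #       config.yaml
    #       iter_500/model.pdparams        (+ model.pdopt, model.pdstates, ...)
    #       iter_1000/model.pdparams
    #       best_model/model.pdparams
    #       best_model/model.pdstates      # stores the "mIoU" read by get_score()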

    def update_models(self, result, model, train_output, model_key,
                      ckp_prefix):
        """Update the info of the models to be saved."""
        pdparams = train_output.joinpath(".".join(
            [ckp_prefix, self.get_the_pdparams_suffix()]))
        if pdparams.exists():
            recorder_key = f"{train_output.name}_{model_key}"
            if model_key != "best" and recorder_key in self.model_recorder and self.model_recorder[
                    recorder_key] == pdparams:
                return
            self.model_recorder[recorder_key] = pdparams

            pdema = ""
            pdema_suffix = self.get_the_pdema_suffix()
            if pdema_suffix:
                pdema = pdparams.parents[1].joinpath(".".join(
                    [ckp_prefix, pdema_suffix]))
                if not pdema.exists():
                    pdema = ""

            pdopt = ""
            pdopt_suffix = self.get_the_pdopt_suffix()
            if pdopt_suffix:
                pdopt = pdparams.parents[1].joinpath(".".join(
                    [ckp_prefix, pdopt_suffix]))
                if not pdopt.exists():
                    pdopt = ""

            pdstates = ""
            pdstates_suffix = self.get_the_pdstates_suffix()
            if pdstates_suffix:
                pdstates = pdparams.parents[1].joinpath(".".join(
                    [ckp_prefix, pdstates_suffix]))
                if not pdstates.exists():
                    pdstates = ""

            score = self.get_score(Path(pdstates).resolve().as_posix())

            result["models"][model_key] = {
                "score": score,
                "pdparams": pdparams,
                "pdema": pdema,
                "pdopt": pdopt,
                "pdstates": pdstates
            }

            self.update_inference_model(model, pdparams,
                                        train_output.joinpath(f"{ckp_prefix}"),
                                        result["models"][model_key])