train_deamon.py 13 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395
  1. # copyright (c) 2024 PaddlePaddle Authors. All Rights Reserve.
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. import os
  15. import sys
  16. import time
  17. import json
  18. import traceback
  19. import threading
  20. from abc import ABC, abstractmethod
  21. from pathlib import Path
  22. from ..build_model import build_model
  23. from ....utils.file_interface import write_json_file
  24. from ....utils import logging
  25. def try_except_decorator(func):
  26. """ try-except """
  27. def wrap(self, *args, **kwargs):
  28. try:
  29. func(self, *args, **kwargs)
  30. except Exception as e:
  31. exc_type, exc_value, exc_tb = sys.exc_info()
  32. self.save_json()
  33. traceback.logging.info_exception(exc_type, exc_value, exc_tb)
  34. finally:
  35. self.processing = False
  36. return wrap
class BaseTrainDeamon(ABC):
    """Daemon that watches a training output directory and mirrors its
    artifacts (config, logs, checkpoints, exported inference models) into
    ``train_result.json``.

    Subclasses implement the abstract ``get_*``/``init_*`` hooks describing
    the framework-specific checkpoint naming scheme.
    """
    # seconds between two consecutive collection passes of the watcher thread
    update_interval = 600
    # number of most recent checkpoints ("last_1".."last_k") to track
    last_k = 5

    def __init__(self, global_config):
        """Build the result skeleton, write the initial json, start watching.

        Args:
            global_config: configuration object; this class reads its
                ``output``, ``model`` and ``device`` attributes.
        """
        self.global_config = global_config
        self.init_pre_hook()
        self.output_dir = global_config.output
        self.train_outputs = self.get_train_outputs()
        self.save_paths = self.get_save_paths()
        self.results = self.init_train_result()
        # write the empty skeleton immediately so consumers can start polling
        self.save_json()
        self.models = {}
        self.init_post_hook()
        # remembers the last-seen config path per model (detects re-runs)
        self.config_recorder = {}
        # remembers the last-seen pdparams per checkpoint slot (skips no-ops)
        self.model_recorder = {}
        # True while an update pass is in flight; guards stop()
        self.processing = False
        # spawns the daemon thread running run()
        self.start()
  56. def init_train_result(self):
  57. """ init train result structure """
  58. model_names = self.init_model_names()
  59. configs = self.init_configs()
  60. train_log = self.init_train_log()
  61. vdl = self.init_vdl_log()
  62. results = []
  63. for i, model_name in enumerate(model_names):
  64. results.append({
  65. "model_name": model_name,
  66. "done_flag": False,
  67. "config": configs[i],
  68. "label_dict": "",
  69. "train_log": train_log,
  70. "visualdl_log": vdl,
  71. "models": self.init_model_pkg()
  72. })
  73. return results
  74. def get_save_names(self):
  75. """ get names to save """
  76. return ["train_result.json"]
  77. def get_train_outputs(self):
  78. """ get training outputs dir """
  79. return [Path(self.output_dir)]
  80. def init_model_names(self):
  81. """ get models name """
  82. return [self.global_config.model]
  83. def get_save_paths(self):
  84. """ get the path to save train_result.json """
  85. return [
  86. Path(self.output_dir, save_name)
  87. for save_name in self.get_save_names()
  88. ]
  89. def init_configs(self):
  90. """ get the init value of config field in result """
  91. return [""] * len(self.init_model_names())
  92. def init_train_log(self):
  93. """ get train log """
  94. return ""
  95. def init_vdl_log(self):
  96. """ get visualdl log """
  97. return ""
  98. def init_model_pkg(self):
  99. """ get model package """
  100. init_content = self.init_model_content()
  101. model_pkg = {}
  102. for pkg in self.get_watched_model():
  103. model_pkg[pkg] = init_content
  104. return model_pkg
  105. def normlize_path(self, dict_obj, relative_to):
  106. """ normlize path to string type path relative to the output_dir """
  107. for key in dict_obj:
  108. if isinstance(dict_obj[key], dict):
  109. self.normlize_path(dict_obj[key], relative_to)
  110. if isinstance(dict_obj[key], Path):
  111. dict_obj[key] = dict_obj[key].resolve().relative_to(
  112. relative_to.resolve()).as_posix()
  113. def save_json(self):
  114. """ save result to json """
  115. for i, result in enumerate(self.results):
  116. self.save_paths[i].parent.mkdir(parents=True, exist_ok=True)
  117. self.normlize_path(result, relative_to=self.save_paths[i].parent)
  118. write_json_file(result, self.save_paths[i], indent=2)
  119. def start(self):
  120. """ start deamon thread """
  121. self.exit = False
  122. self.thread = threading.Thread(target=self.run)
  123. self.thread.daemon = True
  124. self.thread.start()
  125. def stop_hook(self):
  126. """ hook befor stop """
  127. for result in self.results:
  128. result["done_flag"] = True
  129. self.update()
  130. def stop(self):
  131. """ stop self """
  132. self.exit = True
  133. while True:
  134. if not self.processing:
  135. self.stop_hook()
  136. break
  137. time.sleep(60)
  138. def run(self):
  139. """ main function """
  140. while not self.exit:
  141. self.update()
  142. if self.exit:
  143. break
  144. time.sleep(self.update_interval)
  145. def update_train_log(self, train_output):
  146. """ update train log """
  147. train_log_path = train_output / "train.log"
  148. if train_log_path.exists():
  149. return train_log_path
  150. def update_vdl_log(self, train_output):
  151. """ update visualdl log """
  152. vdl_path = list(train_output.glob("vdlrecords*log"))
  153. if len(vdl_path) >= 1:
  154. return vdl_path[0]
  155. def update_label_dict(self, train_output):
  156. """ update label dict """
  157. dict_path = train_output.joinpath("label_dict.txt")
  158. if not dict_path.exists():
  159. return ""
  160. return dict_path
  161. @try_except_decorator
  162. def update(self):
  163. """ update train result json """
  164. self.processing = True
  165. for i in range(len(self.results)):
  166. self.results[i] = self.update_result(self.results[i],
  167. self.train_outputs[i])
  168. self.save_json()
  169. self.processing = False
  170. def get_model(self, model_name, config_path):
  171. """ initialize the model """
  172. if model_name not in self.models:
  173. config, model = build_model(
  174. model_name,
  175. device=self.global_config.device,
  176. config_path=config_path)
  177. self.models[model_name] = model
  178. return self.models[model_name]
  179. def get_watched_model(self):
  180. """ get the models needed to be watched """
  181. watched_models = [f"last_{i}" for i in range(1, self.last_k + 1)]
  182. watched_models.append("best")
  183. return watched_models
  184. def init_model_content(self):
  185. """ get model content structure """
  186. return {
  187. "score": "",
  188. "pdparams": "",
  189. "pdema": "",
  190. "pdopt": "",
  191. "pdstates": "",
  192. "inference_config": "",
  193. "pdmodel": "",
  194. "pdiparams": "",
  195. "pdiparams.info": ""
  196. }
    def update_result(self, result, train_output):
        """Refresh one result entry from the files found in `train_output`.

        Collects the training config, logs, label dict, the checkpoints of
        the last_k most recent epochs and the best checkpoint.

        Args:
            result (dict): the entry to update (mutated and returned).
            train_output (path-like): this model's training output directory.

        Returns:
            dict: the updated `result` (unchanged if no config.yaml exists yet).
        """
        train_output = Path(train_output).resolve()
        config_path = train_output.joinpath("config.yaml").resolve()
        # nothing to do until training has produced its config file
        if not config_path.exists():
            return result
        model_name = result["model_name"]
        # a different config path means a fresh training run: reset the slots
        if model_name in self.config_recorder and self.config_recorder[
                model_name] != config_path:
            result["models"] = self.init_model_pkg()
        result["config"] = config_path
        self.config_recorder[model_name] = config_path
        result["train_log"] = self.update_train_log(train_output)
        result["visualdl_log"] = self.update_vdl_log(train_output)
        result["label_dict"] = self.update_label_dict(train_output)
        model = self.get_model(result["model_name"], config_path)
        # glob all per-epoch checkpoints, "<ith-prefix>.<pdparams-suffix>"
        params_path_list = list(
            train_output.glob(".".join([
                self.get_ith_ckp_prefix("[0-9]*"),
                self.get_the_pdparams_suffix()
            ])))
        epoch_ids = []
        for params_path in params_path_list:
            epoch_id = self.get_epoch_id_by_pdparams_prefix(params_path.stem)
            epoch_ids.append(epoch_id)
        epoch_ids.sort()
        # TODO(gaotingquan): how to avoid that the latest ckp files is being saved
        # epoch_ids = epoch_ids[:-1]
        # record the k most recent epochs as last_1 (newest) .. last_k
        for i in range(1, self.last_k + 1):
            if len(epoch_ids) < i:
                break
            self.update_models(result, model, train_output, f"last_{i}",
                               self.get_ith_ckp_prefix(epoch_ids[-i]))
        # "best" is refreshed unconditionally (see update_models)
        self.update_models(result, model, train_output, "best",
                           self.get_best_ckp_prefix())
        return result
    def update_models(self, result, model, train_output, model_key, ckp_prefix):
        """Record the checkpoint files named `ckp_prefix` into the
        `model_key` slot of result["models"] and export its inference model.

        Slots other than "best" are skipped when their pdparams file is the
        same path as in the previous pass; "best" is always refreshed (its
        file name is constant while its content may change).
        """
        pdparams = train_output.joinpath(".".join(
            [ckp_prefix, self.get_the_pdparams_suffix()]))
        if pdparams.exists():
            recorder_key = f"{train_output.name}_{model_key}"
            if model_key != "best" and recorder_key in self.model_recorder and self.model_recorder[
                    recorder_key] == pdparams:
                return
            self.model_recorder[recorder_key] = pdparams
            # companion files are optional: "" when the framework has no such
            # suffix or the file is missing
            pdema = ""
            pdema_suffix = self.get_the_pdema_suffix()
            if pdema_suffix:
                pdema = pdparams.parent.joinpath(".".join(
                    [ckp_prefix, pdema_suffix]))
                if not pdema.exists():
                    pdema = ""
            pdopt = ""
            pdopt_suffix = self.get_the_pdopt_suffix()
            if pdopt_suffix:
                pdopt = pdparams.parent.joinpath(".".join(
                    [ckp_prefix, pdopt_suffix]))
                if not pdopt.exists():
                    pdopt = ""
            pdstates = ""
            pdstates_suffix = self.get_the_pdstates_suffix()
            if pdstates_suffix:
                pdstates = pdparams.parent.joinpath(".".join(
                    [ckp_prefix, pdstates_suffix]))
                if not pdstates.exists():
                    pdstates = ""
            # NOTE(review): when pdstates is "", Path("").resolve() is the
            # current working directory, so get_score() receives the cwd path
            # — presumably subclasses tolerate a non-pdstates path; confirm.
            score = self.get_score(Path(pdstates).resolve().as_posix())
            result["models"][model_key] = {
                "score": score,
                "pdparams": pdparams,
                "pdema": pdema,
                "pdopt": pdopt,
                "pdstates": pdstates
            }
            # export next to the checkpoint, into a dir named after the prefix
            self.update_inference_model(model, pdparams,
                                        train_output.joinpath(f"{ckp_prefix}"),
                                        result["models"][model_key])
  275. def update_inference_model(self, model, weight_path, export_save_dir,
  276. result_the_model):
  277. """ update inference model """
  278. export_save_dir.mkdir(parents=True, exist_ok=True)
  279. export_result = model.export(
  280. weight_path=weight_path, save_dir=export_save_dir)
  281. if export_result.returncode == 0:
  282. inference_config = export_save_dir.joinpath("inference.yml")
  283. if not inference_config.exists():
  284. inference_config = ""
  285. pdmodel = export_save_dir.joinpath("inference.pdmodel")
  286. pdiparams = export_save_dir.joinpath("inference.pdiparams")
  287. pdiparams_info = export_save_dir.joinpath(
  288. "inference.pdiparams.info")
  289. else:
  290. inference_config = ""
  291. pdmodel = ""
  292. pdiparams = ""
  293. pdiparams_info = ""
  294. result_the_model["inference_config"] = inference_config
  295. result_the_model["pdmodel"] = pdmodel
  296. result_the_model["pdiparams"] = pdiparams
  297. result_the_model["pdiparams.info"] = pdiparams_info
  298. def init_pre_hook(self):
  299. """ hook func that would be called befor init """
  300. pass
  301. def init_post_hook(self):
  302. """ hook func that would be called after init """
  303. pass
    @abstractmethod
    def get_the_pdparams_suffix(self):
        """Return the file suffix of weights (pdparams) files."""
        raise NotImplementedError

    @abstractmethod
    def get_the_pdema_suffix(self):
        """Return the suffix of EMA-weights files, or a falsy value when the
        framework saves none."""
        raise NotImplementedError

    @abstractmethod
    def get_the_pdopt_suffix(self):
        """Return the suffix of optimizer-state (pdopt) files, or a falsy
        value when the framework saves none."""
        raise NotImplementedError

    @abstractmethod
    def get_the_pdstates_suffix(self):
        """Return the suffix of metric-state (pdstates) files, or a falsy
        value when the framework saves none."""
        raise NotImplementedError

    @abstractmethod
    def get_ith_ckp_prefix(self, epoch_id):
        """Return the checkpoint file prefix for the given epoch_id (also
        called with the glob pattern "[0-9]*")."""
        raise NotImplementedError

    @abstractmethod
    def get_best_ckp_prefix(self):
        """Return the file prefix of the best checkpoint."""
        raise NotImplementedError

    @abstractmethod
    def get_score(self, pdstates_path):
        """Return the evaluation score read from the given pdstates path."""
        raise NotImplementedError

    @abstractmethod
    def get_epoch_id_by_pdparams_prefix(self, pdparams_prefix):
        """Return the epoch id parsed from a pdparams file stem."""
        raise NotImplementedError