# train_deamon.py
  1. # copyright (c) 2024 PaddlePaddle Authors. All Rights Reserve.
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. import os
  15. import sys
  16. import time
  17. import json
  18. import traceback
  19. import threading
  20. from abc import ABC, abstractmethod
  21. from pathlib import Path
  22. import paddle
  23. from ..build_model import build_model
  24. from ....utils.file_interface import write_json_file
  25. from ....utils import logging
  26. def try_except_decorator(func):
  27. """ try-except """
  28. def wrap(self, *args, **kwargs):
  29. try:
  30. func(self, *args, **kwargs)
  31. except Exception as e:
  32. exc_type, exc_value, exc_tb = sys.exc_info()
  33. self.save_json()
  34. traceback.print_exception(exc_type, exc_value, exc_tb)
  35. finally:
  36. self.processing = False
  37. return wrap
  38. class BaseTrainDeamon(ABC):
  39. """ BaseTrainResultDemon """
  40. update_interval = 600
  41. last_k = 5
  42. def __init__(self, global_config):
  43. """ init """
  44. self.global_config = global_config
  45. self.init_pre_hook()
  46. self.output = global_config.output
  47. self.train_outputs = self.get_train_outputs()
  48. self.save_paths = self.get_save_paths()
  49. self.results = self.init_train_result()
  50. self.save_json()
  51. self.models = {}
  52. self.init_post_hook()
  53. self.config_recorder = {}
  54. self.model_recorder = {}
  55. self.processing = False
  56. self.start()
  57. def init_train_result(self):
  58. """ init train result structure """
  59. model_names = self.init_model_names()
  60. configs = self.init_configs()
  61. train_log = self.init_train_log()
  62. vdl = self.init_vdl_log()
  63. results = []
  64. for i, model_name in enumerate(model_names):
  65. results.append({
  66. "model_name": model_name,
  67. "done_flag": False,
  68. "config": configs[i],
  69. "label_dict": "",
  70. "train_log": train_log,
  71. "visualdl_log": vdl,
  72. "models": self.init_model_pkg()
  73. })
  74. return results
  75. def get_save_names(self):
  76. """ get names to save """
  77. return ["train_result.json"]
  78. def get_train_outputs(self):
  79. """ get training outputs dir """
  80. return [Path(self.output)]
  81. def init_model_names(self):
  82. """ get models name """
  83. return [self.global_config.model]
  84. def get_save_paths(self):
  85. """ get the path to save train_result.json """
  86. return [
  87. Path(self.output, save_name) for save_name in self.get_save_names()
  88. ]
  89. def init_configs(self):
  90. """ get the init value of config field in result """
  91. return [""] * len(self.init_model_names())
  92. def init_train_log(self):
  93. """ get train log """
  94. return ""
  95. def init_vdl_log(self):
  96. """ get visualdl log """
  97. return ""
  98. def init_model_pkg(self):
  99. """ get model package """
  100. init_content = self.init_model_content()
  101. model_pkg = {}
  102. for pkg in self.get_watched_model():
  103. model_pkg[pkg] = init_content
  104. return model_pkg
  105. def normlize_path(self, dict_obj, relative_to):
  106. """ normlize path to string type path relative to the output """
  107. for key in dict_obj:
  108. if isinstance(dict_obj[key], dict):
  109. self.normlize_path(dict_obj[key], relative_to)
  110. if isinstance(dict_obj[key], Path):
  111. dict_obj[key] = dict_obj[key].resolve().relative_to(
  112. relative_to.resolve()).as_posix()
  113. def save_json(self):
  114. """ save result to json """
  115. for i, result in enumerate(self.results):
  116. self.save_paths[i].parent.mkdir(parents=True, exist_ok=True)
  117. self.normlize_path(result, relative_to=self.save_paths[i].parent)
  118. write_json_file(result, self.save_paths[i], indent=2)
  119. def start(self):
  120. """ start deamon thread """
  121. self.exit = False
  122. self.thread = threading.Thread(target=self.run)
  123. self.thread.daemon = True
  124. self.thread.start()
  125. def stop_hook(self):
  126. """ hook befor stop """
  127. for result in self.results:
  128. result["done_flag"] = True
  129. self.update()
  130. def stop(self):
  131. """ stop self """
  132. self.exit = True
  133. while True:
  134. if not self.processing:
  135. self.stop_hook()
  136. break
  137. time.sleep(60)
  138. def run(self):
  139. """ main function """
  140. while not self.exit:
  141. self.update()
  142. if self.exit:
  143. break
  144. time.sleep(self.update_interval)
  145. def update_train_log(self, train_output):
  146. """ update train log """
  147. train_log_path = train_output / "train.log"
  148. if train_log_path.exists():
  149. return train_log_path
  150. def update_vdl_log(self, train_output):
  151. """ update visualdl log """
  152. vdl_path = list(train_output.glob("vdlrecords*log"))
  153. if len(vdl_path) >= 1:
  154. return vdl_path[0]
  155. def update_label_dict(self, train_output):
  156. """ update label dict """
  157. dict_path = train_output.joinpath("label_dict.txt")
  158. if not dict_path.exists():
  159. return ""
  160. return dict_path
  161. @try_except_decorator
  162. def update(self):
  163. """ update train result json """
  164. self.processing = True
  165. for i in range(len(self.results)):
  166. self.results[i] = self.update_result(self.results[i],
  167. self.train_outputs[i])
  168. self.save_json()
  169. self.processing = False
  170. def get_model(self, model_name, config_path):
  171. """ initialize the model """
  172. if model_name not in self.models:
  173. config, model = build_model(
  174. model_name,
  175. # using CPU to export model
  176. device="cpu",
  177. config_path=config_path)
  178. self.models[model_name] = model
  179. return self.models[model_name]
  180. def get_watched_model(self):
  181. """ get the models needed to be watched """
  182. watched_models = [f"last_{i}" for i in range(1, self.last_k + 1)]
  183. watched_models.append("best")
  184. return watched_models
  185. def init_model_content(self):
  186. """ get model content structure """
  187. return {
  188. "score": "",
  189. "pdparams": "",
  190. "pdema": "",
  191. "pdopt": "",
  192. "pdstates": "",
  193. "inference_config": "",
  194. "pdmodel": "",
  195. "pdiparams": "",
  196. "pdiparams.info": ""
  197. }
  198. def update_result(self, result, train_output):
  199. """ update every result """
  200. train_output = Path(train_output).resolve()
  201. config_path = train_output.joinpath("config.yaml").resolve()
  202. if not config_path.exists():
  203. return result
  204. model_name = result["model_name"]
  205. if model_name in self.config_recorder and self.config_recorder[
  206. model_name] != config_path:
  207. result["models"] = self.init_model_pkg()
  208. result["config"] = config_path
  209. self.config_recorder[model_name] = config_path
  210. result["train_log"] = self.update_train_log(train_output)
  211. result["visualdl_log"] = self.update_vdl_log(train_output)
  212. result["label_dict"] = self.update_label_dict(train_output)
  213. model = self.get_model(result["model_name"], config_path)
  214. params_path_list = list(
  215. train_output.glob(".".join([
  216. self.get_ith_ckp_prefix("[0-9]*"), self.get_the_pdparams_suffix(
  217. )
  218. ])))
  219. epoch_ids = []
  220. for params_path in params_path_list:
  221. epoch_id = self.get_epoch_id_by_pdparams_prefix(params_path.stem)
  222. epoch_ids.append(epoch_id)
  223. epoch_ids.sort()
  224. # TODO(gaotingquan): how to avoid that the latest ckp files is being saved
  225. # epoch_ids = epoch_ids[:-1]
  226. for i in range(1, self.last_k + 1):
  227. if len(epoch_ids) < i:
  228. break
  229. self.update_models(result, model, train_output, f"last_{i}",
  230. self.get_ith_ckp_prefix(epoch_ids[-i]))
  231. self.update_models(result, model, train_output, "best",
  232. self.get_best_ckp_prefix())
  233. return result
  234. def update_models(self, result, model, train_output, model_key, ckp_prefix):
  235. """ update info of the models to be saved """
  236. pdparams = train_output.joinpath(".".join(
  237. [ckp_prefix, self.get_the_pdparams_suffix()]))
  238. if pdparams.exists():
  239. recorder_key = f"{train_output.name}_{model_key}"
  240. if model_key != "best" and recorder_key in self.model_recorder and self.model_recorder[
  241. recorder_key] == pdparams:
  242. return
  243. self.model_recorder[recorder_key] = pdparams
  244. pdema = ""
  245. pdema_suffix = self.get_the_pdema_suffix()
  246. if pdema_suffix:
  247. pdema = pdparams.parent.joinpath(".".join(
  248. [ckp_prefix, pdema_suffix]))
  249. if not pdema.exists():
  250. pdema = ""
  251. pdopt = ""
  252. pdopt_suffix = self.get_the_pdopt_suffix()
  253. if pdopt_suffix:
  254. pdopt = pdparams.parent.joinpath(".".join(
  255. [ckp_prefix, pdopt_suffix]))
  256. if not pdopt.exists():
  257. pdopt = ""
  258. pdstates = ""
  259. pdstates_suffix = self.get_the_pdstates_suffix()
  260. if pdstates_suffix:
  261. pdstates = pdparams.parent.joinpath(".".join(
  262. [ckp_prefix, pdstates_suffix]))
  263. if not pdstates.exists():
  264. pdstates = ""
  265. score = self.get_score(Path(pdstates).resolve().as_posix())
  266. result["models"][model_key] = {
  267. "score": score,
  268. "pdparams": pdparams,
  269. "pdema": pdema,
  270. "pdopt": pdopt,
  271. "pdstates": pdstates
  272. }
  273. self.update_inference_model(model, pdparams,
  274. train_output.joinpath(f"{ckp_prefix}"),
  275. result["models"][model_key])
  276. def update_inference_model(self, model, weight_path, export_save_dir,
  277. result_the_model):
  278. """ update inference model """
  279. export_save_dir.mkdir(parents=True, exist_ok=True)
  280. export_result = model.export(
  281. weight_path=weight_path, save_dir=export_save_dir)
  282. if export_result.returncode == 0:
  283. inference_config = export_save_dir.joinpath("inference.yml")
  284. if not inference_config.exists():
  285. inference_config = ""
  286. use_pir = hasattr(paddle.framework, "use_pir_api") and paddle.framework.use_pir_api()
  287. pdmodel = export_save_dir.joinpath("inference.json") if use_pir else export_save_dir.joinpath("inference.pdmodel")
  288. pdiparams = export_save_dir.joinpath("inference.pdiparams")
  289. pdiparams_info = "" if use_pir else export_save_dir.joinpath("inference.pdiparams.info")
  290. else:
  291. inference_config = ""
  292. pdmodel = ""
  293. pdiparams = ""
  294. pdiparams_info = ""
  295. result_the_model["inference_config"] = inference_config
  296. result_the_model["pdmodel"] = pdmodel
  297. result_the_model["pdiparams"] = pdiparams
  298. result_the_model["pdiparams.info"] = pdiparams_info
  299. def init_pre_hook(self):
  300. """ hook func that would be called befor init """
  301. pass
  302. def init_post_hook(self):
  303. """ hook func that would be called after init """
  304. pass
  305. @abstractmethod
  306. def get_the_pdparams_suffix(self):
  307. """ get the suffix of pdparams file """
  308. raise NotImplementedError
  309. @abstractmethod
  310. def get_the_pdema_suffix(self):
  311. """ get the suffix of pdema file """
  312. raise NotImplementedError
  313. @abstractmethod
  314. def get_the_pdopt_suffix(self):
  315. """ get the suffix of pdopt file """
  316. raise NotImplementedError
  317. @abstractmethod
  318. def get_the_pdstates_suffix(self):
  319. """ get the suffix of pdstates file """
  320. raise NotImplementedError
  321. @abstractmethod
  322. def get_ith_ckp_prefix(self, epoch_id):
  323. """ get the prefix of the epoch_id checkpoint file """
  324. raise NotImplementedError
  325. @abstractmethod
  326. def get_best_ckp_prefix(self):
  327. """ get the prefix of the best checkpoint file """
  328. raise NotImplementedError
  329. @abstractmethod
  330. def get_score(self, pdstates_path):
  331. """ get the score by pdstates file """
  332. raise NotImplementedError
  333. @abstractmethod
  334. def get_epoch_id_by_pdparams_prefix(self, pdparams_prefix):
  335. """ get the epoch_id by pdparams file """
  336. raise NotImplementedError