train_deamon.py

# copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import time
import json
import traceback
import threading
from abc import ABC, abstractmethod
from pathlib import Path

import paddle

from ..build_model import build_model
from ....utils.file_interface import write_json_file
from ....utils import logging


def try_except_decorator(func):
    """ catch exceptions, save the current results and reset the processing flag """

    def wrap(self, *args, **kwargs):
        try:
            func(self, *args, **kwargs)
        except Exception:
            exc_type, exc_value, exc_tb = sys.exc_info()
            self.save_json()
            traceback.print_exception(exc_type, exc_value, exc_tb)
        finally:
            self.processing = False

    return wrap

class BaseTrainDeamon(ABC):
    """ base class of the daemon that periodically collects training results """

    update_interval = 600
    last_k = 5

    def __init__(self, config):
        """ init """
        self.global_config = config.Global
        self.disable_deamon = config.get("Benchmark", {}).get(
            "disable_deamon", False)
        self.init_pre_hook()
        self.output = self.global_config.output
        self.train_outputs = self.get_train_outputs()
        self.save_paths = self.get_save_paths()
        self.results = self.init_train_result()
        self.save_json()
        self.models = {}
        self.init_post_hook()
        self.config_recorder = {}
        self.model_recorder = {}
        self.processing = False
        self.start()
    def init_train_result(self):
        """ init train result structure """
        model_names = self.init_model_names()
        configs = self.init_configs()
        train_log = self.init_train_log()
        vdl = self.init_vdl_log()
        results = []
        for i, model_name in enumerate(model_names):
            results.append({
                "model_name": model_name,
                "done_flag": False,
                "config": configs[i],
                "label_dict": "",
                "train_log": train_log,
                "visualdl_log": vdl,
                "models": self.init_model_pkg()
            })
        return results
    def get_save_names(self):
        """ get names to save """
        return ["train_result.json"]

    def get_train_outputs(self):
        """ get training output dirs """
        return [Path(self.output)]

    def init_model_names(self):
        """ get model names """
        return [self.global_config.model]

    def get_save_paths(self):
        """ get the paths to save train_result.json """
        return [
            Path(self.output, save_name) for save_name in self.get_save_names()
        ]

    def init_configs(self):
        """ get the init value of the config field in result """
        return [""] * len(self.init_model_names())

    def init_train_log(self):
        """ get the init value of the train log field """
        return ""

    def init_vdl_log(self):
        """ get the init value of the visualdl log field """
        return ""
    def init_model_pkg(self):
        """ get the initial model package structure """
        model_pkg = {}
        for pkg in self.get_watched_model():
            # use a fresh dict per watched model so the entries do not share state
            model_pkg[pkg] = self.init_model_content()
        return model_pkg
    def normlize_path(self, dict_obj, relative_to):
        """ recursively convert Path values to POSIX string paths relative to the output dir """
        for key in dict_obj:
            if isinstance(dict_obj[key], dict):
                self.normlize_path(dict_obj[key], relative_to)
            if isinstance(dict_obj[key], Path):
                dict_obj[key] = dict_obj[key].resolve().relative_to(
                    relative_to.resolve()).as_posix()

    def save_json(self):
        """ save the results to json files """
        for i, result in enumerate(self.results):
            self.save_paths[i].parent.mkdir(parents=True, exist_ok=True)
            self.normlize_path(result, relative_to=self.save_paths[i].parent)
            write_json_file(result, self.save_paths[i], indent=2)
    def start(self):
        """ start the daemon thread """
        self.exit = False
        self.thread = threading.Thread(target=self.run)
        self.thread.daemon = True
        if not self.disable_deamon:
            self.thread.start()

    def stop_hook(self):
        """ hook before stop """
        for result in self.results:
            result["done_flag"] = True
        self.update()

    def stop(self):
        """ stop self """
        self.exit = True
        while True:
            if not self.processing:
                self.stop_hook()
                break
            time.sleep(60)

    def run(self):
        """ main loop: update the result json periodically until exit """
        while not self.exit:
            self.update()
            if self.exit:
                break
            time.sleep(self.update_interval)
    def update_train_log(self, train_output):
        """ update train log """
        train_log_path = train_output / "train.log"
        if train_log_path.exists():
            return train_log_path

    def update_vdl_log(self, train_output):
        """ update visualdl log """
        vdl_path = list(train_output.glob("vdlrecords*log"))
        if len(vdl_path) >= 1:
            return vdl_path[0]

    def update_label_dict(self, train_output):
        """ update label dict """
        dict_path = train_output.joinpath("label_dict.txt")
        if not dict_path.exists():
            return ""
        return dict_path

    @try_except_decorator
    def update(self):
        """ update train result json """
        self.processing = True
        for i in range(len(self.results)):
            self.results[i] = self.update_result(self.results[i],
                                                 self.train_outputs[i])
        self.save_json()
        self.processing = False
    def get_model(self, model_name, config_path):
        """ initialize the model """
        if model_name not in self.models:
            config, model = build_model(
                model_name,
                # using CPU to export model
                device="cpu",
                config_path=config_path)
            self.models[model_name] = model
        return self.models[model_name]

    def get_watched_model(self):
        """ get the models needed to be watched """
        watched_models = [f"last_{i}" for i in range(1, self.last_k + 1)]
        watched_models.append("best")
        return watched_models

    def init_model_content(self):
        """ get model content structure """
        return {
            "score": "",
            "pdparams": "",
            "pdema": "",
            "pdopt": "",
            "pdstates": "",
            "inference_config": "",
            "pdmodel": "",
            "pdiparams": "",
            "pdiparams.info": ""
        }
    def update_result(self, result, train_output):
        """ update every result """
        train_output = Path(train_output).resolve()
        config_path = train_output.joinpath("config.yaml").resolve()
        if not config_path.exists():
            return result
        model_name = result["model_name"]
        if model_name in self.config_recorder and self.config_recorder[
                model_name] != config_path:
            result["models"] = self.init_model_pkg()
        result["config"] = config_path
        self.config_recorder[model_name] = config_path

        result["train_log"] = self.update_train_log(train_output)
        result["visualdl_log"] = self.update_vdl_log(train_output)
        result["label_dict"] = self.update_label_dict(train_output)

        model = self.get_model(result["model_name"], config_path)
        params_path_list = list(
            train_output.glob(".".join([
                self.get_ith_ckp_prefix("[0-9]*"),
                self.get_the_pdparams_suffix()
            ])))
        epoch_ids = []
        for params_path in params_path_list:
            epoch_id = self.get_epoch_id_by_pdparams_prefix(params_path.stem)
            epoch_ids.append(epoch_id)
        epoch_ids.sort()
        # TODO(gaotingquan): how to avoid picking up the latest ckp files while they are still being saved
        # epoch_ids = epoch_ids[:-1]
        for i in range(1, self.last_k + 1):
            if len(epoch_ids) < i:
                break
            self.update_models(result, model, train_output, f"last_{i}",
                               self.get_ith_ckp_prefix(epoch_ids[-i]))
        self.update_models(result, model, train_output, "best",
                           self.get_best_ckp_prefix())
        return result
    def update_models(self, result, model, train_output, model_key, ckp_prefix):
        """ update info of the models to be saved """
        pdparams = train_output.joinpath(".".join(
            [ckp_prefix, self.get_the_pdparams_suffix()]))
        if pdparams.exists():
            recorder_key = f"{train_output.name}_{model_key}"
            if model_key != "best" and recorder_key in self.model_recorder and self.model_recorder[
                    recorder_key] == pdparams:
                return
            self.model_recorder[recorder_key] = pdparams

            pdema = ""
            pdema_suffix = self.get_the_pdema_suffix()
            if pdema_suffix:
                pdema = pdparams.parent.joinpath(".".join(
                    [ckp_prefix, pdema_suffix]))
                if not pdema.exists():
                    pdema = ""

            pdopt = ""
            pdopt_suffix = self.get_the_pdopt_suffix()
            if pdopt_suffix:
                pdopt = pdparams.parent.joinpath(".".join(
                    [ckp_prefix, pdopt_suffix]))
                if not pdopt.exists():
                    pdopt = ""

            pdstates = ""
            pdstates_suffix = self.get_the_pdstates_suffix()
            if pdstates_suffix:
                pdstates = pdparams.parent.joinpath(".".join(
                    [ckp_prefix, pdstates_suffix]))
                if not pdstates.exists():
                    pdstates = ""

            score = self.get_score(Path(pdstates).resolve().as_posix())

            result["models"][model_key] = {
                "score": score,
                "pdparams": pdparams,
                "pdema": pdema,
                "pdopt": pdopt,
                "pdstates": pdstates
            }
            self.update_inference_model(model, pdparams,
                                        train_output.joinpath(f"{ckp_prefix}"),
                                        result["models"][model_key])
    def update_inference_model(self, model, weight_path, export_save_dir,
                               result_the_model):
        """ update inference model """
        export_save_dir.mkdir(parents=True, exist_ok=True)
        export_result = model.export(
            weight_path=weight_path, save_dir=export_save_dir)

        if export_result.returncode == 0:
            inference_config = export_save_dir.joinpath("inference.yml")
            if not inference_config.exists():
                inference_config = ""
            use_pir = hasattr(paddle.framework,
                              "use_pir_api") and paddle.framework.use_pir_api()
            pdmodel = export_save_dir.joinpath(
                "inference.json") if use_pir else export_save_dir.joinpath(
                    "inference.pdmodel")
            pdiparams = export_save_dir.joinpath("inference.pdiparams")
            pdiparams_info = "" if use_pir else export_save_dir.joinpath(
                "inference.pdiparams.info")
        else:
            inference_config = ""
            pdmodel = ""
            pdiparams = ""
            pdiparams_info = ""

        result_the_model["inference_config"] = inference_config
        result_the_model["pdmodel"] = pdmodel
        result_the_model["pdiparams"] = pdiparams
        result_the_model["pdiparams.info"] = pdiparams_info
    def init_pre_hook(self):
        """ hook func that would be called before init """
        pass

    def init_post_hook(self):
        """ hook func that would be called after init """
        pass
    @abstractmethod
    def get_the_pdparams_suffix(self):
        """ get the suffix of the pdparams file """
        raise NotImplementedError

    @abstractmethod
    def get_the_pdema_suffix(self):
        """ get the suffix of the pdema file """
        raise NotImplementedError

    @abstractmethod
    def get_the_pdopt_suffix(self):
        """ get the suffix of the pdopt file """
        raise NotImplementedError

    @abstractmethod
    def get_the_pdstates_suffix(self):
        """ get the suffix of the pdstates file """
        raise NotImplementedError

    @abstractmethod
    def get_ith_ckp_prefix(self, epoch_id):
        """ get the prefix of the epoch_id checkpoint file """
        raise NotImplementedError

    @abstractmethod
    def get_best_ckp_prefix(self):
        """ get the prefix of the best checkpoint file """
        raise NotImplementedError

    @abstractmethod
    def get_score(self, pdstates_path):
        """ get the score from the pdstates file """
        raise NotImplementedError

    @abstractmethod
    def get_epoch_id_by_pdparams_prefix(self, pdparams_prefix):
        """ get the epoch_id from the pdparams file name """
        raise NotImplementedError
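

# ---------------------------------------------------------------------------
# Illustrative sketch: a minimal subclass showing how the abstract hooks above
# might be filled in. The suffixes ("pdparams", "pdopt", "pdstates"), the
# checkpoint prefixes ("epoch_<id>", "best_model"), and the "metric" key read
# from the pdstates file are assumptions for illustration only and may differ
# between training suites.
# ---------------------------------------------------------------------------
class ExampleTrainDeamon(BaseTrainDeamon):
    """ example daemon watching checkpoints like `epoch_10.pdparams` / `best_model.pdparams` """

    def get_the_pdparams_suffix(self):
        return "pdparams"

    def get_the_pdema_suffix(self):
        # return "" to skip EMA weights entirely (update_models checks truthiness)
        return ""

    def get_the_pdopt_suffix(self):
        return "pdopt"

    def get_the_pdstates_suffix(self):
        return "pdstates"

    def get_ith_ckp_prefix(self, epoch_id):
        # also used with the glob wildcard "[0-9]*" to list all epoch checkpoints
        return f"epoch_{epoch_id}"

    def get_best_ckp_prefix(self):
        return "best_model"

    def get_score(self, pdstates_path):
        """ read the evaluation score from a pdstates file, assumed here to be JSON """
        pdstates_path = Path(pdstates_path)
        if not pdstates_path.is_file():
            return ""
        with open(pdstates_path, "r", encoding="utf-8") as f:
            # "metric" is an assumed key name; real suites may store the score differently
            return json.load(f).get("metric", "")

    def get_epoch_id_by_pdparams_prefix(self, pdparams_prefix):
        # e.g. "epoch_12" -> 12
        return int(pdparams_prefix.replace("epoch_", ""))


# Note: constructing such a daemon with a config object starts the watcher
# thread immediately (see __init__/start), polling the output directory every
# `update_interval` seconds until stop() is called.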