# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import tarfile
from pathlib import Path

from ...utils.flags import FLAGS_json_format_model
from ..base import BaseTrainer
from .model_list import MODELS
  20. class TSADTrainer(BaseTrainer):
  21. """TS Anomaly Detection Model Trainer"""
  22. entities = MODELS
  23. def train(self):
  24. """firstly, update and dump train config, then train model"""
  25. # XXX: using super().train() instead when the train_hook() is supported.
  26. os.makedirs(self.global_config.output, exist_ok=True)
  27. self.update_config()
  28. self.dump_config()
  29. train_args = self.get_train_kwargs()
  30. export_with_pir = (
  31. self.global_config.get("export_with_pir", False) or FLAGS_json_format_model
  32. )
  33. train_args.update(
  34. {
  35. "uniform_output_enabled": self.train_config.get(
  36. "uniform_output_enabled", True
  37. ),
  38. "export_with_pir": export_with_pir,
  39. }
  40. )
  41. if self.benchmark_config is not None:
  42. train_args.update({"benchmark": self.benchmark_config})
  43. train_result = self.pdx_model.train(**train_args)
  44. assert (
  45. train_result.returncode == 0
  46. ), f"Encountered an unexpected error({train_result.returncode}) in \
  47. training!"
  48. self.make_tar_file()
  49. def make_tar_file(self):
  50. """make tar file to package the training outputs"""
  51. tar_path = Path(self.global_config.output) / "best_accuracy.pdparams.tar"
  52. with tarfile.open(tar_path, "w") as tar:
  53. tar.add(self.global_config.output, arcname="best_accuracy.pdparams")
  54. def update_config(self):
  55. """update training config"""
  56. self.pdx_config.update_dataset(self.global_config.dataset_dir, "TSADDataset")
  57. if self.train_config.input_len is not None:
  58. self.pdx_config.update_input_len(self.train_config.input_len)
  59. if self.train_config.time_col is not None:
  60. self.pdx_config.update_basic_info({"time_col": self.train_config.time_col})
  61. if self.train_config.feature_cols is not None:
  62. if isinstance(self.train_config.feature_cols, tuple):
  63. feature_cols = [str(item) for item in self.train_config.feature_cols]
  64. self.pdx_config.update_basic_info({"feature_cols": feature_cols})
  65. else:
  66. self.pdx_config.update_basic_info(
  67. {"feature_cols": self.train_config.feature_cols.split(",")}
  68. )
  69. if self.train_config.label_col is not None:
  70. self.pdx_config.update_basic_info(
  71. {"label_col": self.train_config.label_col}
  72. )
  73. if self.train_config.freq is not None:
  74. try:
  75. self.train_config.freq = int(self.train_config.freq)
  76. except ValueError:
  77. pass
  78. self.pdx_config.update_basic_info({"freq": self.train_config.freq})
  79. if self.train_config.batch_size is not None:
  80. self.pdx_config.update_batch_size(self.train_config.batch_size)
  81. if self.train_config.learning_rate is not None:
  82. self.pdx_config.update_learning_rate(self.train_config.learning_rate)
  83. if self.train_config.epochs_iters is not None:
  84. self.pdx_config.update_epochs(self.train_config.epochs_iters)
  85. if self.train_config.get("dy2st", False):
  86. self.pdx_config.update_to_static(self.train_config.dy2st)
  87. if self.train_config.log_interval is not None:
  88. self.pdx_config.update_log_interval(self.train_config.log_interval)
  89. if self.global_config.output is not None:
  90. self.pdx_config.update_save_dir(self.global_config.output)
  91. def get_train_kwargs(self) -> dict:
  92. """get key-value arguments of model training function
  93. Returns:
  94. dict: the arguments of training function.
  95. """
  96. train_args = {"device": self.get_device(using_device_number=1)}
  97. if self.global_config.output is not None:
  98. train_args["save_dir"] = self.global_config.output
  99. # amp support 'O1', 'O2', 'OFF'
  100. train_args["amp"] = self.train_config.get("amp", "OFF")
  101. return train_args