trainer.py

# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import shutil
from pathlib import Path

from ..base import BaseTrainer
from ...utils.config import AttrDict
from .model_list import MODELS


class FormulaRecTrainer(BaseTrainer):
    """Formula Recognition Model Trainer"""

    entities = MODELS

    def dump_label_dict(self, src_label_dict_path: str):
        """Copy the label dict file into the output directory as label_dict.txt.

        Args:
            src_label_dict_path (str): path to the label dict file to be copied.
        """
        dst_label_dict_path = Path(self.global_config.output).joinpath("label_dict.txt")
        shutil.copyfile(src_label_dict_path, dst_label_dict_path)

    def update_config(self):
        """update training config"""
        if self.train_config.log_interval:
            self.pdx_config.update_log_interval(self.train_config.log_interval)
        if self.train_config.eval_interval:
            self.pdx_config._update_eval_interval_by_epoch(
                self.train_config.eval_interval
            )
        if self.train_config.save_interval:
            self.pdx_config.update_save_interval(self.train_config.save_interval)
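
        # Select the dataset format by model: LaTeX_OCR_rec uses the dedicated
        # LaTeXOCRDataSet, while UniMERNet and the PP-FormulaNet models read a
        # plain SimpleDataSet.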
        if self.global_config["model"] == "LaTeX_OCR_rec":
            self.pdx_config.update_dataset(
                self.global_config.dataset_dir, "LaTeXOCRDataSet"
            )
        elif self.global_config["model"] in (
            "UniMERNet",
            "PP-FormulaNet-L",
            "PP-FormulaNet-S",
        ):
            self.pdx_config.update_dataset(
                self.global_config.dataset_dir, "SimpleDataSet"
            )
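
        # If the dataset ships a token dictionary (dict.txt), register its path
        # in the config and copy it next to the training outputs.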
        label_dict_path = Path(self.global_config.dataset_dir).joinpath("dict.txt")
        if label_dict_path.exists():
            self.pdx_config.update_label_dict_path(label_dict_path)
            self.dump_label_dict(label_dict_path)
        if self.train_config.pretrain_weight_path:
            self.pdx_config.update_pretrained_weights(
                self.train_config.pretrain_weight_path
            )
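
        # Both branches forward the train/val batch sizes, but LaTeX_OCR_rec goes
        # through update_batch_size_pair while the other models use update_batch_size.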
        if self.global_config["model"] == "LaTeX_OCR_rec":
            if (
                self.train_config.batch_size_train is not None
                and self.train_config.batch_size_val is not None
            ):
                self.pdx_config.update_batch_size_pair(
                    self.train_config.batch_size_train, self.train_config.batch_size_val
                )
        else:
            if (
                self.train_config.batch_size_train is not None
                and self.train_config.batch_size_val is not None
            ):
                self.pdx_config.update_batch_size(
                    self.train_config.batch_size_train, self.train_config.batch_size_val
                )
        if self.train_config.learning_rate is not None:
            self.pdx_config.update_learning_rate(self.train_config.learning_rate)
        if self.train_config.epochs_iters is not None:
            self.pdx_config._update_epochs(self.train_config.epochs_iters)
        if (
            self.train_config.resume_path is not None
            and self.train_config.resume_path != ""
        ):
            self.pdx_config._update_checkpoints(self.train_config.resume_path)
        if self.global_config.output is not None:
            self.pdx_config._update_output_dir(self.global_config.output)

    def get_train_kwargs(self) -> dict:
        """get key-value arguments of the model training function

        Returns:
            dict: the arguments of the training function.
        """
        return {
            "device": self.get_device(),
            "dy2st": self.train_config.get("dy2st", False),
        }
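

# Illustrative usage (a sketch only, not a documented entry point; the config
# object passed to the constructor and the inherited train() call are assumptions
# about the BaseTrainer interface, which normally invokes update_config() itself):
#
#     trainer = FormulaRecTrainer(config)
#     trainer.train()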