__init__.py

# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import copy

import paddle

from paddlex.ppcls.utils import logger
from . import optimizer

__all__ = ['build_optimizer']

def build_lr_scheduler(lr_config, epochs, step_each_epoch):
    # Deferred import: the sibling module is only needed when a scheduler
    # is actually built.
    from . import learning_rate
    lr_config.update({'epochs': epochs, 'step_each_epoch': step_each_epoch})
    if 'name' in lr_config:
        # Look up the scheduler class by name and instantiate it with the
        # remaining config entries.
        lr_name = lr_config.pop('name')
        lr = getattr(learning_rate, lr_name)(**lr_config)
        if isinstance(lr, paddle.optimizer.lr.LRScheduler):
            return lr
        else:
            # Some entries are callables that return the scheduler when invoked.
            return lr()
    else:
        # No scheduler named: fall back to the constant learning rate value.
        lr = lr_config['learning_rate']
    return lr
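
# A minimal sketch of how build_lr_scheduler is typically driven. The
# scheduler name 'Cosine' is an assumption about what the sibling
# `learning_rate` module exposes, not something verified here:
#
#     lr_cfg = {'name': 'Cosine', 'learning_rate': 0.1}
#     lr = build_lr_scheduler(lr_cfg, epochs=120, step_each_epoch=500)
#
# With 'name' omitted, the raw 'learning_rate' float is returned and used
# as a constant learning rate.
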
# model_list is None in static graph mode
def build_optimizer(config, epochs, step_each_epoch, model_list=None):
    # Work on a copy so the caller's config dict is left untouched.
    config = copy.deepcopy(config)
    # step 1: build the learning rate (an LRScheduler or a constant float)
    lr = build_lr_scheduler(config.pop('lr'), epochs, step_each_epoch)
    logger.debug("build lr ({}) success.".format(lr))
    # step 2: build regularization
    if 'regularizer' in config and config['regularizer'] is not None:
        if 'weight_decay' in config:
            logger.warning(
                "ConfigError: Only one of regularizer and weight_decay can be set in Optimizer Config. \"weight_decay\" has been ignored."
            )
        # Map the config name onto Paddle's *Decay classes, e.g. 'L2' ->
        # paddle.regularizer.L2Decay.
        reg_config = config.pop('regularizer')
        reg_name = reg_config.pop('name') + 'Decay'
        reg = getattr(paddle.regularizer, reg_name)(**reg_config)
        config["weight_decay"] = reg
        logger.debug("build regularizer ({}) success.".format(reg))
    # step 3: build the optimizer itself, with optional gradient clipping
    optim_name = config.pop('name')
    if 'clip_norm' in config:
        clip_norm = config.pop('clip_norm')
        grad_clip = paddle.nn.ClipGradByNorm(clip_norm=clip_norm)
    else:
        grad_clip = None
    optim = getattr(optimizer, optim_name)(learning_rate=lr,
                                           grad_clip=grad_clip,
                                           **config)(model_list=model_list)
    logger.debug("build optimizer ({}) success.".format(optim))
    return optim, lr
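
# A hedged usage sketch for the whole builder, assuming a PaddleClas-style
# config in which 'Momentum' names a wrapper class in the local `optimizer`
# module and 'L2' maps onto paddle.regularizer.L2Decay (the exact names
# available depend on those modules):
#
#     optim_cfg = {
#         'name': 'Momentum',
#         'momentum': 0.9,
#         'lr': {'name': 'Cosine', 'learning_rate': 0.1},
#         'regularizer': {'name': 'L2', 'coeff': 1e-4},
#         'clip_norm': 10.0,
#     }
#     optim, lr = build_optimizer(
#         optim_cfg, epochs=120, step_each_epoch=500, model_list=[model])
#
# Note the double call at the end of build_optimizer: getattr(...) returns a
# wrapper that is first constructed from the config and then called with
# model_list to produce the actual paddle optimizer instance.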