register.py

# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import os.path as osp

from ...base.register import register_model_info, register_suite_info
from ..ts_base.model import TSModel
from .runner import TSCLSRunner
from .config import TSClassifyConfig

# Root of the PaddleTS repository used to run jobs, taken from the environment.
REPO_ROOT_PATH = os.environ.get("PADDLE_PDX_PADDLETS_PATH")
# Directory holding the shared YAML configs for the time-series models.
PDX_CONFIG_DIR = osp.abspath(osp.join(osp.dirname(__file__), "..", "configs"))

# Register the time-series classification suite.
register_suite_info(
    {
        "suite_name": "TSClassify",
        "model": TSModel,
        "runner": TSCLSRunner,
        "config": TSClassifyConfig,
        "runner_root_path": REPO_ROOT_PATH,
    }
)

################ Models Using Universal Config ################
# timesnet
TimesNetCLS_CFG_PATH = osp.join(PDX_CONFIG_DIR, "TimesNet_cls.yaml")
register_model_info(
    {
        "model_name": "TimesNet_cls",
        "suite": "TSClassify",
        "config_path": TimesNetCLS_CFG_PATH,
        "supported_apis": ["train", "evaluate", "predict", "export"],
        "supported_train_opts": {
            "device": ["cpu", "gpu_n1cx", "xpu", "npu", "mlu"],
            "dy2st": False,
            "amp": [],
        },
        "supported_evaluate_opts": {
            "device": ["cpu", "gpu_n1cx", "xpu", "npu", "mlu"],
            "amp": [],
        },
        "supported_predict_opts": {"device": ["cpu", "gpu", "xpu", "npu", "mlu"]},
        "supported_infer_opts": {"device": ["cpu", "gpu", "xpu", "npu", "mlu"]},
    }
)
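
# ---------------------------------------------------------------------------
# Illustrative sketch only (not part of the original file): a second model
# would be registered against the same "TSClassify" suite using the same
# pattern as TimesNet_cls above. "AnotherModel_cls" and its YAML file are
# hypothetical placeholder names, so the call is left commented out; any
# "supported_*_opts" entries would mirror the TimesNet_cls block.
# ---------------------------------------------------------------------------
# AnotherModelCLS_CFG_PATH = osp.join(PDX_CONFIG_DIR, "AnotherModel_cls.yaml")
# register_model_info(
#     {
#         "model_name": "AnotherModel_cls",
#         "suite": "TSClassify",
#         "config_path": AnotherModelCLS_CFG_PATH,
#         "supported_apis": ["train", "evaluate", "predict", "export"],
#     }
# )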