__init__.py
# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import os.path as osp
from pathlib import Path

from ...base import BaseDatasetChecker
from ..model_list import MODELS
from .dataset_src import anaylse_dataset, check_dataset, convert_dataset, split_dataset


class SegDatasetChecker(BaseDatasetChecker):
    """Dataset Checker for Semantic Segmentation Model"""

    entities = MODELS  # model names this checker is registered for
    sample_num = 10  # default number of samples drawn when checking the dataset

    def get_dataset_root(self, dataset_dir: str) -> str:
        """find the dataset root dir

        Args:
            dataset_dir (str): the directory that contains the dataset.

        Returns:
            str: the root directory of the dataset.
        """
        anno_dirs = list(Path(dataset_dir).glob("**/images"))
        if len(anno_dirs) == 1:
            # PaddleX format: the parent of the single `images` directory is the root.
            dataset_dir = anno_dirs[0].parent.as_posix()
        elif len(anno_dirs) == 0:
            # Labelme format: no `images` directory, so keep the given directory as the root.
            # `as_posix()` keeps the return value consistent with the `str` annotation.
            dataset_dir = Path(dataset_dir).as_posix()
        else:
            raise ValueError(
                f"Segmentation Dataset Format Error: We currently only support `PaddleX` and `Labelme` formats. "
                f"For `PaddleX` format, your dataset root must contain exactly one `images` directory. "
                f"For `Labelme` format, your dataset root must contain no `images` directories. "
                f"However, your dataset root contains {len(anno_dirs)} `images` directories. "
                f"Please adjust your dataset structure to comply with the supported formats."
            )
        return dataset_dir
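    # Illustrative layout sketch (an assumption, not something this method fully
    # verifies): a PaddleX-format segmentation dataset root is expected to look
    # roughly like
    #
    #     dataset_root/
    #     ├── images/          # the single directory matched by glob("**/images")
    #     ├── annotations/     # mask images; directory name assumed, not checked here
    #     ├── train.txt
    #     └── val.txt
    #
    # whereas a Labelme-style export has no `images` directory and is expected to
    # be converted later by `convert_dataset`.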
    def convert_dataset(self, src_dataset_dir: str) -> str:
        """convert the dataset from another format to the specified format

        Args:
            src_dataset_dir (str): the root directory of the dataset.

        Returns:
            str: the root directory of the converted dataset.
        """
        return convert_dataset(
            self.check_dataset_config.convert.src_dataset_type, src_dataset_dir
        )

    def split_dataset(self, src_dataset_dir: str) -> str:
        """repartition the train and validation subsets of the dataset

        Args:
            src_dataset_dir (str): the root directory of the dataset.

        Returns:
            str: the root directory of the split dataset.
        """
        return split_dataset(
            src_dataset_dir,
            self.check_dataset_config.split.train_percent,
            self.check_dataset_config.split.val_percent,
        )
    def check_dataset(self, dataset_dir: str, sample_num: int = sample_num) -> dict:
        """check whether the dataset meets the specifications and get a dataset summary

        Args:
            dataset_dir (str): the root directory of the dataset.
            sample_num (int): the number of samples to draw for the check.

        Returns:
            dict: dataset summary.
        """
        return check_dataset(dataset_dir, self.output, sample_num)

    def analyse(self, dataset_dir: str) -> dict:
        """run a deep analysis of the dataset

        Args:
            dataset_dir (str): the root directory of the dataset.

        Returns:
            dict: the deep analysis results.
        """
        return anaylse_dataset(dataset_dir, self.output)
    def get_show_type(self) -> str:
        """get the show type of dataset

        Returns:
            str: show type
        """
        return "image"

    def get_dataset_type(self) -> str:
        """return the dataset type

        Returns:
            str: dataset type
        """
        return "SegDataset"
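# ---------------------------------------------------------------------------
# Usage sketch (illustrative only): how the checker is constructed and driven
# is defined by `BaseDatasetChecker`, so the constructor signature, the config
# fields, and the call order below are assumptions, not the actual pipeline.
#
#     checker = SegDatasetChecker(config)                # constructor signature assumed
#     root = checker.get_dataset_root("path/to/dataset")
#     root = checker.convert_dataset(root)               # reads check_dataset_config.convert
#     root = checker.split_dataset(root)                 # reads check_dataset_config.split
#     summary = checker.check_dataset(root)              # writes sample files to self.output
#     analysis = checker.analyse(root)
# ---------------------------------------------------------------------------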