# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import os.path as osp
from pathlib import Path
from collections import defaultdict, Counter
import json

from ...base import BaseDatasetChecker
from .dataset_src import check, split_dataset, deep_analyse
from ..model_list import MODELS


class TextDetDatasetChecker(BaseDatasetChecker):
    """Dataset Checker for Text Detection Model"""

    entities = MODELS

    def get_dataset_root(self, dataset_dir: str) -> str:
        """Find the dataset root directory.

        Args:
            dataset_dir (str): the directory that contains the dataset.

        Returns:
            str: the root directory of the dataset.
        """
        # Expect exactly one "images" directory anywhere under dataset_dir;
        # its parent is treated as the dataset root.
        anno_dirs = list(Path(dataset_dir).glob("**/images"))
        assert len(anno_dirs) == 1
        dataset_dir = anno_dirs[0].parent.as_posix()
        return dataset_dir

    def convert_dataset(self, src_dataset_dir: str) -> str:
        """Convert the dataset from another format to the specified format.

        Args:
            src_dataset_dir (str): the root directory of the dataset.

        Returns:
            str: the root directory of the converted dataset.
        """
        # Text detection datasets need no conversion, so the input is returned unchanged.
        return src_dataset_dir

    def split_dataset(self, src_dataset_dir: str) -> str:
        """Repartition the train and validation subsets.

        Args:
            src_dataset_dir (str): the root directory of the dataset.

        Returns:
            str: the root directory of the split dataset.
        """
        return split_dataset(
            src_dataset_dir,
            self.check_dataset_config.split.train_percent,
            self.check_dataset_config.split.val_percent,
        )

    def check_dataset(self, dataset_dir: str) -> dict:
        """Check whether the dataset meets the specification and return a summary.

        Args:
            dataset_dir (str): the root directory of the dataset.

        Returns:
            dict: dataset summary.
        """
        return check(dataset_dir, self.output, sample_num=10)

    def analyse(self, dataset_dir: str) -> dict:
        """Perform a deep analysis of the dataset.

        Args:
            dataset_dir (str): the root directory of the dataset.

        Returns:
            dict: the deep analysis results.
        """
        return deep_analyse(dataset_dir, self.output)

    def get_show_type(self) -> str:
        """Get the show type of the dataset.

        Returns:
            str: show type
        """
        return "image"

    def get_dataset_type(self) -> str:
        """Return the dataset type.

        Returns:
            str: dataset type
        """
        return "TextDetDataset"
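

# Illustrative usage sketch (not part of the original module): the checker is
# normally driven by the surrounding PaddleX pipeline through
# BaseDatasetChecker, which is assumed here to provide the configured
# ``check_dataset_config`` and ``output`` attributes. The ``config`` object
# below is hypothetical and only shows the intended call order.
#
#     checker = TextDetDatasetChecker(config)
#     root = checker.get_dataset_root("/path/to/dataset")
#     summary = checker.check_dataset(root)
#     analysis = checker.analyse(root)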