detection.py

# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os.path as osp


def prune(best_model_path, dataset_path, sensitivities_path, batch_size):
    # Import here so paddlex is only required when pruning is actually run.
    import paddlex as pdx
    model = pdx.load_model(best_model_path)
    # Build a COCO-format evaluation dataset if the expected files exist.
    if osp.exists(osp.join(dataset_path, 'JPEGImages')) and \
            osp.exists(osp.join(dataset_path, 'train.json')) and \
            osp.exists(osp.join(dataset_path, 'val.json')):
        data_dir = osp.join(dataset_path, 'JPEGImages')
        eval_ann_file = osp.join(dataset_path, 'val.json')
        eval_dataset = pdx.datasets.CocoDetection(
            data_dir=data_dir,
            ann_file=eval_ann_file,
            transforms=model.test_transforms)
    # Otherwise build a VOC-format evaluation dataset.
    elif osp.exists(osp.join(dataset_path, 'train_list.txt')) and \
            osp.exists(osp.join(dataset_path, 'val_list.txt')) and \
            osp.exists(osp.join(dataset_path, 'labels.txt')):
        eval_file_list = osp.join(dataset_path, 'val_list.txt')
        label_list = osp.join(dataset_path, 'labels.txt')
        eval_dataset = pdx.datasets.VOCDetection(
            data_dir=dataset_path,
            file_list=eval_file_list,
            label_list=label_list,
            transforms=model.test_transforms)
    # Run L1-norm channel sensitivity analysis on the evaluation dataset
    # and save the resulting sensitivities file for later pruning.
    model.analyze_sensitivity(
        dataset=eval_dataset,
        batch_size=batch_size,
        criterion='l1_norm',
        save_dir=sensitivities_path)
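
A minimal usage sketch, assuming the function above is importable from this module; the model directory, dataset root, output directory, and batch size below are hypothetical placeholders rather than values taken from this repository:

    # Hypothetical invocation of prune(); all paths and the batch size
    # are illustrative placeholders, not values from this repository.
    if __name__ == '__main__':
        prune(
            best_model_path='output/best_model',   # directory of a trained PaddleX detection model
            dataset_path='dataset/det',            # VOC- or COCO-format dataset root
            sensitivities_path='output/prune',     # where the sensitivities file is saved
            batch_size=2)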