# instance_segmentation.py
# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
  14. from typing import Any, Dict, List
  15. import numpy as np
  16. from .....utils.deps import function_requires_deps, is_dep_available
  17. from ...infra import utils as serving_utils
  18. from ...infra.config import AppConfig
  19. from ...infra.models import AIStudioResultResponse
  20. from ...schemas.instance_segmentation import INFER_ENDPOINT, InferRequest, InferResult
  21. from .._app import create_app, primary_operation
  22. if is_dep_available("fastapi"):
  23. from fastapi import FastAPI
  24. if is_dep_available("pycocotools"):
  25. import pycocotools.mask as mask_util
  26. @function_requires_deps("pycocotools")
  27. def _rle(mask: np.ndarray) -> str:
  28. rle_res = mask_util.encode(np.asarray(mask[..., None], order="F", dtype="uint8"))[0]
  29. return rle_res["counts"].decode("utf-8")
  30. @function_requires_deps("fastapi")
  31. def create_pipeline_app(pipeline: Any, app_config: AppConfig) -> "FastAPI":
  32. app, ctx = create_app(
  33. pipeline=pipeline,
  34. app_config=app_config,
  35. app_aiohttp_session=True,
  36. )
  37. @primary_operation(
  38. app,
  39. INFER_ENDPOINT,
  40. "infer",
  41. )
  42. async def _infer(request: InferRequest) -> AIStudioResultResponse[InferResult]:
  43. pipeline = ctx.pipeline
  44. aiohttp_session = ctx.aiohttp_session
  45. file_bytes = await serving_utils.get_raw_bytes_async(
  46. request.image, aiohttp_session
  47. )
  48. image = serving_utils.image_bytes_to_array(file_bytes)
  49. result = (await pipeline.infer(image, threshold=request.threshold))[0]
  50. instances: List[Dict[str, Any]] = []
  51. for obj, mask in zip(result["boxes"], result["masks"]):
  52. rle_res = _rle(mask)
  53. mask = dict(rleResult=rle_res, size=mask.shape)
  54. instances.append(
  55. dict(
  56. bbox=obj["coordinate"],
  57. categoryId=obj["cls_id"],
  58. categoryName=obj["label"],
  59. score=obj["score"],
  60. mask=mask,
  61. )
  62. )
  63. if ctx.config.visualize:
  64. output_image_base64 = serving_utils.base64_encode(
  65. serving_utils.image_to_bytes(result.img["res"])
  66. )
  67. else:
  68. output_image_base64 = None
  69. return AIStudioResultResponse[InferResult](
  70. logId=serving_utils.generate_log_id(),
  71. result=InferResult(instances=instances, image=output_image_base64),
  72. )
  73. return app