seal_recognition.py 3.5 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112
  1. # copyright (c) 2024 PaddlePaddle Authors. All Rights Reserve.
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. from typing import List, Optional
  15. from fastapi import FastAPI, HTTPException
  16. from pydantic import BaseModel, Field
  17. from typing_extensions import Annotated, TypeAlias
  18. from .....utils import logging
  19. from ...seal_recognition import SealOCRPipeline
  20. from .. import utils as serving_utils
  21. from ..app import AppConfig, create_app
  22. from ..models import Response, ResultResponse
class InferenceParams(BaseModel):
    """Optional per-request tuning parameters for seal-recognition inference."""

    # Maximum pixel length of the image's longer side; validated as > 0.
    # NOTE(review): the endpoint currently rejects any value with HTTP 422.
    maxLongSide: Optional[Annotated[int, Field(gt=0)]] = None
class InferRequest(BaseModel):
    """Request body for the POST /seal-recognition endpoint."""

    # Image payload; resolved via serving_utils.get_raw_bytes, so presumably
    # a URL or base64-encoded string — confirm against the serving utils.
    image: str
    # Optional inference tuning parameters; may be omitted entirely.
    inferenceParams: Optional[InferenceParams] = None
# A 2-D point encoded as an exactly-two-element [x, y] list of ints.
Point: TypeAlias = Annotated[List[int], Field(min_length=2, max_length=2)]
# A polygon encoded as a list of at least three points.
Polygon: TypeAlias = Annotated[List[Point], Field(min_length=3)]
class Text(BaseModel):
    """A single recognized text region inside one seal impression."""

    # Detected bounding polygon of the text region.
    poly: Polygon
    # Recognized text content.
    text: str
    # Recognition confidence score.
    score: float
class SealImpression(BaseModel):
    """All recognized text regions belonging to one detected seal."""

    # Recognized text regions for this seal impression.
    texts: List[Text]
class InferResult(BaseModel):
    """Successful response payload for the seal-recognition endpoint."""

    # One entry per seal impression detected in the input image.
    sealImpressions: List[SealImpression]
    # Base64-encoded visualization of the layout-analysis result.
    layoutImage: str
  39. def create_pipeline_app(pipeline: SealOCRPipeline, app_config: AppConfig) -> FastAPI:
  40. app, ctx = create_app(
  41. pipeline=pipeline, app_config=app_config, app_aiohttp_session=True
  42. )
  43. @app.post(
  44. "/seal-recognition", operation_id="infer", responses={422: {"model": Response}}
  45. )
  46. async def _infer(request: InferRequest) -> ResultResponse[InferResult]:
  47. pipeline = ctx.pipeline
  48. aiohttp_session = ctx.aiohttp_session
  49. if request.inferenceParams:
  50. max_long_side = request.inferenceParams.maxLongSide
  51. if max_long_side:
  52. raise HTTPException(
  53. status_code=422,
  54. detail="`max_long_side` is currently not supported.",
  55. )
  56. try:
  57. file_bytes = await serving_utils.get_raw_bytes(
  58. request.image, aiohttp_session
  59. )
  60. image = serving_utils.image_bytes_to_array(file_bytes)
  61. result = (await pipeline.infer(image))[0]
  62. seal_impressions: List[SealImpression] = []
  63. for item in result["ocr_result"]:
  64. texts: List[Text] = []
  65. for poly, text, score in zip(
  66. item["dt_polys"], item["rec_text"], item["rec_score"]
  67. ):
  68. texts.append(Text(poly=poly, text=text, score=score))
  69. seal_impressions.append(SealImpression(texts=texts))
  70. layout_image_base64 = serving_utils.image_to_base64(
  71. result["layout_result"].img
  72. )
  73. # TODO: OCR image
  74. return ResultResponse(
  75. logId=serving_utils.generate_log_id(),
  76. errorCode=0,
  77. errorMsg="Success",
  78. result=InferResult(
  79. sealImpressions=seal_impressions,
  80. layoutImage=layout_image_base64,
  81. ),
  82. )
  83. except Exception as e:
  84. logging.exception(e)
  85. raise HTTPException(status_code=500, detail="Internal server error")
  86. return app