image_classification.py

# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from itertools import islice
from typing import List, Optional

from fastapi import FastAPI, HTTPException
from pydantic import BaseModel, Field
from typing_extensions import Annotated

from .....utils import logging
from ...single_model_pipeline import ImageClassification
from .. import utils as serving_utils
from ..app import AppConfig, create_app
from ..models import Response, ResultResponse
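

# Pydantic schemas for the "/image-classification" endpoint's request and
# response payloads.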
class InferenceParams(BaseModel):
    topK: Optional[Annotated[int, Field(gt=0)]] = None


class InferRequest(BaseModel):
    image: str
    inferenceParams: Optional[InferenceParams] = None


class Category(BaseModel):
    id: int
    name: str
    score: float


class InferResult(BaseModel):
    categories: List[Category]
    image: str


def create_pipeline_app(
    pipeline: ImageClassification, app_config: AppConfig
) -> FastAPI:
    app, ctx = create_app(
        pipeline=pipeline, app_config=app_config, app_aiohttp_session=True
    )
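
    # Register the inference endpoint; request validation failures are reported
    # with the shared 422 Response model.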
    @app.post(
        "/image-classification",
        operation_id="infer",
        responses={422: {"model": Response}},
    )
    async def _infer(request: InferRequest) -> ResultResponse[InferResult]:
        pipeline = ctx.pipeline
        aiohttp_session = ctx.aiohttp_session
        try:
            file_bytes = await serving_utils.get_raw_bytes(
                request.image, aiohttp_session
            )
            image = serving_utils.image_bytes_to_array(file_bytes)
            top_k: Optional[int] = None
            if request.inferenceParams is not None:
                if request.inferenceParams.topK is not None:
                    top_k = request.inferenceParams.topK
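            # Run the pipeline on the decoded image; results come back as a
            # batch, so take the first (and only) element. Fall back to stringified
            # class IDs when the model ships no label names.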
            result = (await pipeline.infer(image))[0]
            if "label_names" in result:
                cat_names = result["label_names"]
            else:
                cat_names = [str(id_) for id_ in result["class_ids"]]
            categories: List[Category] = []
            for id_, name, score in islice(
                zip(result["class_ids"], cat_names, result["scores"]), None, top_k
            ):
                categories.append(Category(id=id_, name=name, score=score))
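            # Encode the pipeline's visualization image for the response payload.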
            output_image_base64 = serving_utils.image_to_base64(result.img)
            return ResultResponse(
                logId=serving_utils.generate_log_id(),
                errorCode=0,
                errorMsg="Success",
                result=InferResult(categories=categories, image=output_image_base64),
            )
        except Exception as e:
            logging.exception(e)
            raise HTTPException(status_code=500, detail="Internal server error")

    return app
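

# A minimal client-side sketch (not part of the original module): assuming the
# app returned by create_pipeline_app is served over HTTP. The host, port, and
# exact accepted forms of the "image" field are deployment-specific; judging by
# serving_utils.get_raw_bytes, a URL or Base64-encoded file content is expected.
#
#     import base64
#     import requests  # hypothetical client dependency
#
#     with open("example.jpg", "rb") as f:
#         payload = {
#             "image": base64.b64encode(f.read()).decode("ascii"),
#             "inferenceParams": {"topK": 5},
#         }
#     resp = requests.post(
#         "http://localhost:8080/image-classification", json=payload
#     )
#     # The response mirrors ResultResponse: logId, errorCode, errorMsg, result.
#     print(resp.json()["result"]["categories"])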