# video_detection.py
# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
  14. import os
  15. from typing import Any, Dict, List
  16. from .....utils.deps import function_requires_deps, is_dep_available
  17. from ...infra import utils as serving_utils
  18. from ...infra.config import AppConfig
  19. from ...infra.models import AIStudioResultResponse
  20. from ...schemas.video_detection import INFER_ENDPOINT, InferRequest, InferResult
  21. from .._app import create_app, primary_operation
  22. if is_dep_available("fastapi"):
  23. from fastapi import FastAPI, HTTPException
  24. @function_requires_deps("fastapi")
  25. def create_pipeline_app(pipeline: Any, app_config: AppConfig) -> "FastAPI":
  26. app, ctx = create_app(
  27. pipeline=pipeline, app_config=app_config, app_aiohttp_session=True
  28. )
  29. @primary_operation(
  30. app,
  31. INFER_ENDPOINT,
  32. "infer",
  33. )
  34. async def _infer(request: InferRequest) -> AIStudioResultResponse[InferResult]:
  35. pipeline = ctx.pipeline
  36. aiohttp_session = ctx.aiohttp_session
  37. file_bytes = await serving_utils.get_raw_bytes_async(
  38. request.video, aiohttp_session
  39. )
  40. ext = serving_utils.infer_file_ext(request.video)
  41. if ext is None:
  42. raise HTTPException(
  43. status_code=422, detail="File extension cannot be inferred"
  44. )
  45. video_path = await serving_utils.call_async(
  46. serving_utils.write_to_temp_file,
  47. file_bytes,
  48. suffix=ext,
  49. )
  50. try:
  51. result = (
  52. await pipeline.infer(
  53. video_path,
  54. nms_thresh=request.nmsThresh,
  55. score_thresh=request.scoreThresh,
  56. )
  57. )[0]
  58. finally:
  59. await serving_utils.call_async(os.unlink, video_path)
  60. frames: List[Dict[str, Any]] = []
  61. for i, item in enumerate(result["result"]):
  62. objs: List[Dict[str, Any]] = []
  63. for obj in item:
  64. objs.append(
  65. dict(
  66. bbox=obj[0],
  67. categoryName=obj[2],
  68. score=obj[1],
  69. )
  70. )
  71. frames.append(
  72. dict(
  73. index=i,
  74. detectedObjects=objs,
  75. )
  76. )
  77. return AIStudioResultResponse[InferResult](
  78. logId=serving_utils.generate_log_id(),
  79. result=InferResult(frames=frames),
  80. )
  81. return app