client.py

#!/usr/bin/env python

import argparse
import sys

from paddlex_hps_client import triton_request, utils
from tritonclient import grpc as triton_grpc


def ensure_no_error(output, additional_msg):
    # Abort if the server-side pipeline reported an error.
    if output["errorCode"] != 0:
        print(additional_msg, file=sys.stderr)
        print(f"Error code: {output['errorCode']}", file=sys.stderr)
        print(f"Error message: {output['errorMsg']}", file=sys.stderr)
        sys.exit(1)


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--file", type=str, required=True)
    parser.add_argument("--key-list", type=str, nargs="+", required=True)
    parser.add_argument("--file-type", type=int, choices=[0, 1])
    parser.add_argument("--no-visualization", action="store_true")
    parser.add_argument("--url", type=str, default="localhost:8001")
    args = parser.parse_args()

    client = triton_grpc.InferenceServerClient(args.url)

    # Step 1: run visual analysis on the input file.
    input_ = {"file": utils.prepare_input_file(args.file)}
    if args.file_type is not None:
        input_["fileType"] = args.file_type
    if args.no_visualization:
        input_["visualize"] = False
    output = triton_request(client, "chatocr-visual", input_)
    ensure_no_error(output, "Failed to analyze the images")
    result_visual = output["result"]

    for i, res in enumerate(result_visual["layoutParsingResults"]):
        print(res["prunedResult"])
        for img_name, img in res["outputImages"].items():
            img_path = f"{img_name}_{i}.jpg"
            utils.save_output_file(img, img_path)
            print(f"Output image saved at {img_path}")

    # Step 2: build a vector store from the visual information.
    input_ = {
        "visualInfo": result_visual["visualInfo"],
    }
    output = triton_request(client, "chatocr-vector", input_)
    ensure_no_error(output, "Failed to build a vector store")
    result_vector = output["result"]

    # Step 3: ask the LLM for the requested keys, using vector retrieval.
    input_ = {
        "keyList": args.key_list,
        "visualInfo": result_visual["visualInfo"],
        "useVectorRetrieval": True,
        "vectorInfo": result_vector["vectorInfo"],
    }
    output = triton_request(client, "chatocr-chat", input_)
    ensure_no_error(output, "Failed to chat with the LLM")
    result_chat = output["result"]
    print("Final result:")
    print(result_chat["chatResult"])


if __name__ == "__main__":
    main()
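
Once a serving deployment is reachable at the gRPC endpoint given by --url, the script can be invoked as follows; the file path and key names here are placeholders, not values from the listing:

python client.py --file ./document.pdf --key-list "name" "date" --url localhost:8001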