llm.py

from langchain_openai import ChatOpenAI

from llmops.config import qwen3_32B_model, deepseek_v3_model


# Get a large-model instance
def get_llm(model_name: str, api_key: str, base_url: str, temperature: float = 0.1, streaming: bool = True) -> ChatOpenAI:
    """
    Get an LLM instance.

    :param model_name: name of the model to request
    :param api_key: API key for the OpenAI-compatible endpoint
    :param base_url: base URL of the OpenAI-compatible endpoint
    :param temperature: sampling temperature, defaults to 0.1
    :param streaming: whether to stream tokens from the server
    :return: a configured ChatOpenAI instance
    """
    return ChatOpenAI(
        model=model_name,
        base_url=base_url,
        api_key=api_key,
        temperature=temperature,
        streaming=streaming,
    )
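
# For reference, a minimal sketch of what llmops/config.py is assumed to
# provide (that module is not shown here): plain dicts with a required
# "name" key and optional "api_key" / "base_url" keys. The values below
# are placeholders, not the project's real configuration.
#
#   qwen3_32B_model = {
#       "name": "qwen3-32b",
#       "api_key": "sk-...",
#       "base_url": "https://example.com/v1",
#   }
#   deepseek_v3_model = {
#       "name": "deepseek-v3",
#       "api_key": "sk-...",
#       "base_url": "https://example.com/v1",
#   }
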

def get_qwen3_32B_llm() -> ChatOpenAI:
    """
    Convenience function that returns a Qwen3-32B model instance.

    :return: a ChatOpenAI instance configured from qwen3_32B_model
    """
    model_name = qwen3_32B_model["name"]
    api_key = qwen3_32B_model.get("api_key", "")
    base_url = qwen3_32B_model.get("base_url", "")
    return get_llm(model_name=model_name, api_key=api_key, base_url=base_url)

def get_deepseek_v3_llm() -> ChatOpenAI:
    """
    Convenience function that returns a DeepSeek-V3 model instance.

    :return: a ChatOpenAI instance configured from deepseek_v3_model
    """
    model_name = deepseek_v3_model["name"]
    api_key = deepseek_v3_model.get("api_key", "")
    base_url = deepseek_v3_model.get("base_url", "")
    return get_llm(model_name=model_name, api_key=api_key, base_url=base_url)
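
# Direct use of get_llm against any OpenAI-compatible endpoint, bypassing
# the config module (all values below are placeholders):
#
#   llm = get_llm(
#       model_name="my-model",
#       api_key="sk-...",
#       base_url="http://localhost:8000/v1",
#       temperature=0.7,
#       streaming=False,
#   )
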

if __name__ == '__main__':
    # Program entry point
    # llm = get_qwen3_32B_llm()
    # print(llm)
    # response = llm.invoke("你好")
    # print(response)
    llm = get_deepseek_v3_llm()
    response = llm.invoke("你好")
    print(response)
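
# A minimal streaming sketch, assuming the endpoint supports server-side
# streaming: ChatOpenAI.stream() yields message chunks whose .content
# holds the incremental text.
#
#   llm = get_deepseek_v3_llm()
#   for chunk in llm.stream("你好"):
#       print(chunk.content, end="", flush=True)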