```python
from langchain_openai import ChatOpenAI

from llmops.config import qwen3_32B_model, deepseek_v3_model


def get_llm(model_name: str, api_key: str, base_url: str, temperature: float = 0.1, streaming: bool = True) -> ChatOpenAI:
    """
    Build an LLM instance.

    :param model_name: name of the model to use
    :param api_key: API key for the model provider
    :param base_url: base URL of the OpenAI-compatible endpoint
    :param temperature: sampling temperature
    :param streaming: whether to stream responses
    :return: a configured ChatOpenAI instance
    """
    return ChatOpenAI(
        model=model_name,
        base_url=base_url,
        api_key=api_key,
        temperature=temperature,
        streaming=streaming,  # honor the parameter instead of hard-coding True
    )


def get_qwen3_32B_llm() -> ChatOpenAI:
    """
    Convenience helper that returns a Qwen3 32B model instance.

    :return: a ChatOpenAI instance configured for Qwen3 32B
    """
    model_name = qwen3_32B_model["name"]
    api_key = qwen3_32B_model.get("api_key", "")
    base_url = qwen3_32B_model.get("base_url", "")
    return get_llm(model_name=model_name, api_key=api_key, base_url=base_url)


def get_deepseek_v3_llm() -> ChatOpenAI:
    """
    Convenience helper that returns a DeepSeek V3 model instance.

    :return: a ChatOpenAI instance configured for DeepSeek V3
    """
    model_name = deepseek_v3_model["name"]
    api_key = deepseek_v3_model.get("api_key", "")
    base_url = deepseek_v3_model.get("base_url", "")
    return get_llm(model_name=model_name, api_key=api_key, base_url=base_url)


if __name__ == '__main__':
    # Entry point: smoke-test one of the helpers.
    # llm = get_qwen3_32B_llm()
    # print(llm)
    # response = llm.invoke("Hello")
    # print(response)
    llm = get_deepseek_v3_llm()
    response = llm.invoke("Hello")
    print(response)
```
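The helpers above assume that `llmops.config` exposes plain dicts with `name`, `api_key`, and `base_url` keys. A minimal sketch of what that module might look like, purely as an assumption (the model identifiers, endpoint URLs, and environment-variable names below are placeholders, not taken from the original project):

```python
# llmops/config.py -- hypothetical sketch, not the project's actual config.
import os

qwen3_32B_model = {
    "name": "qwen3-32b",                           # assumed model identifier
    "api_key": os.getenv("QWEN_API_KEY", ""),      # assumed env-var name
    "base_url": "https://example.com/v1",          # placeholder OpenAI-compatible endpoint
}

deepseek_v3_model = {
    "name": "deepseek-chat",                       # assumed model identifier
    "api_key": os.getenv("DEEPSEEK_API_KEY", ""),  # assumed env-var name
    "base_url": "https://api.deepseek.com",        # commonly documented endpoint; verify for your account
}
```

Using `.get(..., "")` with empty-string defaults means the helpers still construct a client when a key is missing, deferring any authentication error to the first request.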