# AI LLM Application Advanced Series (2): Calling Large Language Models
# (blog article header: AI 大模型应用进阶系列(二):大模型调用 — 155 views, 1-min read)
import requests
import json

class SimpleLLMChat:
    """A minimal multi-turn chat client for an OpenAI-compatible LLM API."""

    def __init__(self, api_url, api_key, model_name="deepseek-reasoner"):
        """
        Initialize the chat client.

        :param api_url: URL of the chat-completions endpoint
        :param api_key: API key, sent as a Bearer token
        :param model_name: model identifier sent with each request
        """
        self.api_url = api_url
        self.api_key = api_key
        self.model_name = model_name
        self.chat_history = []  # conversation so far: list of {"role", "content"} dicts

    def add_to_history(self, role, content):
        """Append one message (role + content) to the conversation history."""
        self.chat_history.append({"role": role, "content": content})

    def clear_history(self):
        """Discard the whole conversation history."""
        self.chat_history = []
        print("已清空对话历史")

    def call_llm(self, user_message, max_tokens=1024, temperature=0.7, timeout=60):
        """
        Send the conversation (including *user_message*) to the LLM API.

        :param user_message: the user's new message
        :param max_tokens: maximum number of tokens to generate
        :param temperature: sampling diversity, between 0 and 1
        :param timeout: request timeout in seconds (backward-compatible addition)
        :return: the assistant's reply on success, otherwise an error string
        """
        # Record the user turn; rolled back below if the request fails.
        self.add_to_history("user", user_message)

        payload = {
            "model": self.model_name,
            "messages": self.chat_history,
            "max_tokens": max_tokens,
            "temperature": temperature,
        }

        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {self.api_key}",
        }

        try:
            # json= lets requests serialize the payload itself; timeout=
            # prevents the call from hanging forever on an unresponsive host.
            response = requests.post(
                self.api_url, headers=headers, json=payload, timeout=timeout
            )

            if response.status_code == 200:
                result = response.json()
                # Extract the assistant's reply from the first choice.
                assistant_reply = result["choices"][0]["message"]["content"]
                self.add_to_history("assistant", assistant_reply)
                return assistant_reply

            # Fix: drop the unanswered user turn so a failed call does not
            # leave the history ending with a dangling "user" message (the
            # next request would otherwise contain two consecutive user turns).
            self.chat_history.pop()
            error_msg = (
                f"API请求失败: 状态码 {response.status_code}, 详情: {response.text}"
            )
            print(error_msg)
            return error_msg

        except Exception as e:
            self.chat_history.pop()  # same rollback on network/parse errors
            error_msg = f"调用API时发生错误: {str(e)}"
            print(error_msg)
            return error_msg


def main():
    """Run an interactive console chat session against the configured LLM."""
    # Endpoint and credentials -- substitute real values before use.
    API_URL = "https://api.deepseek.com/chat/completions"  # 示例地址
    API_KEY = "your apiKey"  # 请替换为你的API密钥

    chat = SimpleLLMChat(API_URL, API_KEY)

    print("欢迎使用简单大模型聊天工具!")
    print("输入消息进行对话,输入 'clear' 清空历史,输入 'exit' 退出程序")

    while True:
        user_input = input("\n请输入: ")
        command = user_input.lower()

        # Handle the two special commands before touching the API.
        if command == "exit":
            print("再见!")
            break
        if command == "clear":
            chat.clear_history()
            continue

        # Forward the message to the model and show its reply.
        print("思考中...")
        print(f"AI: {chat.call_llm(user_input)}")


if __name__ == "__main__":
    main()