# 1. OpenAI (GPT-3.5/4) wrapper
from langchain.llms.base import LLM
from openai import OpenAI
from langchain_core.messages.ai import AIMessage
from typing import Any, List, Dict
# 替换为你的OpenAI API Key
openai_api_key = "你的OpenAI API密钥"
# 国内用户需配置代理(可选)
# import os
# os.environ["HTTP_PROXY"] = "http://127.0.0.1:7890"
# os.environ["HTTPS_PROXY"] = "http://127.0.0.1:7890"
class ChatOpenAI(LLM):
    """LangChain LLM wrapper around the OpenAI chat-completions API.

    Conversation state is passed through the ``history`` argument of each
    method; the list is mutated in place (user prompt and assistant reply
    are appended) so callers can reuse it across turns.
    """

    # Declared as a pydantic field but not read by the methods below, which
    # take history as a parameter instead. NOTE(review): wire this in or drop it.
    history: List[Dict[str, str]] = []
    client: Any = None  # OpenAI SDK client, created in __init__

    def __init__(self, **kwargs: Any) -> None:
        # Forward kwargs so pydantic-declared fields of the LLM base
        # remain settable by callers (the original swallowed them).
        super().__init__(**kwargs)
        self.client = OpenAI(api_key=openai_api_key)

    @property
    def _llm_type(self) -> str:
        return "ChatOpenAI"

    def invoke(self, prompt: str, history: List[Dict[str, str]] = None) -> AIMessage:
        """Send ``prompt`` (plus prior ``history``) and return the reply.

        ``history`` is mutated in place so the same list can carry a
        multi-turn conversation; a fresh list is used when None is given.
        """
        if history is None:
            history = []
        history.append({"role": "user", "content": prompt})
        response = self.client.chat.completions.create(
            model="gpt-3.5-turbo",  # gpt-4 / gpt-4o also work here
            messages=history,
            temperature=0.7,
        )
        result = response.choices[0].message.content
        history.append({"role": "assistant", "content": result})
        return AIMessage(content=result)

    def _call(self, prompt: str, history: List[Dict[str, str]] = None,
              stop: Any = None, **kwargs: Any) -> str:
        """LangChain entry point; returns the plain reply text.

        ``stop`` and extra kwargs are accepted so the LangChain base class
        can call ``_call(prompt, stop=..., run_manager=...)`` without
        clobbering ``history``; they are not forwarded to the API.
        """
        ai_message = self.invoke(prompt, history)
        return ai_message.content

    def stream(self, prompt: str, history: List[Dict[str, str]] = None):
        """Yield the assistant reply incrementally as text chunks.

        ``history`` gets the user prompt appended up front and the full
        assembled reply appended once streaming finishes.
        """
        if history is None:
            history = []
        history.append({"role": "user", "content": prompt})
        response = self.client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=response_model_history(history) if False else history,
            stream=True,
            temperature=0.7,
        )
        full_content = ""
        for chunk in response:
            # Some chunks (e.g. the terminal one) carry no delta content.
            if chunk.choices and chunk.choices[0].delta.content:
                content = chunk.choices[0].delta.content
                full_content += content
                yield content
        history.append({"role": "assistant", "content": full_content})
# 测试
# Quick manual smoke test (needs a valid OpenAI API key).
if __name__ == "__main__":
    chat_model = ChatOpenAI()
    reply = chat_model.invoke("你好,介绍下自己")
    print(reply.content)
# 2. Baidu ERNIE (Wenxin Yiyan) wrapper
from langchain.llms.base import LLM
from erniebot import ChatCompletion
import os
from langchain_core.messages.ai import AIMessage
from typing import Any, List, Dict
# 配置百度文心一言密钥
os.environ["EB_API_KEY"] = "你的文心一言API Key"
os.environ["EB_SECRET_KEY"] = "你的文心一言Secret Key"
class ChatERNIE(LLM):
    """LangChain LLM wrapper around Baidu's ERNIE (erniebot) chat API.

    Unlike the OpenAI wrapper, the outgoing message list is built as a copy
    of ``history``; the caller's list is only updated after a successful call.
    """

    # Declared as a pydantic field but not read by the methods below, which
    # take history as a parameter instead. NOTE(review): wire this in or drop it.
    history: List[Dict[str, str]] = []

    @property
    def _llm_type(self) -> str:
        return "ChatERNIE"

    def invoke(self, prompt: str, history: List[Dict[str, str]] = None) -> AIMessage:
        """Send ``prompt`` (plus prior ``history``) and return the reply.

        ``history`` is only mutated after the API call succeeds, so a failed
        request leaves the caller's conversation state untouched.
        """
        if history is None:
            history = []
        # Re-shape the history into the message format the ERNIE API expects.
        messages = [{"role": msg["role"], "content": msg["content"]} for msg in history]
        messages.append({"role": "user", "content": prompt})
        response = ChatCompletion.create(
            model="ernie-4.0",  # ernie-3.5 / ernie-4.0-turbo also work here
            messages=messages,
            temperature=0.7,
        )
        result = response.result
        history.append({"role": "user", "content": prompt})
        history.append({"role": "assistant", "content": result})
        return AIMessage(content=result)

    def _call(self, prompt: str, history: List[Dict[str, str]] = None,
              stop: Any = None, **kwargs: Any) -> str:
        """LangChain entry point; returns the plain reply text.

        ``stop`` and extra kwargs are accepted so the LangChain base class
        can call ``_call(prompt, stop=..., run_manager=...)`` without
        clobbering ``history``; they are not forwarded to the API.
        """
        ai_message = self.invoke(prompt, history)
        return ai_message.content

    def stream(self, prompt: str, history: List[Dict[str, str]] = None):
        """Yield the assistant reply incrementally as text chunks.

        ``history`` receives the user prompt and the full assembled reply
        once streaming finishes.
        """
        if history is None:
            history = []
        messages = [{"role": msg["role"], "content": msg["content"]} for msg in history]
        messages.append({"role": "user", "content": prompt})
        response = ChatCompletion.create(
            model="ernie-4.0",
            messages=messages,
            stream=True,
            temperature=0.7,
        )
        full_content = ""
        for chunk in response:
            # Skip chunks without payload text.
            if chunk.result:
                content = chunk.result
                full_content += content
                yield content
        history.append({"role": "user", "content": prompt})
        history.append({"role": "assistant", "content": full_content})
# 测试
# Quick manual smoke test (needs valid ERNIE credentials).
if __name__ == "__main__":
    chat_model = ChatERNIE()
    reply = chat_model.invoke("你好,介绍下自己")
    print(reply.content)
# 3. Alibaba Tongyi Qianwen (Qwen) wrapper
from langchain.llms.base import LLM
from openai import OpenAI
from langchain_core.messages.ai import AIMessage
from typing import Any, List, Dict
# 替换为你的通义千问API Key
qwen_api_key = "你的通义千问API密钥"
class ChatQwen(LLM):
    """LangChain LLM wrapper for Alibaba Qwen via its OpenAI-compatible endpoint.

    Conversation state is passed through the ``history`` argument of each
    method; the list is mutated in place (user prompt and assistant reply
    are appended) so callers can reuse it across turns.
    """

    # Declared as a pydantic field but not read by the methods below, which
    # take history as a parameter instead. NOTE(review): wire this in or drop it.
    history: List[Dict[str, str]] = []
    client: Any = None  # OpenAI SDK client pointed at the DashScope endpoint

    def __init__(self, **kwargs: Any) -> None:
        # Forward kwargs so pydantic-declared fields of the LLM base
        # remain settable by callers (the original swallowed them).
        super().__init__(**kwargs)
        # Qwen exposes an OpenAI-compatible API, so the OpenAI SDK is reused.
        self.client = OpenAI(
            api_key=qwen_api_key,
            base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
        )

    @property
    def _llm_type(self) -> str:
        return "ChatQwen"

    def invoke(self, prompt: str, history: List[Dict[str, str]] = None) -> AIMessage:
        """Send ``prompt`` (plus prior ``history``) and return the reply.

        ``history`` is mutated in place so the same list can carry a
        multi-turn conversation; a fresh list is used when None is given.
        """
        if history is None:
            history = []
        history.append({"role": "user", "content": prompt})
        response = self.client.chat.completions.create(
            model="qwen-turbo",  # qwen-plus / qwen-max also work here
            messages=history,
            temperature=0.7,
        )
        result = response.choices[0].message.content
        history.append({"role": "assistant", "content": result})
        return AIMessage(content=result)

    def _call(self, prompt: str, history: List[Dict[str, str]] = None,
              stop: Any = None, **kwargs: Any) -> str:
        """LangChain entry point; returns the plain reply text.

        ``stop`` and extra kwargs are accepted so the LangChain base class
        can call ``_call(prompt, stop=..., run_manager=...)`` without
        clobbering ``history``; they are not forwarded to the API.
        """
        ai_message = self.invoke(prompt, history)
        return ai_message.content

    def stream(self, prompt: str, history: List[Dict[str, str]] = None):
        """Yield the assistant reply incrementally as text chunks.

        ``history`` gets the user prompt appended up front and the full
        assembled reply appended once streaming finishes.
        """
        if history is None:
            history = []
        history.append({"role": "user", "content": prompt})
        response = self.client.chat.completions.create(
            model="qwen-turbo",
            messages=history,
            stream=True,
            temperature=0.7,
        )
        full_content = ""
        for chunk in response:
            # Some chunks (e.g. the terminal one) carry no delta content.
            if chunk.choices and chunk.choices[0].delta.content:
                content = chunk.choices[0].delta.content
                full_content += content
                yield content
        history.append({"role": "assistant", "content": full_content})
# 测试
# Quick manual smoke test (needs a valid DashScope API key).
if __name__ == "__main__":
    chat_model = ChatQwen()
    reply = chat_model.invoke("你好,介绍下自己")
    print(reply.content)