LangChain 小试牛刀
1、使用Jupyter notebook进行学习
注:本篇使用的是windows10系统,cmd命令均在jupyter -> 新建terminals中运行,可通过Setting->Theme->Dark修改主题方便使用
安装Anaconda后找到Jupyter快捷方式并查看属性,复制目标输入框内的内容
D:\Anaconda3\python.exe D:\Anaconda3\cwp.py D:\Anaconda3 D:\Anaconda3\python.exe D:\Anaconda3\Scripts\jupyter-notebook-script.py 【此处替换你的目录】
例如:
D:\Anaconda3\python.exe D:\Anaconda3\cwp.py D:\Anaconda3 D:\Anaconda3\python.exe D:\Anaconda3\Scripts\jupyter-notebook-script.py E:\langchain
在命令行中执行,将会从E:\langchain下启动Jupyter
2、安装依赖包 因为我使用的是 Python 3,下方均使用 pip3 进行安装
新建Terminal窗口,并运行安装命令
pip3 install langchain
也可以在ipynb中安装,前面加上!即可,之后不再赘述
!pip3 install langchain
3、禁用LangSmith
新建Notebook
编写 ipynb 代码,禁用 LangSmith(前期学习不建议使用)
import getpass
import os
# Calls wrapped in a tracing_context block can disable uploads to LangSmith.
from langsmith import tracing_context
4、调用语言模型(这里使用火山方舟API)
pip3 install -qU langchain-openai
from langchain_openai import ChatOpenAI
# Set the API key and API base URL (OpenAI-compatible Volcano Ark endpoint).
os.environ["OPENAI_API_KEY"] = getpass.getpass() # for local use you may hard-code the key instead
os.environ["OPENAI_API_BASE"] = "https://ark.cn-beijing.volces.com/api/v3"
model = ChatOpenAI(model="ep-***") # name of the model inference endpoint
测试调用
from langchain_core.messages import HumanMessage, SystemMessage

# The system message sets the task; the human message carries the text to translate.
messages = [
    SystemMessage(content="Translate the following from English into Chinese"),
    HumanMessage(content="hi!"),
]

# Disable LangSmith trace upload for this call.
with tracing_context(enabled=False):
    result = model.invoke(messages)
print(result)
5、使用Parser
使用parser提取结果(1)
from langchain_core.output_parsers import StrOutputParser
from langchain_core.messages import HumanMessage, SystemMessage

# StrOutputParser extracts the plain string content from the model's message.
parser = StrOutputParser()
messages = [
    SystemMessage(content="Translate the following from English into Chinese"),
    HumanMessage(content="hi!"),
]

# Disable LangSmith trace upload, invoke the model, then parse the result.
with tracing_context(enabled=False):
    result = model.invoke(messages)
    parserResult = parser.invoke(result)
print(parserResult)
或使用链式调用parser(2)
from langchain_core.output_parsers import StrOutputParser
from langchain_core.messages import HumanMessage, SystemMessage

# Same as above, but composed with LCEL's pipe operator instead of two invokes.
parser = StrOutputParser()
messages = [
    SystemMessage(content="Translate the following from English into Chinese"),
    HumanMessage(content="hi!"),
]

# Disable LangSmith trace upload; `model | parser` runs the model then the parser.
with tracing_context(enabled=False):
    chain = model | parser
    parserResult = chain.invoke(messages)
print(parserResult)
6、使用提示词模板
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser

parser = StrOutputParser()

# Template with two placeholders: {language} in the system prompt, {text} as user input.
system_template = "Translate the following into {language}:"
prompt_template = ChatPromptTemplate.from_messages(
    [("system", system_template), ("user", "{text}")]
)

with tracing_context(enabled=False):
    # Render the prompt template to inspect the generated messages.
    prompt_result = prompt_template.invoke({"language": "chinese", "text": "hi"})
    print(prompt_result.to_messages())

    # Chain: template -> model -> parser.
    chain = prompt_template | model | parser
    # Keys passed to invoke must match the placeholder names defined in the template.
    result = chain.invoke({"language": "chinese", "text": "hello world!"})
    print(result)
7、使用LangServe部署一个应用服务器
安装依赖包
pip3 install "langserve[all]"
新建python文件serve.py
import getpass
import os

from fastapi import FastAPI
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI
from langserve import add_routes

# 1. Create prompt template
system_template = "Translate the following into {language}:"
prompt_template = ChatPromptTemplate.from_messages([
    ('system', system_template),
    ('user', '{text}')
])

# Set the API key and base URL for the OpenAI-compatible endpoint.
os.environ["OPENAI_API_KEY"] = getpass.getpass()  # for local use you may hard-code the key instead
os.environ["OPENAI_API_BASE"] = "https://ark.cn-beijing.volces.com/api/v3"  # NOTE(review): assumed endpoint — confirm
os.environ["OPENAI_API_TYPE"] = "openai"  # usually unnecessary unless the provider requires it

# 2. Create model
model = ChatOpenAI(model="ep-***")  # name of the model inference endpoint

# 3. Create parser
parser = StrOutputParser()

# 4. Create chain
chain = prompt_template | model | parser

# 5. App definition
app = FastAPI(
    title="LangChain Server",
    version="1.0",
    description="A simple API server using LangChain's Runnable interfaces",
)

# 6. Adding chain route
add_routes(
    app,
    chain,
    path="/chain",
)

if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="localhost", port=8000)
执行
python serve.py
访问 http://localhost:8000/chain/playground/ 即可使用
http://localhost:8000/docs 查看接口文档
8、服务启动后,即可在其他应用代码中直接请求调用
from langserve import RemoteRunnable

# Call the chain deployed at /chain as if it were a local Runnable.
remote_chain = RemoteRunnable("http://localhost:8000/chain")
translation = remote_chain.invoke({"language": "chinese", "text": "hello world"})
print(translation)
或者使用requests
import requests

url = "http://localhost:8000/chain/invoke"
# LangServe's /invoke endpoint expects the chain input nested under the "input" key.
payload = {
    "input": {
        "language": "chinese",
        "text": "hello world"
    }
}
headers = {
    "Content-Type": "application/json"
}
# A timeout prevents the client from hanging indefinitely if the server is unreachable.
response = requests.post(url, json=payload, headers=headers, timeout=30).json()
print(response['output'])