1.大模型可以用阿里百炼和DeepSeek,DeepSeek需要充值,阿里百炼有免费额度
2.为什么使用langchain4j,其支持的模型很多
3.如何使用langchain4j,可以参考官网,先把基础服务调通,官网地址如下: docs.langchain4j.dev/get-started
根据文档导入maven坐标
仅使用低级(low-level) API 时,可只引入这一个依赖
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-open-ai</artifactId>
<version>1.1.0</version>
</dependency>
需要更多高级功能时,再引入以下依赖
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j</artifactId>
<version>1.1.0</version>
</dependency>
pom管理
<dependencyManagement>
<dependencies>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-bom</artifactId>
<version>1.1.0</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>
// Quickstart: call the free langchain4j demo endpoint (an OpenAI-compatible proxy).
OpenAiChatModel model = OpenAiChatModel.builder()
.baseUrl("http://langchain4j.dev/demo/openai/v1")
.apiKey("demo")
.modelName("gpt-4o-mini")
.build();
// Blocking call: returns the whole answer in one response.
String answer = model.chat("Say 'Hello World'");
System.out.println(answer); // Hello World
具体实现
创建config
@Configuration
public class OpenAiConfig {
/**
 * ChatModel backed by Alibaba DashScope's OpenAI-compatible endpoint,
 * running the DeepSeek-R1 model.
 * NOTE(review): API key is hardcoded (masked) here — in real code load it
 * from an environment variable or configuration instead.
 */
@Bean
public ChatModel chatModelDeepSeek(){
return OpenAiChatModel.builder()
.baseUrl("https://dashscope.aliyuncs.com/compatible-mode/v1")
.apiKey("sk-***************")
.modelName("deepseek-r1")
.build();
}
}
controller调用
// Inject the ChatModel bean defined in OpenAiConfig.
@Resource
private ChatModel chatModelDeepSeek;

/**
 * Smoke-test endpoint: asks the model who it is and returns the reply.
 */
@GetMapping("/hello")
public String hello() {
    // BUGFIX: was `chatModelQwen.chat(...)`, which does not match the injected
    // field name `chatModelDeepSeek` and would not compile.
    return chatModelDeepSeek.chat("你是谁");
}
springboot结合langchain4j
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-spring-boot-starter</artifactId>
<version>1.1.0-beta7</version>
</dependency>
langchain4j.open-ai.chat-model.api-key=${OPENAI_API_KEY}
langchain4j.open-ai.chat-model.model-name=gpt-4o
langchain4j.open-ai.chat-model.log-requests=true
langchain4j.open-ai.chat-model.log-responses=true
...
您现在可以定义 AI 服务接口,并在其上使用 @AiService 注解:
/**
 * AI service interface — langchain4j generates the implementation at runtime.
 */
@AiService
interface Assistant {
/**
 * Sends the user message to the model with the fixed system prompt applied.
 */
@SystemMessage("You are a polite assistant")
String chat(String userMessage);
}
/** Exposes the AI service over HTTP. */
@RestController
class AssistantController {

    @Autowired
    Assistant assistant;

    /**
     * GET /chat?message=...
     * ROBUSTNESS: bind the query parameter explicitly by name instead of
     * relying on the -parameters compiler flag for name discovery.
     */
    @GetMapping("/chat")
    public String chat(@RequestParam("message") String message) {
        return assistant.chat(message);
    }
}
chatMessage介绍
流式调用
@Configuration
public class LLMConfig
{
/**
 * Streaming chat model (StreamingChatModel): tokens are pushed back
 * incrementally instead of returned in a single blocking response.
 * Backed by Alibaba DashScope's OpenAI-compatible endpoint (qwen-plus).
 *
 * @author zzyybs@126.com
 */
@Bean
public StreamingChatModel streamingChatModel(){
return OpenAiStreamingChatModel.builder()
.apiKey(System.getenv("aliQwen-api"))
.modelName("qwen-plus")
.baseUrl("https://dashscope.aliyuncs.com/compatible-mode/v1")
.build();
}
/**
 * High-level AI service wrapping the streaming model; langchain4j
 * generates the ChatAssistant implementation.
 */
@Bean
public ChatAssistant chatAssistant(StreamingChatModel streamingChatModel){
return AiServices.create(ChatAssistant.class, streamingChatModel);
}
}
/**
 * Streaming chat assistant; langchain4j generates the implementation.
 */
public interface ChatAssistant
{
/**
 * Streams the model's reply token-by-token as a reactive Flux.
 */
Flux<String> chatFlux(String prompt);
}
@RestController
@Slf4j
public class StreamingChatModelController
{
    // High-level API: our own interface backed by langchain4j AiServices.
    @Resource
    private ChatAssistant chatAssistant;

    /**
     * GET /chatstream/chat3?prompt=...
     * Streams the model reply as a Flux of tokens.
     */
    @GetMapping(value = "/chatstream/chat3")
    public Flux<String> chat3(@RequestParam(value = "prompt", defaultValue = "南京有什么好吃") String prompt)
    {
        // IDIOM: use the SLF4J logger provided by @Slf4j instead of System.out.
        log.info("---come in chat3");
        return chatAssistant.chatFlux(prompt);
    }
}
持久化(redis 或者 mysql 或者 mongo)此处用的redis
/**
 * Chat assistant with per-user conversation memory; langchain4j generates
 * the implementation.
 */
public interface ChatPersistenceAssistant
{
/**
 * Chat with memory keyed by user.
 *
 * @param userId user ID used as the memory key (@MemoryId)
 * @param message user message
 * @return {@link String } model reply
 */
String chat(@MemoryId Long userId, @UserMessage String message);
}
/**
 * ChatMemoryStore implementation that persists conversation history in Redis.
 * Each memory (conversation) is stored as a single JSON string under the key
 * CHAT_MEMORY_PREFIX + memoryId.
 */
@Component
public class RedisChatMemoryStore implements ChatMemoryStore
{
    public static final String CHAT_MEMORY_PREFIX = "CHAT_MEMORY:";

    @Resource
    private RedisTemplate<String,String> redisTemplate;

    /**
     * Loads all messages for a memory ID, deserialized from JSON.
     * Returns an empty, mutable list when nothing has been stored yet.
     */
    @Override
    public List<ChatMessage> getMessages(Object memoryId)
    {
        String retValue = redisTemplate.opsForValue().get(CHAT_MEMORY_PREFIX + memoryId);
        // ROBUSTNESS: the first call for a new memoryId yields null — return an
        // empty list instead of handing null to the deserializer.
        if (retValue == null || retValue.isEmpty()) {
            return new java.util.ArrayList<>();
        }
        return ChatMessageDeserializer.messagesFromJson(retValue);
    }

    /** Overwrites the stored history for a memory ID with the given messages. */
    @Override
    public void updateMessages(Object memoryId, List<ChatMessage> messages)
    {
        redisTemplate.opsForValue()
            .set(CHAT_MEMORY_PREFIX + memoryId, ChatMessageSerializer.messagesToJson(messages));
    }

    /** Deletes the stored history for a memory ID. */
    @Override
    public void deleteMessages(Object memoryId)
    {
        redisTemplate.delete(CHAT_MEMORY_PREFIX + memoryId);
    }
}
/**
 * Wires the chat model and the Redis-persisted chat-memory assistant.
 */
@Configuration
public class LLMConfig
{
    @Resource
    private RedisChatMemoryStore redisChatMemoryStore;

    /** qwen-plus via DashScope's OpenAI-compatible endpoint. */
    @Bean
    public ChatModel chatModel()
    {
        return OpenAiChatModel.builder()
                .baseUrl("https://dashscope.aliyuncs.com/compatible-mode/v1")
                .modelName("qwen-plus")
                .apiKey(System.getenv("aliQwen-api"))
                .build();
    }

    /**
     * Assistant whose per-user memory (a window of up to 1000 messages)
     * is persisted in Redis via redisChatMemoryStore.
     */
    @Bean
    public ChatPersistenceAssistant chatMemoryAssistant(ChatModel chatModel)
    {
        ChatMemoryProvider provider = memoryId -> {
            return MessageWindowChatMemory.builder()
                    .id(memoryId)
                    .maxMessages(1000)
                    .chatMemoryStore(redisChatMemoryStore)
                    .build();
        };
        return AiServices.builder(ChatPersistenceAssistant.class)
                .chatModel(chatModel)
                .chatMemoryProvider(provider)
                .build();
    }
}
@RestController
@Slf4j
public class ChatPersistenceController
{
    @Resource
    private ChatPersistenceAssistant chatPersistenceAssistant;

    /**
     * Demonstrates per-user memory persisted in Redis: two users introduce
     * themselves, then each asks the model to recall their own name.
     * http://localhost:9010/chatpersistence/redis
     */
    @GetMapping(value = "/chatpersistence/redis")
    public String testChatPersistence()
    {
        chatPersistenceAssistant.chat(1L, "你好!我的名字是redis");
        chatPersistenceAssistant.chat(2L, "你好!我的名字是nacos");
        // IDIOM: use the SLF4J logger provided by @Slf4j instead of System.out.
        String chat = chatPersistenceAssistant.chat(1L, "我的名字是什么");
        log.info("user 1 reply: {}", chat);
        chat = chatPersistenceAssistant.chat(2L, "我的名字是什么");
        log.info("user 2 reply: {}", chat);
        return "testChatPersistence success : "+ DateUtil.now();
    }
}
*向量数据库 数据转为浮点数数组
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-qdrant</artifactId>
<version>1.1.0-beta7</version>
</dependency>
/**
 * Beans for text embedding (DashScope) and vector storage (Qdrant).
 */
@Configuration
public class LLMConfig
{
    /** text-embedding-v3 via DashScope's OpenAI-compatible endpoint. */
    @Bean
    public EmbeddingModel embeddingModel()
    {
        return OpenAiEmbeddingModel.builder()
                .baseUrl("https://dashscope.aliyuncs.com/compatible-mode/v1")
                .modelName("text-embedding-v3")
                .apiKey(System.getenv("aliQwen-api"))
                .build();
    }

    /**
     * Low-level Qdrant client over gRPC (plaintext, local instance).
     *
     * @return client connected to 127.0.0.1:6334
     */
    @Bean
    public QdrantClient qdrantClient() {
        return new QdrantClient(
                QdrantGrpcClient.newBuilder("127.0.0.1", 6334, false).build());
    }

    /** langchain4j embedding store backed by the "test-qdrant" collection. */
    @Bean
    public EmbeddingStore<TextSegment> embeddingStore() {
        return QdrantEmbeddingStore.builder()
                .host("127.0.0.1")
                .port(6334)
                .collectionName("test-qdrant")
                .build();
    }
}
@RestController
@Slf4j
public class EmbeddinglController // NOTE(review): name has a typo ("Embeddingl"); kept to avoid breaking references
{
    @Resource
    private EmbeddingModel embeddingModel;
    @Resource
    private QdrantClient qdrantClient;
    @Resource
    private EmbeddingStore<TextSegment> embeddingStore;

    /**
     * Embeds a sample poem and returns the raw vector, to show what text
     * looks like after vectorization.
     * http://localhost:9012/embedding/embed
     */
    @GetMapping(value = "/embedding/embed")
    public String embed()
    {
        String prompt = """
                咏鸡
                鸡鸣破晓光,
                红冠映朝阳。
                金羽披霞彩,
                昂首步高岗。
                """;
        Response<Embedding> embeddingResponse = embeddingModel.embed(prompt);
        // IDIOM: use the SLF4J logger provided by @Slf4j instead of System.out.
        log.info("embedding response: {}", embeddingResponse);
        return embeddingResponse.content().toString();
    }

    /**
     * Creates the Qdrant collection "test-qdrant" (1024-dim, cosine distance)
     * — roughly `CREATE DATABASE test-qdrant` in MySQL terms.
     * http://localhost:9012/embedding/createCollection
     */
    @GetMapping(value = "/embedding/createCollection")
    public void createCollection()
    {
        var vectorParams = Collections.VectorParams.newBuilder()
                .setDistance(Collections.Distance.Cosine)
                .setSize(1024)
                .build();
        // NOTE(review): the async result is not awaited; failures (e.g. the
        // collection already exists) will not surface to the caller.
        qdrantClient.createCollectionAsync("test-qdrant", vectorParams);
    }

    /** Adds one embedded text record (with author metadata) to the store. */
    @GetMapping(value = "/embedding/add")
    public String add()
    {
        String prompt = """
                咏鸡
                鸡鸣破晓光,
                红冠映朝阳。
                金羽披霞彩,
                昂首步高岗。
                """;
        TextSegment segment1 = TextSegment.from(prompt);
        segment1.metadata().put("author", "zzyy");
        Embedding embedding1 = embeddingModel.embed(segment1).content();
        String result = embeddingStore.add(embedding1, segment1);
        log.info("added embedding id: {}", result);
        return result;
    }

    /** Nearest-neighbour search without a metadata filter. */
    @GetMapping(value = "/embedding/query1")
    public void query1(){
        Embedding queryEmbedding = embeddingModel.embed("咏鸡说的是什么").content();
        EmbeddingSearchRequest embeddingSearchRequest = EmbeddingSearchRequest.builder()
                .queryEmbedding(queryEmbedding)
                .maxResults(1)
                .build();
        EmbeddingSearchResult<TextSegment> searchResult = embeddingStore.search(embeddingSearchRequest);
        // BUGFIX: guard against an empty result — matches().get(0) would throw
        // IndexOutOfBoundsException when the store is empty.
        if (searchResult.matches().isEmpty()) {
            log.info("query1: no match found");
        } else {
            log.info("query1 match: {}", searchResult.matches().get(0).embedded().text());
        }
    }

    /**
     * Filtered search: author must equal "zzyy2". Records are stored with
     * author "zzyy", so this typically matches nothing — which previously
     * crashed with IndexOutOfBoundsException.
     */
    @GetMapping(value = "/embedding/query2")
    public void query2(){
        Embedding queryEmbedding = embeddingModel.embed("咏鸡").content();
        EmbeddingSearchRequest embeddingSearchRequest = EmbeddingSearchRequest.builder()
                .queryEmbedding(queryEmbedding)
                .filter(metadataKey("author").isEqualTo("zzyy2"))
                .maxResults(1)
                .build();
        EmbeddingSearchResult<TextSegment> searchResult = embeddingStore.search(embeddingSearchRequest);
        // BUGFIX: same empty-result guard as query1.
        if (searchResult.matches().isEmpty()) {
            log.info("query2: no match found");
        } else {
            log.info("query2 match: {}", searchResult.matches().get(0).embedded().text());
        }
    }
}
*RAG 增强检索 两个阶段 索引和检索
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-easy-rag</artifactId>
<version>1.1.0-beta7</version>
</dependency>
@Configuration
public class LLMConfig
{
@Bean
public ChatModel chatModel()
{
return OpenAiChatModel.builder()
.apiKey(System.getenv("aliQwen-api"))
.modelName("qwen-plus")
.baseUrl("https://dashscope.aliyuncs.com/compatible-mode/v1")
.build();
}
/**
* 需要预处理文档并将其存储在专门的嵌入存储(也称为矢量数据库)中。当用户提出问题时,这对于快速找到相关信息是必要的。
* 我们可以使用我们支持的 15 多个嵌入存储中的任何一个,但为了简单起见,我们将使用内存中的嵌入存储:
*
* https://docs.langchain4j.dev/integrations/embedding-stores/in-memory
*
* @return
*/
@Bean
public InMemoryEmbeddingStore<TextSegment> embeddingStore() {
return new InMemoryEmbeddingStore<>();
}
/**
 * RAG chat assistant; langchain4j generates the implementation.
 */
public interface ChatAssistant {
/**
 * Chat with the model.
 *
 * @param message user message
 * @return {@link String } model reply
 */
String chat(String message);
}
@RestController
@Slf4j
public class RAGController
{
@Resource
InMemoryEmbeddingStore<TextSegment> embeddingStore;
@Resource
ChatAssistant chatAssistant;
// http://localhost:9013/rag/add
@GetMapping(value = "/rag/add")
public String testAdd() throws FileNotFoundException
{
//Document document = FileSystemDocumentLoader.loadDocument("D:\44\alibaba-java.docx");
FileInputStream fileInputStream = new FileInputStream("D:\44\alibaba-java.docx");
Document document = new ApacheTikaDocumentParser().parse(fileInputStream);
EmbeddingStoreIngestor.ingest(document, embeddingStore);
String result = chatAssistant.chat("错误码00000和A0001分别是什么");
System.out.println(result);
return result;
}
}
*MCP(可类比为大模型版的 OpenFeign)
@RestController
public class McpCallServerController
{
    @Autowired
    private StreamingChatModel streamingChatModel;

    /**
     * Routes a question through the LLM, which can call the Baidu Map MCP
     * server as a tool while answering.
     *
     * Transport command breakdown:
     *   cmd /c  - Windows command interpreter, exits after the command runs
     *   npx -y  - run an npm package executable, auto-confirming prompts
     *   @baidumap/mcp-server-baidu-map - the MCP server npm package
     *   BAIDU_MAP_API_KEY - Baidu Map open-platform AK, read from the env
     */
    @GetMapping("/mcp/chat")
    public Flux<String> chat(@RequestParam("question") String question) throws Exception
    {
        // 1. Stdio transport that spawns the MCP server as a child process.
        McpTransport transport = new StdioMcpTransport.Builder()
                .command(List.of("cmd", "/c", "npx", "-y", "@baidumap/mcp-server-baidu-map"))
                .environment(Map.of("BAIDU_MAP_API_KEY", System.getenv("BAIDU_MAP_API_KEY")))
                .build();

        // 2. MCP client over that transport.
        McpClient mcpClient = new DefaultMcpClient.Builder()
                .transport(transport)
                .build();

        // 3. Expose the MCP server's tools to the model (like native function calling).
        ToolProvider toolProvider = McpToolProvider.builder()
                .mcpClients(mcpClient)
                .build();

        // 4. AiServices builds an implementation of McpService wired to the
        //    streaming model and the tool provider.
        McpService mcpService = AiServices.builder(McpService.class)
                .streamingChatModel(streamingChatModel)
                .toolProvider(toolProvider)
                .build();

        // 5. BUGFIX: the client must stay open while the Flux is streaming.
        //    Closing it in a finally block here shut it down before any tokens
        //    were emitted; close it only when the stream terminates.
        return mcpService.chat(question)
                .doFinally(signalType -> {
                    try {
                        mcpClient.close();
                    } catch (Exception ignored) {
                        // best-effort cleanup; nothing useful to do on failure
                    }
                });
    }
}
/**
 * AI service interface for MCP-backed chat; langchain4j generates the
 * implementation at runtime.
 */
public interface McpService
{
/**
 * Streams the model's answer to the question token-by-token.
 */
Flux<String> chat(String question);
}