Integration
Obtain an API key from the LLM vendor (ZhiPu AI in this example)

Import dependencies
```xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>3.5.3</version>
        <relativePath/>
    </parent>

    <groupId>com.kuang</groupId>
    <artifactId>springai</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <!-- Spring Boot 3.x requires Java 17 or later -->
        <maven.compiler.source>17</maven.compiler.source>
        <maven.compiler.target>17</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <spring-ai.version>1.0.0</spring-ai.version>
        <lombok.version>1.18.30</lombok.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.ai</groupId>
            <artifactId>spring-ai-starter-model-zhipuai</artifactId>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <version>${lombok.version}</version>
        </dependency>
    </dependencies>

    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>org.springframework.ai</groupId>
                <artifactId>spring-ai-bom</artifactId>
                <version>${spring-ai.version}</version>
                <type>pom</type>
                <scope>import</scope>
            </dependency>
            <dependency>
                <groupId>org.projectlombok</groupId>
                <artifactId>lombok</artifactId>
                <version>${lombok.version}</version>
            </dependency>
        </dependencies>
    </dependencyManagement>
</project>
```
Configuration file
```yaml
server:
  port: 8080
spring:
  ai:
    zhipuai:
      api-key: xxxxxxxxxxxxxxxxxx
```
Bean injection
```java
@Autowired
private ChatModel zhiPuAiChatModel;
```
Client initialization
```java
ChatClient client = ChatClient.create(zhiPuAiChatModel);
```
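As an alternative — a minimal sketch, assuming the same auto-configured ChatModel bean — a reusable ChatClient can be built once with default settings instead of being created per request:

```java
// Sketch: build one ChatClient with defaults (the system prompt below is illustrative only)
ChatClient client = ChatClient.builder(zhiPuAiChatModel)
        .defaultSystem("你是一个乐于助人的中文助手")
        .defaultAdvisors(new SimpleLoggerAdvisor())
        .build();
```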
Structured output
```java
record User(String userName, Integer age, Integer sex) {}

@GetMapping("/ai")
public String generation(@RequestParam(defaultValue = "你觉得知识能改变命运么?") String userInput) {
    ChatClient client = ChatClient.create(zhiPuAiChatModel);
    return client.prompt(userInput).call().content();
}

@GetMapping("/ai/json")
public Object generationJson(@RequestParam(defaultValue = "帮我生成用户信息") String userInput) {
    ChatClient client = ChatClient.create(zhiPuAiChatModel);
    return client.prompt(userInput).call().entity(User.class);
}

@GetMapping("/ai/json/list")
public Object generationJsonList(@RequestParam(defaultValue = "帮我生成五个用户信息") String userInput) {
    ChatClient client = ChatClient.create(zhiPuAiChatModel);
    return client.prompt(userInput).call().entity(new ParameterizedTypeReference<List<User>>() {});
}
```
Streaming response
```java
@GetMapping(value = "/ai/stream", produces = MediaType.TEXT_EVENT_STREAM_VALUE + ";charset=UTF-8")
public Flux<String> streamContent(@RequestParam(defaultValue = "讲一个笑话") String userInput) {
    ChatClient client = ChatClient.create(zhiPuAiChatModel);
    return client.prompt(userInput)
            .advisors(new SimpleLoggerAdvisor())
            .stream()
            .content();
}
```
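If more than the text delta is needed, the stream can also be consumed as full ChatResponse objects — a small sketch, assuming the same client setup as above:

```java
// Sketch: stream ChatResponse objects (instead of plain text) to inspect per-chunk metadata
Flux<ChatResponse> responses = ChatClient.create(zhiPuAiChatModel)
        .prompt("讲一个笑话")
        .stream()
        .chatResponse();
```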
Prompt templates
In artificial intelligence, a "prompt" is the text or instruction given to a model to steer it toward a particular output. It is what the user supplies when interacting with the model, describing the information, answer, or content the user wants back. Its purpose is to guide the model toward the desired response and give the user more control over what is generated.
For a language model, a prompt can be a short question, a full paragraph, or a set of instructions, depending on the user's needs and the scenario. When generating text, the model tries to interpret the prompt and produce a matching response. This is why the prompt matters: it directly shapes the content, style, and quality of the generated text.
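Before the template examples below, here is a minimal sketch (assuming the message classes and accessor names of the Spring AI 1.0 chat API) of building a Prompt explicitly from a system message and a user message:

```java
// Sketch: assemble a Prompt by hand and call the ChatModel directly
Prompt prompt = new Prompt(List.of(
        new SystemMessage("你是一名资深的音乐创作人"),   // illustrative system instruction
        new UserMessage("请帮我写一首歌,主题是晴天")));
ChatResponse response = zhiPuAiChatModel.call(prompt);
String answer = response.getResult().getOutput().getText();
```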
```java
@GetMapping("/ai/prompt/template")
public Object promptTemplate(@RequestParam(defaultValue = "晴天") String name) {
    ChatClient client = ChatClient.create(zhiPuAiChatModel);
    return client.prompt()
            .user(u -> u.text("请帮我写一首歌,主题是{name},内容围绕主题去写").param("name", name))
            .advisors(new SimpleLoggerAdvisor())
            .call()
            .content();
}

@GetMapping("/ai/prompt/template/map")
public Object promptTemplateMap(@RequestParam(defaultValue = "晴天") String name) {
    ChatClient client = ChatClient.create(zhiPuAiChatModel);
    return client.prompt()
            .user(u -> u.text("请帮我写一首歌,主题是{name},内容围绕主题去写").params(Map.of("name", name)))
            .advisors(new SimpleLoggerAdvisor())
            .call()
            .content();
}
```
Custom configuration for multiple vendors and models
Multiple vendors
Add DeepSeek
```xml
<dependency>
    <groupId>org.springframework.ai</groupId>
    <artifactId>spring-ai-starter-model-deepseek</artifactId>
</dependency>
```
```yaml
spring:
  ai:
    deepseek:
      api-key: xxxxxxxxxxxxxxxxxx
    zhipuai:
      api-key: xxxxxxxxxxxxxxxxxx
```
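With both starters on the classpath there are two auto-configured ChatModel beans, so injecting by the ChatModel interface alone becomes ambiguous. A minimal sketch (assuming the auto-configured ZhiPuAiChatModel and DeepSeekChatModel bean types; the endpoint path and parameter names are illustrative) of choosing a vendor per request:

```java
@Autowired
private ZhiPuAiChatModel zhiPuAiChatModel;

@Autowired
private DeepSeekChatModel deepSeekChatModel;

// Sketch: pick the vendor via a request parameter
@GetMapping("/ai/vendor")
public String byVendor(@RequestParam(defaultValue = "zhipu") String vendor,
                       @RequestParam(defaultValue = "你好") String userInput) {
    ChatModel model = "deepseek".equals(vendor) ? deepSeekChatModel : zhiPuAiChatModel;
    return ChatClient.create(model).prompt(userInput).call().content();
}
```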

Multiple models (different models from the same vendor)
```java
@Configuration
public class ZhiPuConfig {

    // Note: property placeholders use ${...}, not $(...)
    @Value("${spring.ai.zhipuai.api-key}")
    private String zpApiKey;

    @Bean
    public ChatClient zpChatGlm4vFlashClient() {
        ZhiPuAiApi zhiPuAiApi = new ZhiPuAiApi(zpApiKey);
        ZhiPuAiChatOptions options = ZhiPuAiChatOptions.builder().model("glm-4v-flash").build();
        ChatModel zhiPuChatModel = new ZhiPuAiChatModel(zhiPuAiApi, options);
        return ChatClient.builder(zhiPuChatModel).build();
    }

    @Bean
    public ChatClient zpChatGlm4PlusClient() {
        ZhiPuAiApi zhiPuAiApi = new ZhiPuAiApi(zpApiKey);
        ZhiPuAiChatOptions options = ZhiPuAiChatOptions.builder().model("glm-4-plus").build();
        ChatModel zhiPuChatModel = new ZhiPuAiChatModel(zhiPuAiApi, options);
        return ChatClient.builder(zhiPuChatModel).build();
    }
}
```
```java
// The beans defined above are ChatClient instances, so inject them as ChatClient and use them directly
@Autowired
private ChatClient zpChatGlm4vFlashClient;

@Autowired
private ChatClient zpChatGlm4PlusClient;

@GetMapping("/ai/client/configuration/model1")
public Object clientConfigurationModel1(@RequestParam(defaultValue = "请问下你是什么模型") String keyWord) {
    return zpChatGlm4vFlashClient.prompt(keyWord)
            .call()
            .content();
}

@GetMapping("/ai/client/configuration/model2")
public Object clientConfigurationModel2(@RequestParam(defaultValue = "请问下你是什么模型") String keyWord) {
    return zpChatGlm4PlusClient.prompt(keyWord)
            .call()
            .content();
}
```
Chat memory
Core features:
- Conversation history: records user questions and AI replies so information is not lost across multiple turns
- Context injection: automatically prepends past messages when calling the model, so the AI understands the current question in context
- Conversation isolation: separates different users' conversations by conversation ID, supporting multi-user scenarios
```java
@Autowired
private ChatMemory chatMemory;

@GetMapping("/ai/chat/memory")
public Object chatMemory(@RequestParam(defaultValue = "请问下你是什么模型") String keyWord) {
    ChatClient client = ChatClient.create(zhiPuAiChatModel);
    return client.prompt(keyWord)
            .advisors(new SimpleLoggerAdvisor(), MessageChatMemoryAdvisor.builder(chatMemory).build())
            .advisors(a -> a.param(ChatMemory.CONVERSATION_ID, "conversationId"))
            .call()
            .content();
}
```
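The auto-configuration provides an in-memory ChatMemory by default. A minimal sketch (assuming the MessageWindowChatMemory and InMemoryChatMemoryRepository types of the Spring AI 1.0 API) of defining one explicitly to cap the retained history:

```java
@Configuration
public class ChatMemoryConfig {

    // Sketch: keep only the most recent 20 messages per conversation, stored in memory
    @Bean
    public ChatMemory chatMemory() {
        return MessageWindowChatMemory.builder()
                .chatMemoryRepository(new InMemoryChatMemoryRepository())
                .maxMessages(20)
                .build();
    }
}
```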


Function calling
Function Calling is the core mechanism in Spring AI for connecting a large language model (LLM) with external tools. It lets the AI automatically invoke predefined tools (databases, APIs, calculators, and so on) during a conversation to fetch real-time data or perform actions, improving the accuracy and usefulness of its answers.
```java
public class DateTimeTool {

    @Tool(description = "获取当前时间")
    public String getCurrentDateTime() {
        return LocalDateTime.now().atZone(LocaleContextHolder.getTimeZone().toZoneId()).toString();
    }

    @Tool(description = "获取当前时间,带时区,入参是时区如CTT,出参是时间如2025-08-10 00:00:00")
    public String getCurrentDateTimeWithZone(String zoneId) {
        // Resolve short zone IDs such as "CTT" via ZoneId.SHORT_IDS, then format the current time in that zone
        ZoneId zone = ZoneId.of(zoneId, ZoneId.SHORT_IDS);
        return ZonedDateTime.now(zone).format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"));
    }
}
```
```java
@GetMapping("/ai/tools")
public Object tools(@RequestParam(defaultValue = "CTT") String keyWord) {
    ChatClient client = ChatClient.create(zhiPuAiChatModel);
    return client.prompt(keyWord)
            .advisors(new SimpleLoggerAdvisor())
            .tools(new DateTimeTool())
            .call()
            .content();
}
```
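If the same tools should be available on every request, they can also be registered once when the client is built — a sketch, assuming the defaultTools method on ChatClient.Builder:

```java
// Sketch: register DateTimeTool as a default tool for all prompts sent through this client
ChatClient client = ChatClient.builder(zhiPuAiChatModel)
        .defaultTools(new DateTimeTool())
        .build();
```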


MCP integration
Spring AI MCP refers to the Model Context Protocol, an open standard protocol introduced by Anthropic that Spring AI integrates deeply. It standardizes how AI models interact with external tools, data sources, and services, addressing the complexity of tool integration in traditional AI applications.
Core role: through a standardized protocol (based on JSON-RPC), it lets AI models call databases, APIs, and other external tools safely and efficiently, removing ad-hoc integration work.
Key advantages: a unified interaction standard, secure invocation, and extensibility to many scenarios, helping AI applications connect quickly to existing systems.
Create the MCP server
Create a new module and add the following dependency:
```xml
<dependency>
    <groupId>org.springframework.ai</groupId>
    <artifactId>spring-ai-starter-mcp-server-webmvc</artifactId>
</dependency>
```
```java
public interface TestService {

    String recommendSong();

    String recommendBook();
}
```
```java
@Service
public class TestServiceImpl implements TestService {

    @Override
    @Tool(description = "推荐一首歌")
    public String recommendSong() {
        return "《反方向的钟》";
    }

    @Override
    @Tool(description = "推荐一本书")
    public String recommendBook() {
        return "《活着》";
    }
}
```
Expose the service's tool methods through a ToolCallbackProvider
```java
@Configuration
public class ToolCallbackProviderConfig {

    @Bean
    public ToolCallbackProvider recommendTools(TestService testService) {
        return MethodToolCallbackProvider.builder().toolObjects(testService).build();
    }
}
```
Update the configuration file
```yaml
server:
  port: 8090
spring:
  ai:
    mcp:
      server:
        name: mcp-server
        sse-message-endpoint: /mcp/message
        sse-endpoint: /sse
```
When the tools register successfully, a log like the following is printed:

Create the MCP client
Add the dependency
```xml
<!-- The client module needs the MCP client starter, not the server starter -->
<dependency>
    <groupId>org.springframework.ai</groupId>
    <artifactId>spring-ai-starter-mcp-client</artifactId>
</dependency>
```
Configuration file
```yaml
server:
  port: 8100
spring:
  ai:
    mcp:
      client:
        name: mcp-client
        sse:
          connections:
            server1:
              url: http://localhost:8090
              sse-endpoint: /sse
        toolcallback:
          enabled: true
```
```java
@Autowired
private ToolCallbackProvider toolCallbackProvider;

@GetMapping("/ai/mcp")
public Object tools(@RequestParam(defaultValue = "input") String input) {
    ChatClient client = ChatClient.create(zhiPuAiChatModel);
    return client.prompt(input)
            .advisors(new SimpleLoggerAdvisor())
            .toolCallbacks(toolCallbackProvider)
            .call()
            .content();
}
```
On startup, the mcp-server prints a log like the following, indicating that the connection succeeded:

RAG integration
Core idea
RAG (Retrieval-Augmented Generation) has the model consult an external knowledge base first and then generate an answer from the retrieved results, which curbs hallucinated answers. With RAG integrated into Spring AI you can:
- Look things up before answering: when a user asks a question, relevant information is first retrieved from corporate documents or databases, so the model does not make things up;
- Connect your own data: internal data such as PDFs, Excel files, and APIs can be plugged in, so the AI knows your organization's own material;
- Simplify development: Spring ecosystem tooling (auto-configuration, the security framework) wires up the pipeline quickly, without a pile of low-level code.
Key components and advantages
- Document processing: converts files into a searchable index (think of a library catalogue)
- Smart retrieval: matches the most relevant passages to the question, with tunable retrieval precision
- Context fusion: feeds the retrieved material to the model as reference, so generated answers come with supporting evidence
- Spring support: one-step integration with databases, permission management, and dynamic parameter tuning, suitable for enterprise scenarios such as customer service and internal Q&A
In one sentence
Spring AI lets RAG plug easily into your own data: retrieving before generating makes the AI's answers more accurate, and both development and operations stay simple.
Install Chroma
```bash
# Pull the official image and start a Chroma instance with persistent storage
docker run -d \
  -v $(pwd)/chromadb:/chromadb \
  -p 8000:8000 \
  --name chroma_db \
  ghcr.io/chroma-core/chroma:latest \
  run --path /chromadb --host 0.0.0.0 --port 8000
```

Import dependencies
```xml
<dependency>
    <groupId>org.springframework.ai</groupId>
    <artifactId>spring-ai-chroma-store</artifactId>
</dependency>
<dependency>
    <groupId>org.springframework.ai</groupId>
    <artifactId>spring-ai-tika-document-reader</artifactId>
</dependency>
<dependency>
    <groupId>org.springframework.ai</groupId>
    <artifactId>spring-ai-advisors-vector-store</artifactId>
</dependency>
```
Configuration class
```java
@Configuration
public class ChromaConfig {

    @Bean
    public DocumentTransformer documentTransformer() {
        return new TokenTextSplitter();
    }

    @Bean
    public ChromaApi chromaApi(RestClient.Builder restClientBuilder) {
        String chromaUrl = "http://127.0.0.1:8000";
        return new ChromaApi(chromaUrl, restClientBuilder, new ObjectMapper());
    }

    @Bean
    @Lazy
    public VectorStore chromaVectorStore(EmbeddingModel embeddingModel, ChromaApi chromaApi) {
        return ChromaVectorStore.builder(chromaApi, embeddingModel)
                .collectionName("TestCollection")
                .initializeSchema(true)
                .build();
    }
}
```
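The no-argument TokenTextSplitter above uses default chunking. Chunking can also be tuned — a sketch replacing the bean above, assuming the five-argument constructor (chunk size in tokens, minimum chunk size in characters, minimum chunk length to embed, maximum number of chunks, keep separators); the values shown are illustrative:

```java
// Sketch: a more explicit splitter configuration
@Bean
public DocumentTransformer documentTransformer() {
    return new TokenTextSplitter(800, 350, 5, 10000, true);
}
```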
Initialize the documents
```java
@Component
public class InitConfig {

    @Autowired
    private DocumentTransformer documentTransformer;

    @Autowired
    private VectorStore vectorStore;

    @PostConstruct
    public void init() {
        // Read the PDF with Tika, split it into chunks, then write the chunks into the vector store
        Resource resource = new PathResource("E:/桌面/简历.pdf");
        TikaDocumentReader tikaDocumentReader = new TikaDocumentReader(resource);
        List<Document> documents = tikaDocumentReader.get();
        List<Document> transformerDocs = documentTransformer.apply(documents);
        vectorStore.accept(transformerDocs);
    }
}
```
Start the project and test
```java
@Autowired
private VectorStore vectorStore;

@GetMapping("/ai/rag")
public Object rag(@RequestParam(defaultValue = "说下项目经历") String input) {
    ChatClient client = ChatClient.create(zhiPuAiChatModel);
    return client.prompt(input)
            .advisors(new SimpleLoggerAdvisor(), new QuestionAnswerAdvisor(vectorStore))
            .tools(new DateTimeTool())
            .call()
            .content();
}
```
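Retrieval precision can be tuned on the advisor — a sketch, assuming the QuestionAnswerAdvisor builder and SearchRequest API of Spring AI 1.0; the values are illustrative:

```java
// Sketch: return at most 5 chunks and drop weakly related ones via a similarity threshold
QuestionAnswerAdvisor qaAdvisor = QuestionAnswerAdvisor.builder(vectorStore)
        .searchRequest(SearchRequest.builder()
                .topK(5)
                .similarityThreshold(0.5)
                .build())
        .build();
```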

Miscellaneous
Log output
```yaml
logging:
  level:
    org:
      springframework:
        ai:
          chat:
            client:
              advisor: DEBUG
```