diff --git a/pom.xml b/pom.xml
index 16afdb3..79326ea 100644
--- a/pom.xml
+++ b/pom.xml
@@ -18,6 +18,10 @@
         <dependency>
             <groupId>org.springframework.boot</groupId>
             <artifactId>spring-boot-starter-web</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-webflux</artifactId>
+        </dependency>
         <dependency>
             <groupId>dev.langchain4j</groupId>
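
The spring-boot-starter-webflux dependency is added for Project Reactor's Flux type, which the new streaming endpoint below returns. Because spring-boot-starter-web stays on the classpath, the application keeps running on the servlet (Spring MVC) stack; MVC streams the Flux<String> response itself, so no further configuration is needed for the reactive return type.
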
diff --git a/src/main/java/com/ai/config/LanguageModelConfig.java b/src/main/java/com/ai/config/LanguageModelConfig.java
index 2a0aba2..08ba9d0 100644
--- a/src/main/java/com/ai/config/LanguageModelConfig.java
+++ b/src/main/java/com/ai/config/LanguageModelConfig.java
@@ -1,8 +1,10 @@
package com.ai.config;
import dev.langchain4j.community.model.dashscope.QwenLanguageModel;
+import dev.langchain4j.community.model.dashscope.QwenStreamingChatModel;
import dev.langchain4j.model.language.LanguageModel;
import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@@ -22,4 +24,12 @@ public class LanguageModelConfig {
.modelName(modelName)
.build();
}
+
+    @Bean
+    public QwenStreamingChatModel qwenStreamingChatModel() {
+        return QwenStreamingChatModel.builder()
+                .apiKey(apiKey)
+                .modelName(modelName)
+                .build();
+    }
}
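
The newly imported @ConditionalOnProperty annotation is not applied anywhere in this file yet. A minimal sketch of how it could be used (not part of this diff; the property name langchain4j.qwen.streaming-enabled and its default are assumptions) would gate the streaming bean behind a configuration flag:

    @Bean
    @ConditionalOnProperty(name = "langchain4j.qwen.streaming-enabled",
                           havingValue = "true", matchIfMissing = true)
    public QwenStreamingChatModel qwenStreamingChatModel() {
        // Same externalized apiKey/modelName values as the blocking Qwen model bean above.
        return QwenStreamingChatModel.builder()
                .apiKey(apiKey)
                .modelName(modelName)
                .build();
    }

With matchIfMissing = true the bean is still created when the property is absent, so default behavior is unchanged.
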
diff --git a/src/main/java/com/ai/controller/LangChainController.java b/src/main/java/com/ai/controller/LangChainController.java
index ca95586..fa461a2 100644
--- a/src/main/java/com/ai/controller/LangChainController.java
+++ b/src/main/java/com/ai/controller/LangChainController.java
@@ -2,11 +2,14 @@ package com.ai.controller;
import com.ai.service.*;
import dev.langchain4j.community.model.dashscope.QwenChatModel;
+import dev.langchain4j.community.model.dashscope.QwenStreamingChatModel;
import dev.langchain4j.community.model.zhipu.ZhipuAiImageModel;
import dev.langchain4j.data.image.Image;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
+import dev.langchain4j.model.chat.response.ChatResponse;
+import dev.langchain4j.model.chat.response.StreamingChatResponseHandler;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.service.AiServices;
import org.springframework.beans.factory.annotation.Autowired;
@@ -14,6 +17,7 @@ import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
+import reactor.core.publisher.Flux;
import java.net.URI;
@@ -42,6 +46,9 @@ public class LangChainController {
@Autowired
private EmbeddingService embeddingService;
+ @Autowired
+ private QwenStreamingChatModel qwenStreamingChatModel;
+
@GetMapping("/normal/chat")
public String normalChat(@RequestParam("input") String input) {
System.out.println("start normal chat...");
@@ -91,4 +98,31 @@ public class LangChainController {
embeddingService.embedding(input);
return segmentAssist.chat(input);
}
+
+    @GetMapping(value = "/stream/chat", produces = "text/event-stream;charset=UTF-8")
+    public Flux<String> streamChat(@RequestParam("input") String input) {
+        System.out.println("start stream chat...");
+        Flux<String> flux = Flux.create(fluxSink -> {
+            qwenStreamingChatModel.chat(input, new StreamingChatResponseHandler() {
+                @Override
+                public void onPartialResponse(String partialResponse) {
+                    fluxSink.next(partialResponse);
+                    System.out.println(partialResponse);
+                }
+
+                @Override
+                public void onCompleteResponse(ChatResponse chatResponse) {
+                    fluxSink.complete();
+                    System.out.println("stream completed");
+                }
+
+                @Override
+                public void onError(Throwable throwable) {
+                    fluxSink.error(throwable);
+                    System.err.println(throwable.getMessage());
+                }
+            });
+        });
+        return flux;
+    }
}
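
For reference, a minimal client-side sketch of consuming the new endpoint with WebFlux's WebClient, which the spring-boot-starter-webflux dependency added above already provides. This is not part of the change; the class name, base URL, and input value are illustrative only:

    import org.springframework.http.MediaType;
    import org.springframework.web.reactive.function.client.WebClient;
    import reactor.core.publisher.Flux;

    public class StreamChatClient {
        public static void main(String[] args) {
            WebClient client = WebClient.create("http://localhost:8080");
            Flux<String> tokens = client.get()
                    .uri(uriBuilder -> uriBuilder
                            .path("/langchain/stream/chat")
                            .queryParam("input", "hello")
                            .build())
                    .accept(MediaType.TEXT_EVENT_STREAM)
                    .retrieve()
                    .bodyToFlux(String.class);
            // Print each partial response as it arrives; block so the JVM stays alive until the stream completes.
            tokens.doOnNext(System.out::print).blockLast();
        }
    }
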
diff --git a/src/main/java/com/ai/http/chat.http b/src/main/java/com/ai/http/chat.http
index c4dd31a..48049e9 100644
--- a/src/main/java/com/ai/http/chat.http
+++ b/src/main/java/com/ai/http/chat.http
@@ -21,4 +21,7 @@ GET http://localhost:8080/langchain/high/call?input=用5个菲林抽取5个代
GET http://localhost:8080/langchain/embedd/chat?input=我想要个便宜的办公笔记本,有什么推荐吗
### Test the chat endpoint of LangChainController
-GET http://localhost:8080/langchain/normal/chat?input=今天天气如何
\ No newline at end of file
+GET http://localhost:8080/langchain/normal/chat?input=今天天气如何
+
+### Test the stream chat endpoint of LangChainController
+GET http://localhost:8080/langchain/stream/chat?input=今天天气如何
\ No newline at end of file