feat(chat): implement core features of face-based AI chat

- Add the mini-program face chat controller AppChatController, supporting session creation, sending/receiving messages, history queries, and session closing
- Integrate the Zhipu GLM model client GlmClient, supporting streaming text generation with chunk callbacks
- Add chat session and message entity classes plus MyBatis mappings for data persistence
- Provide the FaceChatService interface and implementation, encapsulating chat business logic including synchronous and streaming message sending (see the illustrative sketch below)
- Introduce the zai-sdk dependency to call Zhipu AI large-model capabilities
- Support unique session management keyed by face ID, with user permission checks
- Message records include role, content, trace ID, and latency information for easier debugging and analysis
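The FaceChatService mentioned above is among the 18 changed files but does not appear in this excerpt. Purely to illustrate the shape those bullets imply, a sketch might look like the following; every name and signature here is a guess, not the committed code:

import java.util.List;
import java.util.function.Consumer;

// Illustrative sketch only: the real interface in this commit may use different names and types.
public interface FaceChatService {

    // One unique session per face ID; callers are validated against memberId before any access.
    Long openSession(Long memberId, Long faceId);

    // Synchronous send: returns the assistant's full reply.
    String sendMessage(Long memberId, Long sessionId, String content);

    // Streaming send: reply chunks are pushed to the consumer as they are generated.
    String sendMessageStream(Long memberId, Long sessionId, String content, Consumer<String> chunkConsumer);

    // History query; each record carries role, content, trace ID and latency.
    List<?> listMessages(Long memberId, Long sessionId);

    void closeSession(Long memberId, Long sessionId);
}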
2025-12-11 17:45:49 +08:00
parent 6e7b4729a8
commit 3b11ddef6a
18 changed files with 811 additions and 0 deletions


@@ -0,0 +1,17 @@
package com.ycwl.basic.integration.glm;

import java.util.List;

/**
 * Abstraction over Zhipu GLM model calls.
 */
public interface GlmClient {
    /**
     * Streaming reply: each chunk is delivered to the callback in real time, and the complete text is returned at the end.
     */
    String streamReply(Long faceId,
                       Long memberId,
                       String traceId,
                       List<ai.z.openapi.service.model.ChatMessage> messages,
                       java.util.function.Consumer<String> chunkConsumer);
}
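To make the contract concrete, here is a minimal usage sketch. It reuses the ChatMessage builder calls that appear in GlmClientImpl below; the class name, trace ID value, and the way the client instance is obtained are illustrative assumptions:

import java.util.List;

import ai.z.openapi.service.model.ChatMessage;
import ai.z.openapi.service.model.ChatMessageRole;
import com.ycwl.basic.integration.glm.GlmClient;

// Minimal usage sketch: collect streamed chunks while also receiving the full reply as the return value.
public class GlmClientUsageExample {

    public static String ask(GlmClient glmClient, Long faceId, Long memberId, String question) {
        List<ChatMessage> messages = List.of(ChatMessage.builder()
                .role(ChatMessageRole.USER.value())
                .content(question)
                .build());
        StringBuilder streamed = new StringBuilder();
        // Each chunk is delivered through the callback as soon as the model produces it.
        String full = glmClient.streamReply(faceId, memberId, "trace-demo", messages, streamed::append);
        // For the implementation shown below, `full` equals the concatenation of all streamed chunks.
        return full;
    }
}

Passing null for chunkConsumer is also tolerated by the implementation below; in that case only the returned string is used.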


@@ -0,0 +1,118 @@
package com.ycwl.basic.integration.glm;
import ai.z.openapi.ZhipuAiClient;
import ai.z.openapi.service.model.ChatCompletionCreateParams;
import ai.z.openapi.service.model.ChatCompletionResponse;
import ai.z.openapi.service.model.ChatMessage;
import ai.z.openapi.service.model.ChatMessageRole;
import ai.z.openapi.service.model.ChatThinking;
import ai.z.openapi.service.model.Delta;
import ai.z.openapi.service.model.ModelData;
import io.reactivex.rxjava3.core.Flowable;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;
/**
 * Zhipu GLM client implementation based on the official SDK; the reply text is streamed back in chunks.
 */
@Slf4j
@Component
public class GlmClientImpl implements GlmClient {

    private static final String DEFAULT_MODEL = "glm-4.5-airx";

    private final ZhipuAiClient client;

    public GlmClientImpl(@Value("${zhipu.api-key:}") String apiKeyFromCfg) {
        // Prefer the zhipu.api-key property; fall back to the ZHIPU_API_KEY environment variable.
        String apiKey = StringUtils.isNotBlank(apiKeyFromCfg) ? apiKeyFromCfg : System.getenv("ZHIPU_API_KEY");
        if (StringUtils.isBlank(apiKey)) {
            // "ZHIPU_API_KEY is not configured; GLM calls will return a placeholder result"
            log.warn("未配置 ZHIPU_API_KEY,GLM 调用将返回占位结果");
            client = null;
        } else {
            client = ZhipuAiClient.builder().apiKey(apiKey).build();
        }
    }
    @Override
    public String streamReply(Long faceId,
                              Long memberId,
                              String traceId,
                              List<ChatMessage> messages,
                              Consumer<String> chunkConsumer) {
        if (client == null) {
            // "GLM has no API key configured; the call cannot be made."
            String fallback = "GLM 未配置 API KEY,无法调用。";
            if (chunkConsumer != null) {
                chunkConsumer.accept(fallback);
            }
            return fallback;
        }
        List<ChatMessage> finalMessages = messages == null ? new ArrayList<>() : new ArrayList<>(messages);
        if (finalMessages.isEmpty()) {
            finalMessages.add(ChatMessage.builder()
                    .role(ChatMessageRole.USER.value())
                    .content("你好") // "Hello"
                    .build());
        }
        // System prompt: "You are an intelligent salesperson and the user is a prospective buyer. Promote the
        // vlogs, videos and photos generated at the scenic area and encourage purchases. Reply in plain text
        // only; Markdown markup is forbidden."
        finalMessages.addFirst(ChatMessage.builder()
                .role(ChatMessageRole.SYSTEM.value())
                .content("你是一个智能销售,然后用户是实际想购买的人。你需要尽可能推广景区生成的Vlog、视频、照片等内容给用户,鼓励购买。注意,返回内容需要为普通文本格式,禁止出现Markdown标记。")
                .build());
        // System prompt: "Note: the reply must be plain text; do not return Markdown-formatted content."
        finalMessages.addFirst(ChatMessage.builder()
                .role(ChatMessageRole.SYSTEM.value())
                .content("注意,返回内容需要为普通文本格式,禁止使用Markdown格式进行返回。")
                .build());
        ChatCompletionCreateParams request = ChatCompletionCreateParams.builder()
                .model(DEFAULT_MODEL)
                .messages(finalMessages)
                .thinking(ChatThinking.builder().type("enabled").build())
                .stream(true)
                .maxTokens(4096)
                .temperature(0.8f)
                .build();
        ChatCompletionResponse response = client.chat().createChatCompletion(request);
        if (!response.isSuccess()) {
            String msg = "GLM 调用失败: " + response.getMsg(); // "GLM call failed: " + ...
            log.warn(msg);
            if (chunkConsumer != null) {
                chunkConsumer.accept(msg);
            }
            return msg;
        }
        StringBuilder sb = new StringBuilder();
        Flowable<ModelData> flowable = response.getFlowable();
        flowable.blockingSubscribe(
                data -> {
                    if (data.getChoices() == null || data.getChoices().isEmpty()) {
                        return;
                    }
                    Delta delta = data.getChoices().getFirst().getDelta();
                    if (delta == null) {
                        return;
                    }
                    String piece = delta.getContent();
                    if (StringUtils.isNotBlank(piece)) {
                        sb.append(piece);
                        if (chunkConsumer != null) {
                            chunkConsumer.accept(piece);
                        }
                    }
                },
                error -> {
                    // "GLM streaming call error"
                    log.error("GLM 流式调用异常", error);
                    String err = "GLM 调用异常:" + error.getMessage(); // "GLM call error: " + ...
                    sb.append(err);
                    if (chunkConsumer != null) {
                        chunkConsumer.accept(err);
                    }
                }
        );
        return sb.toString();
    }
}
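The commit description mentions streaming message sending in FaceChatService, but the wiring between streamReply and an HTTP streaming response is not shown in this excerpt. Below is a rough sketch of one possible bridge from the chunk callback to a Spring SseEmitter, under the assumption that the controller exposes the stream as server-sent events; the class name, executor, and persistence comment are illustrative, not the committed code:

import java.io.IOException;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;

import ai.z.openapi.service.model.ChatMessage;
import com.ycwl.basic.integration.glm.GlmClient;

// Illustrative bridge only -- not part of this commit's AppChatController/FaceChatService code.
public class GlmSseBridge {

    private final ExecutorService executor = Executors.newSingleThreadExecutor();
    private final GlmClient glmClient;

    public GlmSseBridge(GlmClient glmClient) {
        this.glmClient = glmClient;
    }

    public SseEmitter stream(Long faceId, Long memberId, String traceId, List<ChatMessage> history) {
        SseEmitter emitter = new SseEmitter(0L); // 0 = no timeout
        executor.submit(() -> {
            long start = System.currentTimeMillis();
            try {
                // Push each chunk to the client as it arrives; the full text comes back when the call ends.
                String fullText = glmClient.streamReply(faceId, memberId, traceId, history, chunk -> {
                    try {
                        emitter.send(chunk);
                    } catch (IOException e) {
                        // Client likely disconnected; remaining chunks are simply dropped.
                    }
                });
                long latencyMs = System.currentTimeMillis() - start;
                // This is where the commit's message record (role, content, trace ID, latency) would be saved.
                emitter.complete();
            } catch (Exception e) {
                emitter.completeWithError(e);
            }
        });
        return emitter;
    }
}

Because streamReply blocks until the stream finishes (blockingSubscribe in GlmClientImpl), running it on a separate executor keeps the request thread free while chunks are forwarded to the client.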