Merge pull request #74

* feat: Implement system parameter management with Redis integration
Dallas98 authored on 2025-11-11 22:13:14 +08:00, committed by GitHub
parent c5ccc56cca
commit aa01f52535
30 changed files with 343 additions and 73 deletions

View File

@@ -134,6 +134,10 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-autoconfigure</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-redis</artifactId>
</dependency>
</dependencies>
<build>

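With spring-boot-starter-data-redis on the classpath, Spring Boot auto-configures a RedisConnectionFactory together with RedisTemplate and StringRedisTemplate beans. As a rough illustration of what that enables, the hypothetical accessor below (not part of this PR; the class name, key prefix, and TTL are invented) reads and writes a parameter value through the auto-configured StringRedisTemplate:

```java
import java.time.Duration;

import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.stereotype.Component;

// Illustrative sketch only: this class is not part of the PR. It relies on the
// StringRedisTemplate bean that Spring Boot auto-configures once
// spring-boot-starter-data-redis is on the classpath.
@Component
public class SystemParamRedisAccessor {

    private final StringRedisTemplate redisTemplate;

    public SystemParamRedisAccessor(StringRedisTemplate redisTemplate) {
        this.redisTemplate = redisTemplate;
    }

    public void put(String key, String value) {
        // Key prefix and one-hour TTL are arbitrary choices for the sketch.
        redisTemplate.opsForValue().set("sys:param:" + key, value, Duration.ofHours(1));
    }

    public String get(String key) {
        return redisTemplate.opsForValue().get("sys:param:" + key);
    }
}
```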
View File

@@ -3,6 +3,7 @@ package com.datamate.main;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
@@ -21,6 +22,7 @@ import org.springframework.transaction.annotation.EnableTransactionManagement;
@EnableTransactionManagement
@EnableAsync
@EnableScheduling
@EnableCaching
public class DataMatePlatformApplication {
public static void main(String[] args) {
SpringApplication.run(DataMatePlatformApplication.class, args);

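@EnableCaching switches on Spring's annotation-driven cache abstraction, and with the Redis starter present the auto-configured CacheManager stores entries in Redis. The PR itself doesn't include a caching example, so the service below is a hedged sketch: the class, the placeholder persistence methods, and the "sysParams" cache name are assumptions; only the @Cacheable/@CacheEvict mechanics are standard Spring.

```java
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Service;

// Hypothetical sketch: neither SystemParamService nor the "sysParams" cache
// exists in this PR. It only illustrates what @EnableCaching unlocks once a
// Redis-backed CacheManager is auto-configured.
@Service
public class SystemParamService {

    // First call falls through to persistence; subsequent calls for the same
    // key are served from the Redis-backed cache.
    @Cacheable(cacheNames = "sysParams", key = "#key")
    public String getParam(String key) {
        return loadFromStore(key);
    }

    // Evict the cached entry whenever the parameter is changed.
    @CacheEvict(cacheNames = "sysParams", key = "#key")
    public void updateParam(String key, String value) {
        saveToStore(key, value);
    }

    private String loadFromStore(String key) {
        // Placeholder for the real lookup (database, mapper, etc.).
        return null;
    }

    private void saveToStore(String key, String value) {
        // Placeholder for the real write.
    }
}
```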
View File

@@ -22,14 +22,6 @@ spring:
      idle-timeout: 600000
      max-lifetime: 1800000
  # Elasticsearch configuration
  elasticsearch:
    uris: ${ES_URIS:http://localhost:9200}
    username: ${ES_USERNAME:}
    password: ${ES_PASSWORD:}
    connection-timeout: 10s
    socket-timeout: 30s
  # Jackson configuration
  jackson:
    time-zone: Asia/Shanghai
@@ -61,6 +53,21 @@ spring:
      - classpath:config/application-datacollection.yml
      - classpath:config/application-datamanagement.yml
  # Redis configuration
  data:
    redis:
      host: datamate-redis
      port: 6379
      timeout: 2000
      password: ${REDIS_PASSWORD:password}
      lettuce:
        pool:
          max-active: 20
          max-idle: 10
          min-idle: 5
          max-wait: 1000ms
# MyBatis configuration (must be at the top level, not under spring)
mybatis-plus:
  configuration:

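The spring.data.redis block above only configures the connection and the Lettuce pool; no cache manager bean appears in the diff, so cache entries would fall back to Spring Boot's defaults (JDK serialization, no TTL). A common follow-up, shown here purely as a hypothetical sketch that is not part of this PR, is a RedisCacheManager bean with JSON values and a default expiry:

```java
import java.time.Duration;

import org.springframework.cache.CacheManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.cache.RedisCacheConfiguration;
import org.springframework.data.redis.cache.RedisCacheManager;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.serializer.GenericJackson2JsonRedisSerializer;
import org.springframework.data.redis.serializer.RedisSerializationContext;
import org.springframework.data.redis.serializer.StringRedisSerializer;

// Hypothetical customization, not present in this PR. It shows how the
// connection settings above could be paired with JSON value serialization
// and a default TTL instead of Spring Boot's cache defaults.
@Configuration
public class RedisCacheConfig {

    @Bean
    public CacheManager cacheManager(RedisConnectionFactory connectionFactory) {
        RedisCacheConfiguration config = RedisCacheConfiguration.defaultCacheConfig()
                // Expire cached entries after 30 minutes (arbitrary choice for the sketch).
                .entryTtl(Duration.ofMinutes(30))
                .serializeKeysWith(RedisSerializationContext.SerializationPair
                        .fromSerializer(new StringRedisSerializer()))
                .serializeValuesWith(RedisSerializationContext.SerializationPair
                        .fromSerializer(new GenericJackson2JsonRedisSerializer()));
        return RedisCacheManager.builder(connectionFactory).cacheDefaults(config).build();
    }
}
```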
View File

@@ -99,7 +99,7 @@ public class KnowledgeBaseService {
return ragFile;
}).toList();
ragFileRepository.saveBatch(ragFiles, 100);
eventPublisher.publishEvent(new DataInsertedEvent(knowledgeBase, request.getProcessType()));
eventPublisher.publishEvent(new DataInsertedEvent(knowledgeBase, request));
}
public PagedResponse<RagFile> listFiles(String knowledgeBaseId, RagFileReq request) {

View File

@@ -1,7 +1,7 @@
package com.datamate.rag.indexer.infrastructure.event;
import com.datamate.rag.indexer.domain.model.KnowledgeBase;
import com.datamate.rag.indexer.interfaces.dto.ProcessType;
import com.datamate.rag.indexer.interfaces.dto.AddFilesReq;
/**
* Data insertion event
@@ -9,5 +9,5 @@ import com.datamate.rag.indexer.interfaces.dto.ProcessType;
* @author dallas
* @since 2025-10-29
*/
public record DataInsertedEvent(KnowledgeBase knowledgeBase, ProcessType processType) {
public record DataInsertedEvent(KnowledgeBase knowledgeBase, AddFilesReq addFilesReq) {
}

View File

@@ -1,8 +1,8 @@
package com.datamate.rag.indexer.infrastructure.event;
import com.datamate.common.models.domain.entity.ModelConfig;
import com.datamate.common.models.domain.repository.ModelConfigRepository;
import com.datamate.common.models.infrastructure.client.ModelClient;
import com.datamate.common.setting.domain.entity.ModelConfig;
import com.datamate.common.setting.domain.repository.ModelConfigRepository;
import com.datamate.common.setting.infrastructure.client.ModelClient;
import com.datamate.rag.indexer.domain.model.FileStatus;
import com.datamate.rag.indexer.domain.model.RagFile;
import com.datamate.rag.indexer.domain.repository.RagFileRepository;
@@ -109,7 +109,7 @@ public class RagEtlService {
document = new HtmlToTextDocumentTransformer().transform(document);
}
// Split the document into chunks using the document splitter
DocumentSplitter splitter = documentSplitter(event.processType());
DocumentSplitter splitter = documentSplitter(event.addFilesReq().getProcessType());
List<TextSegment> split = splitter.split(document);
// Update the chunk count

View File

@@ -16,6 +16,9 @@ import java.util.List;
public class AddFilesReq {
private String knowledgeBaseId;
private ProcessType processType;
private Integer chunkSize;
private Integer overlapSize;
private String customSeparator;
private List<FileInfo> files;
public record FileInfo(String id, String name) {

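AddFilesReq now travels inside DataInsertedEvent, so RagEtlService reads the ProcessType (and potentially chunkSize, overlapSize, and customSeparator) straight off the event. The diff does not show how documentSplitter uses the new fields, so the helper below is a speculative sketch: it assumes Lombok-style getters on AddFilesReq and langchain4j's recursive splitter, with invented 500/50 defaults, and leaves customSeparator aside.

```java
import com.datamate.rag.indexer.interfaces.dto.AddFilesReq;

import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.DocumentSplitters;

// Speculative sketch, not code from this PR: the real documentSplitter(...) in
// RagEtlService is not shown in the diff. This helper only illustrates how the
// new chunkSize/overlapSize fields could drive a langchain4j recursive splitter.
// It assumes Lombok-style getters on AddFilesReq; customSeparator is ignored here.
final class SplitterFactory {

    private SplitterFactory() {
    }

    static DocumentSplitter fromRequest(AddFilesReq req) {
        // 500/50 are invented fallbacks for when the request leaves the fields null.
        int chunkSize = req.getChunkSize() != null ? req.getChunkSize() : 500;
        int overlap = req.getOverlapSize() != null ? req.getOverlapSize() : 50;
        return DocumentSplitters.recursive(chunkSize, overlap);
    }
}
```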
View File

@@ -26,10 +26,6 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-elasticsearch</artifactId>
</dependency>
<dependency>
<groupId>com.mysql</groupId>
<artifactId>mysql-connector-j</artifactId>