searchTags(String keyword) {
+ if (keyword == null || keyword.trim().isEmpty()) {
+ return getAllTags();
+ }
+ return tagMapper.findByKeyword(keyword.trim());
+ }
+
+ /**
+ * 获取标签详情
+ */
+ @Transactional(readOnly = true)
+ public Tag getTag(String tagId) {
+ Tag tag = tagMapper.findById(tagId);
+ if (tag == null) {
+ throw new IllegalArgumentException("Tag not found: " + tagId);
+ }
+ return tag;
+ }
+
+ /**
+ * 根据名称获取标签
+ */
+ @Transactional(readOnly = true)
+ public Tag getTagByName(String name) {
+ Tag tag = tagMapper.findByName(name);
+ if (tag == null) {
+ throw new IllegalArgumentException("Tag not found: " + name);
+ }
+ return tag;
+ }
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/common/enums/DatasetStatusType.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/common/enums/DatasetStatusType.java
new file mode 100644
index 0000000..d9b2ff2
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/common/enums/DatasetStatusType.java
@@ -0,0 +1,41 @@
+package com.datamate.datamanagement.common.enums;
+
+/**
+ * 数据集状态类型
+ * 数据集可以处于以下几种状态:
+ * <p>草稿(DRAFT):数据集正在创建中,尚未完成。
+ * <p>活动(ACTIVE):数据集处于活动状态,可以被查询和使用,也可以被更新和删除。
+ * <p>处理中(PROCESSING):数据集正在处理中,可能需要一些时间,处理完成后会变成活动状态。
+ * <p>已归档(ARCHIVED):数据集已被归档,不可以更新文件,可以解锁变成活动状态。
+ * <p>已发布(PUBLISHED):数据集已被发布,可供外部使用,外部用户可以查询和使用数据集。
+ * <p>已弃用(DEPRECATED):数据集已被弃用,不建议再使用。
+ *
+ * @author dallas
+ * @since 2025-10-17
+ */
+public enum DatasetStatusType {
+ /**
+ * 草稿状态
+ */
+ DRAFT,
+ /**
+ * 活动状态
+ */
+ ACTIVE,
+ /**
+ * 处理中状态
+ */
+ PROCESSING,
+ /**
+ * 已归档状态
+ */
+ ARCHIVED,
+ /**
+ * 已发布状态
+ */
+ PUBLISHED,
+ /**
+ * 已弃用状态
+ */
+ DEPRECATED
+}
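
For reference, a minimal sketch of how the transitions described in the javadoc could be guarded; the helper below is illustrative only and not part of this change.

```java
import com.datamate.datamanagement.common.enums.DatasetStatusType;

// Illustrative guard derived from the javadoc above; not part of this change.
public final class DatasetStatusRules {
    private DatasetStatusRules() {
    }

    /** Files can only be modified while the dataset is still editable (DRAFT or ACTIVE). */
    public static boolean canModifyFiles(DatasetStatusType status) {
        return status == DatasetStatusType.DRAFT || status == DatasetStatusType.ACTIVE;
    }

    /** ARCHIVED datasets can be unlocked back to ACTIVE; any other status is returned unchanged. */
    public static DatasetStatusType unarchive(DatasetStatusType status) {
        return status == DatasetStatusType.ARCHIVED ? DatasetStatusType.ACTIVE : status;
    }
}
```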
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/common/enums/DatasetType.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/common/enums/DatasetType.java
new file mode 100644
index 0000000..70e895a
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/common/enums/DatasetType.java
@@ -0,0 +1,28 @@
+package com.datamate.datamanagement.common.enums;
+
+import lombok.Getter;
+
+/**
+ * 数据集类型值对象
+ *
+ * @author DataMate
+ * @since 2025-10-15
+ */
+public enum DatasetType {
+ TEXT("text", "文本数据集"),
+ IMAGE("image", "图像数据集"),
+ AUDIO("audio", "音频数据集"),
+ VIDEO("video", "视频数据集"),
+ OTHER("other", "其他数据集");
+
+ @Getter
+ private final String code;
+
+ @Getter
+ private final String description;
+
+ DatasetType(String code, String description) {
+ this.code = code;
+ this.description = description;
+ }
+}
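
The `code` field suggests the enum is addressed by its lowercase code in API payloads ("text", "image", ...). A hypothetical lookup helper (the `fromCode` method is not part of this change):

```java
import java.util.Arrays;

import com.datamate.datamanagement.common.enums.DatasetType;

// Hypothetical helper: resolves an API type code back to the enum, falling back to OTHER.
final class DatasetTypeLookup {
    private DatasetTypeLookup() {
    }

    static DatasetType fromCode(String code) {
        return Arrays.stream(DatasetType.values())
                .filter(type -> type.getCode().equalsIgnoreCase(code))
                .findFirst()
                .orElse(DatasetType.OTHER);
    }
}
```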
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/contants/DatasetConstant.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/contants/DatasetConstant.java
new file mode 100644
index 0000000..a6fe6e9
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/contants/DatasetConstant.java
@@ -0,0 +1,11 @@
+package com.datamate.datamanagement.domain.contants;
+
+/**
+ * 数据集常量
+ */
+public interface DatasetConstant {
+ /**
+ * 服务ID
+ */
+ String SERVICE_ID = "DATA_MANAGEMENT";
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/Dataset.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/Dataset.java
new file mode 100644
index 0000000..a3d55df
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/Dataset.java
@@ -0,0 +1,146 @@
+package com.datamate.datamanagement.domain.model.dataset;
+
+import com.baomidou.mybatisplus.annotation.TableField;
+import com.baomidou.mybatisplus.annotation.TableName;
+import com.baomidou.mybatisplus.extension.handlers.JacksonTypeHandler;
+import com.datamate.common.domain.model.base.BaseEntity;
+import com.datamate.datamanagement.common.enums.DatasetStatusType;
+import com.datamate.datamanagement.common.enums.DatasetType;
+import lombok.Getter;
+import lombok.Setter;
+
+import java.io.File;
+import java.time.LocalDateTime;
+import java.util.*;
+
+/**
+ * 数据集实体(与数据库表 t_dm_datasets 对齐)
+ */
+@Getter
+@Setter
+@TableName(value = "t_dm_datasets", autoResultMap = true)
+public class Dataset extends BaseEntity {
+ /**
+ * 数据集名称
+ */
+ private String name;
+ /**
+ * 数据集描述
+ */
+ private String description;
+ /**
+ * 数据集类型
+ */
+ private DatasetType datasetType;
+ /**
+ * 数据集分类
+ */
+ private String category;
+ /**
+ * 数据集路径
+ */
+ private String path;
+ /**
+ * 数据集格式
+ */
+ private String format;
+ /**
+ * 数据集模式信息,JSON格式, 用于解析当前数据集的文件结构
+ */
+ private String schemaInfo;
+ /**
+ * 数据集大小(字节)
+ */
+ private Long sizeBytes = 0L;
+ /**
+ * 文件数量
+ */
+ private Long fileCount = 0L;
+ /**
+ * 记录数量
+ */
+ private Long recordCount = 0L;
+ /**
+ * 数据集保留天数
+ */
+ private Integer retentionDays = 0;
+ /**
+ * 标签列表, JSON格式
+ */
+ @TableField(typeHandler = JacksonTypeHandler.class)
+ private Collection<Tag> tags = new HashSet<>();
+ /**
+ * 额外元数据,JSON格式
+ */
+ private String metadata;
+ /**
+ * 数据集状态
+ */
+ private DatasetStatusType status;
+ /**
+ * 是否为公共数据集
+ */
+ private Boolean isPublic = false;
+ /**
+ * 是否为精选数据集
+ */
+ private Boolean isFeatured = false;
+ /**
+ * 数据集版本号
+ */
+ private Long version = 0L;
+
+ @TableField(exist = false)
+ private List<DatasetFile> files = new ArrayList<>();
+
+ public Dataset() {
+ }
+
+ public Dataset(String name, String description, DatasetType datasetType, String category, String path,
+ String format, DatasetStatusType status, String createdBy) {
+ this.name = name;
+ this.description = description;
+ this.datasetType = datasetType;
+ this.category = category;
+ this.path = path;
+ this.format = format;
+ this.status = status;
+ this.createdBy = createdBy;
+ this.createdAt = LocalDateTime.now();
+ this.updatedAt = LocalDateTime.now();
+ }
+
+ public void initCreateParam(String datasetBasePath) {
+ this.id = UUID.randomUUID().toString();
+ this.path = datasetBasePath + File.separator + this.id;
+ this.status = DatasetStatusType.DRAFT;
+ }
+
+ public void updateBasicInfo(String name, String description, String category) {
+ if (name != null && !name.isEmpty()) this.name = name;
+ if (description != null) this.description = description;
+ if (category != null) this.category = category;
+ this.updatedAt = LocalDateTime.now();
+ }
+
+ public void updateStatus(DatasetStatusType status, String updatedBy) {
+ this.status = status;
+ this.updatedBy = updatedBy;
+ this.updatedAt = LocalDateTime.now();
+ }
+
+ public void addFile(DatasetFile file) {
+ this.files.add(file);
+ this.fileCount = this.fileCount + 1;
+ this.sizeBytes = this.sizeBytes + (file.getFileSize() != null ? file.getFileSize() : 0L);
+ this.updatedAt = LocalDateTime.now();
+ }
+
+ public void removeFile(DatasetFile file) {
+ if (this.files.remove(file)) {
+ this.fileCount = Math.max(0, this.fileCount - 1);
+ this.sizeBytes = Math.max(0, this.sizeBytes - (file.getFileSize() != null ? file.getFileSize() : 0L));
+ this.updatedAt = LocalDateTime.now();
+ }
+ }
+}
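
A short usage sketch of the aggregate methods above (assuming `BaseEntity` exposes `getId()`); all values are placeholders:

```java
import com.datamate.datamanagement.common.enums.DatasetStatusType;
import com.datamate.datamanagement.common.enums.DatasetType;
import com.datamate.datamanagement.domain.model.dataset.Dataset;
import com.datamate.datamanagement.domain.model.dataset.DatasetFile;

public class DatasetUsageSketch {
    public static void main(String[] args) {
        // initCreateParam assigns a UUID id, derives the storage path and moves the dataset to DRAFT.
        Dataset dataset = new Dataset("chest-xray", "demo dataset", DatasetType.IMAGE,
                "medical", null, "dicom", DatasetStatusType.DRAFT, "dallas");
        dataset.initCreateParam("/data/datasets");

        // addFile keeps fileCount and sizeBytes in sync with the attached files.
        DatasetFile file = DatasetFile.builder()
                .id("file-1")
                .datasetId(dataset.getId())
                .fileName("scan-001.dcm")
                .fileSize(1_024L)
                .build();
        dataset.addFile(file);

        dataset.updateStatus(DatasetStatusType.ACTIVE, "dallas");
        System.out.println(dataset.getFileCount() + " file(s), " + dataset.getSizeBytes() + " bytes");
    }
}
```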
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/DatasetFile.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/DatasetFile.java
new file mode 100644
index 0000000..85fb60b
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/DatasetFile.java
@@ -0,0 +1,35 @@
+package com.datamate.datamanagement.domain.model.dataset;
+
+import com.baomidou.mybatisplus.annotation.TableId;
+import com.baomidou.mybatisplus.annotation.TableName;
+import lombok.*;
+
+import java.time.LocalDateTime;
+import java.util.List;
+
+/**
+ * 数据集文件实体(与数据库表 t_dm_dataset_files 对齐)
+ */
+@Getter
+@Setter
+@Builder
+@NoArgsConstructor
+@AllArgsConstructor
+@TableName("t_dm_dataset_files")
+public class DatasetFile {
+ @TableId
+ private String id; // UUID
+ private String datasetId; // UUID
+ private String fileName;
+ private String filePath;
+ private String fileType; // JPG/PNG/DCM/TXT
+ private Long fileSize; // bytes
+ private String checkSum;
+ private List<String> tags;
+ private String metadata;
+ private String status; // UPLOADED, PROCESSING, COMPLETED, ERROR
+ private LocalDateTime uploadTime;
+ private LocalDateTime lastAccessTime;
+ private LocalDateTime createdAt;
+ private LocalDateTime updatedAt;
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/DatasetFileUploadCheckInfo.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/DatasetFileUploadCheckInfo.java
new file mode 100644
index 0000000..3c1917d
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/DatasetFileUploadCheckInfo.java
@@ -0,0 +1,18 @@
+package com.datamate.datamanagement.domain.model.dataset;
+
+import com.datamate.common.domain.model.UploadCheckInfo;
+import lombok.Getter;
+import lombok.Setter;
+
+/**
+ * 数据集文件上传检查信息
+ */
+@Getter
+@Setter
+public class DatasetFileUploadCheckInfo extends UploadCheckInfo {
+ /** 数据集id */
+ private String datasetId;
+
+ /** 是否为压缩包上传 */
+ private boolean hasArchive;
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/StatusConstants.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/StatusConstants.java
new file mode 100644
index 0000000..05d232d
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/StatusConstants.java
@@ -0,0 +1,33 @@
+package com.datamate.datamanagement.domain.model.dataset;
+
+/**
+ * 状态常量类 - 统一管理所有状态枚举值
+ */
+public final class StatusConstants {
+
+ /**
+ * 数据集状态
+ */
+ public static final class DatasetStatuses {
+ public static final String DRAFT = "DRAFT";
+ public static final String ACTIVE = "ACTIVE";
+ public static final String ARCHIVED = "ARCHIVED";
+ public static final String PROCESSING = "PROCESSING";
+
+ private DatasetStatuses() {}
+ }
+
+ /**
+ * 数据集文件状态
+ */
+ public static final class DatasetFileStatuses {
+ public static final String UPLOADED = "UPLOADED";
+ public static final String PROCESSING = "PROCESSING";
+ public static final String COMPLETED = "COMPLETED";
+ public static final String ERROR = "ERROR";
+
+ private DatasetFileStatuses() {}
+ }
+
+ private StatusConstants() {}
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/Tag.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/Tag.java
new file mode 100644
index 0000000..a37b5f7
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/Tag.java
@@ -0,0 +1,33 @@
+package com.datamate.datamanagement.domain.model.dataset;
+
+import com.datamate.common.domain.model.base.BaseEntity;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+import lombok.Setter;
+
+/**
+ * 标签实体(与数据库表 t_dm_tags 对齐)
+ */
+@Getter
+@Setter
+@NoArgsConstructor
+@AllArgsConstructor
+public class Tag extends BaseEntity {
+ private String name;
+ private String description;
+ private String category;
+ private String color;
+ private Long usageCount = 0L;
+
+ public Tag(String name, String description, String category, String color) {
+ this.name = name;
+ this.description = description;
+ this.category = category;
+ this.color = color;
+ }
+
+ public void decrementUsage() {
+ if (this.usageCount != null && this.usageCount > 0) this.usageCount--;
+ }
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/client/CollectionTaskClient.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/client/CollectionTaskClient.java
new file mode 100644
index 0000000..5deaa6b
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/client/CollectionTaskClient.java
@@ -0,0 +1,22 @@
+package com.datamate.datamanagement.infrastructure.client;
+
+import com.datamate.common.infrastructure.common.Response;
+import com.datamate.datamanagement.infrastructure.client.dto.CollectionTaskDetailResponse;
+import org.springframework.cloud.openfeign.FeignClient;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+
+/**
+ * 数据归集服务 Feign Client
+ */
+@FeignClient(name = "collection-service", url = "${collection.service.url:http://localhost:8080}")
+public interface CollectionTaskClient {
+
+ /**
+ * 获取归集任务详情
+ * @param taskId 任务ID
+ * @return 任务详情
+ */
+ @GetMapping("/api/data-collection/tasks/{id}")
+ Response<CollectionTaskDetailResponse> getTaskDetail(@PathVariable("id") String taskId);
+}
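
A hedged sketch of consuming this client for a local collection task, assuming the `Response` wrapper exposes its payload via `getData()` and that the task's `config` map can be rebound to the `LocalCollectionConfig` DTO added below:

```java
import com.datamate.datamanagement.infrastructure.client.CollectionTaskClient;
import com.datamate.datamanagement.infrastructure.client.dto.CollectionTaskDetailResponse;
import com.datamate.datamanagement.infrastructure.client.dto.LocalCollectionConfig;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.List;

class CollectionTaskClientSketch {
    /** Resolves the file paths of a local collection task; Response#getData() is an assumption. */
    static List<String> resolveLocalFilePaths(CollectionTaskClient client, String taskId) {
        CollectionTaskDetailResponse task = client.getTaskDetail(taskId).getData(); // accessor assumed
        LocalCollectionConfig config = new ObjectMapper()
                .convertValue(task.getConfig(), LocalCollectionConfig.class);
        return config.getFilePaths();
    }
}
```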
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/client/dto/CollectionTaskDetailResponse.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/client/dto/CollectionTaskDetailResponse.java
new file mode 100644
index 0000000..5e38d8e
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/client/dto/CollectionTaskDetailResponse.java
@@ -0,0 +1,23 @@
+package com.datamate.datamanagement.infrastructure.client.dto;
+
+import lombok.Data;
+
+import java.time.LocalDateTime;
+import java.util.Map;
+
+/**
+ * 归集任务详情响应
+ */
+@Data
+public class CollectionTaskDetailResponse {
+ private String id;
+ private String name;
+ private String description;
+ private Map<String, Object> config;
+ private String status;
+ private String syncMode;
+ private String scheduleExpression;
+ private String lastExecutionId;
+ private LocalDateTime createdAt;
+ private LocalDateTime updatedAt;
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/client/dto/LocalCollectionConfig.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/client/dto/LocalCollectionConfig.java
new file mode 100644
index 0000000..fff0491
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/client/dto/LocalCollectionConfig.java
@@ -0,0 +1,21 @@
+package com.datamate.datamanagement.infrastructure.client.dto;
+
+import lombok.Data;
+
+import java.util.List;
+
+/**
+ * 本地归集任务配置
+ */
+@Data
+public class LocalCollectionConfig {
+ /**
+ * 归集类型
+ */
+ private String type;
+
+ /**
+ * 文件路径列表
+ */
+ private List<String> filePaths;
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/config/DataManagementConfig.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/config/DataManagementConfig.java
new file mode 100644
index 0000000..623c334
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/config/DataManagementConfig.java
@@ -0,0 +1,37 @@
+package com.datamate.datamanagement.infrastructure.config;
+
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.cache.CacheManager;
+import org.springframework.cache.annotation.EnableCaching;
+import org.springframework.cache.concurrent.ConcurrentMapCacheManager;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.transaction.annotation.EnableTransactionManagement;
+import org.springframework.web.multipart.support.StandardServletMultipartResolver;
+
+/**
+ * 数据管理服务配置
+ */
+@Configuration
+@EnableTransactionManagement
+@EnableCaching
+@EnableConfigurationProperties(DataManagementProperties.class)
+public class DataManagementConfig {
+
+ /**
+ * 缓存管理器
+ */
+ @Bean
+ public CacheManager cacheManager() {
+ return new ConcurrentMapCacheManager("datasets", "datasetFiles", "tags");
+ }
+
+ /**
+ * 文件上传解析器
+ */
+ @Bean
+ public StandardServletMultipartResolver multipartResolver() {
+ StandardServletMultipartResolver resolver = new StandardServletMultipartResolver();
+ return resolver;
+ }
+}
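
The cache manager above registers the `datasets`, `datasetFiles` and `tags` caches. A minimal sketch of how a read path could use one of them (bean and method names are illustrative):

```java
import com.datamate.datamanagement.domain.model.dataset.Dataset;
import com.datamate.datamanagement.infrastructure.persistence.repository.DatasetRepository;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Service;

// Illustrative only: shows the "datasets" cache declared above backing a read-mostly lookup.
@Service
class CachedDatasetReader {
    private final DatasetRepository datasetRepository;

    CachedDatasetReader(DatasetRepository datasetRepository) {
        this.datasetRepository = datasetRepository;
    }

    @Cacheable(cacheNames = "datasets", key = "#name")
    public Dataset findByName(String name) {
        return datasetRepository.findByName(name);
    }

    @CacheEvict(cacheNames = "datasets", key = "#name")
    public void evict(String name) {
        // the entry is reloaded on the next findByName(name) call
    }
}
```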
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/config/DataManagementProperties.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/config/DataManagementProperties.java
new file mode 100644
index 0000000..6a91a1d
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/config/DataManagementProperties.java
@@ -0,0 +1,82 @@
+package com.datamate.datamanagement.infrastructure.config;
+
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.context.annotation.Configuration;
+
+/**
+ * 数据管理服务配置属性
+ */
+@Configuration
+@ConfigurationProperties(prefix = "datamanagement")
+public class DataManagementProperties {
+
+ private FileStorage fileStorage = new FileStorage();
+ private Cache cache = new Cache();
+
+ public FileStorage getFileStorage() {
+ return fileStorage;
+ }
+
+ public void setFileStorage(FileStorage fileStorage) {
+ this.fileStorage = fileStorage;
+ }
+
+ public Cache getCache() {
+ return cache;
+ }
+
+ public void setCache(Cache cache) {
+ this.cache = cache;
+ }
+
+ public static class FileStorage {
+ private String uploadDir = "./uploads";
+ private long maxFileSize = 10485760; // 10MB
+ private long maxRequestSize = 52428800; // 50MB
+
+ public String getUploadDir() {
+ return uploadDir;
+ }
+
+ public void setUploadDir(String uploadDir) {
+ this.uploadDir = uploadDir;
+ }
+
+ public long getMaxFileSize() {
+ return maxFileSize;
+ }
+
+ public void setMaxFileSize(long maxFileSize) {
+ this.maxFileSize = maxFileSize;
+ }
+
+ public long getMaxRequestSize() {
+ return maxRequestSize;
+ }
+
+ public void setMaxRequestSize(long maxRequestSize) {
+ this.maxRequestSize = maxRequestSize;
+ }
+ }
+
+ public static class Cache {
+ private int ttl = 3600; // 1 hour
+ private int maxSize = 1000;
+
+ public int getTtl() {
+ return ttl;
+ }
+
+ public void setTtl(int ttl) {
+ this.ttl = ttl;
+ }
+
+ public int getMaxSize() {
+ return maxSize;
+ }
+
+ public void setMaxSize(int maxSize) {
+ this.maxSize = maxSize;
+ }
+ }
+}
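
With the `datamanagement` prefix, relaxed binding maps properties such as `datamanagement.file-storage.upload-dir` and `datamanagement.file-storage.max-file-size`. A small sketch (component name illustrative) of enforcing the configured per-file limit:

```java
import com.datamate.datamanagement.infrastructure.config.DataManagementProperties;
import org.springframework.stereotype.Component;
import org.springframework.web.multipart.MultipartFile;

// Illustrative sketch: rejects uploads that exceed the configured per-file size limit.
@Component
class UploadSizeGuard {
    private final DataManagementProperties properties;

    UploadSizeGuard(DataManagementProperties properties) {
        this.properties = properties;
    }

    void check(MultipartFile file) {
        long maxFileSize = properties.getFileStorage().getMaxFileSize();
        if (file.getSize() > maxFileSize) {
            throw new IllegalArgumentException("File exceeds configured limit of " + maxFileSize + " bytes");
        }
    }
}
```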
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/exception/DataManagementErrorCode.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/exception/DataManagementErrorCode.java
new file mode 100644
index 0000000..3be421c
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/exception/DataManagementErrorCode.java
@@ -0,0 +1,39 @@
+package com.datamate.datamanagement.infrastructure.exception;
+
+import com.datamate.common.infrastructure.exception.ErrorCode;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+
+/**
+ * 数据管理模块错误码
+ *
+ * @author dallas
+ * @since 2025-10-20
+ */
+@Getter
+@AllArgsConstructor
+public enum DataManagementErrorCode implements ErrorCode {
+ /**
+ * 数据集不存在
+ */
+ DATASET_NOT_FOUND("data_management.0001", "数据集不存在"),
+ /**
+ * 数据集已存在
+ */
+ DATASET_ALREADY_EXISTS("data_management.0002", "数据集已存在"),
+ /**
+ * 数据集状态错误
+ */
+ DATASET_STATUS_ERROR("data_management.0003", "数据集状态错误"),
+ /**
+ * 数据集标签不存在
+ */
+ DATASET_TAG_NOT_FOUND("data_management.0004", "数据集标签不存在"),
+ /**
+ * 数据集标签已存在
+ */
+ DATASET_TAG_ALREADY_EXISTS("data_management.0005", "数据集标签已存在");
+
+ private final String code;
+ private final String message;
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/mapper/DatasetFileMapper.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/mapper/DatasetFileMapper.java
new file mode 100644
index 0000000..6b0429c
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/mapper/DatasetFileMapper.java
@@ -0,0 +1,30 @@
+package com.datamate.datamanagement.infrastructure.persistence.mapper;
+
+import com.baomidou.mybatisplus.core.mapper.BaseMapper;
+import com.datamate.datamanagement.domain.model.dataset.DatasetFile;
+import org.apache.ibatis.annotations.Mapper;
+import org.apache.ibatis.annotations.Param;
+import org.apache.ibatis.session.RowBounds;
+
+import java.util.List;
+
+@Mapper
+public interface DatasetFileMapper extends BaseMapper<DatasetFile> {
+ DatasetFile findById(@Param("id") String id);
+ List<DatasetFile> findByDatasetId(@Param("datasetId") String datasetId, RowBounds rowBounds);
+ List<DatasetFile> findByDatasetIdAndStatus(@Param("datasetId") String datasetId, @Param("status") String status, RowBounds rowBounds);
+ List<DatasetFile> findByDatasetIdAndFileType(@Param("datasetId") String datasetId, @Param("fileType") String fileType, RowBounds rowBounds);
+ Long countByDatasetId(@Param("datasetId") String datasetId);
+ Long countCompletedByDatasetId(@Param("datasetId") String datasetId);
+ Long sumSizeByDatasetId(@Param("datasetId") String datasetId);
+ DatasetFile findByDatasetIdAndFileName(@Param("datasetId") String datasetId, @Param("fileName") String fileName);
+ List<DatasetFile> findAllByDatasetId(@Param("datasetId") String datasetId);
+ List<DatasetFile> findByCriteria(@Param("datasetId") String datasetId,
+ @Param("fileType") String fileType,
+ @Param("status") String status,
+ RowBounds rowBounds);
+
+ int insert(DatasetFile file);
+ int update(DatasetFile file);
+ int deleteById(@Param("id") String id);
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/mapper/DatasetMapper.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/mapper/DatasetMapper.java
new file mode 100644
index 0000000..4450511
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/mapper/DatasetMapper.java
@@ -0,0 +1,33 @@
+package com.datamate.datamanagement.infrastructure.persistence.mapper;
+
+import com.baomidou.mybatisplus.core.mapper.BaseMapper;
+import com.datamate.datamanagement.domain.model.dataset.Dataset;
+import com.datamate.datamanagement.interfaces.dto.AllDatasetStatisticsResponse;
+import org.apache.ibatis.annotations.Mapper;
+import org.apache.ibatis.annotations.Param;
+import org.apache.ibatis.session.RowBounds;
+
+import java.util.List;
+
+@Mapper
+public interface DatasetMapper extends BaseMapper<Dataset> {
+ Dataset findById(@Param("id") String id);
+ Dataset findByName(@Param("name") String name);
+ List<Dataset> findByStatus(@Param("status") String status);
+ List<Dataset> findByCreatedBy(@Param("createdBy") String createdBy, RowBounds rowBounds);
+ List<Dataset> findByTypeCode(@Param("typeCode") String typeCode, RowBounds rowBounds);
+ List<Dataset> findByTagNames(@Param("tagNames") List<String> tagNames, RowBounds rowBounds);
+ List<Dataset> findByKeyword(@Param("keyword") String keyword, RowBounds rowBounds);
+ List<Dataset> findByCriteria(@Param("typeCode") String typeCode,
+ @Param("status") String status,
+ @Param("keyword") String keyword,
+ @Param("tagNames") List<String> tagNames,
+ RowBounds rowBounds);
+ long countByCriteria(@Param("typeCode") String typeCode,
+ @Param("status") String status,
+ @Param("keyword") String keyword,
+ @Param("tagNames") List tagNames);
+
+ int deleteById(@Param("id") String id);
+ AllDatasetStatisticsResponse getAllDatasetStatistics();
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/mapper/TagMapper.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/mapper/TagMapper.java
new file mode 100644
index 0000000..84c1bb2
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/mapper/TagMapper.java
@@ -0,0 +1,27 @@
+package com.datamate.datamanagement.infrastructure.persistence.mapper;
+
+import com.datamate.datamanagement.domain.model.dataset.Tag;
+import org.apache.ibatis.annotations.Mapper;
+import org.apache.ibatis.annotations.Param;
+
+import java.util.List;
+
+@Mapper
+public interface TagMapper {
+ Tag findById(@Param("id") String id);
+ Tag findByName(@Param("name") String name);
+ List<Tag> findByNameIn(@Param("list") List<String> names);
+ List<Tag> findByIdIn(@Param("ids") List<String> ids);
+ List<Tag> findByKeyword(@Param("keyword") String keyword);
+ List<Tag> findAllByOrderByUsageCountDesc();
+
+ int insert(Tag tag);
+ int update(Tag tag);
+ int updateUsageCount(@Param("id") String id, @Param("usageCount") Long usageCount);
+
+ // Relations with dataset
+ int insertDatasetTag(@Param("datasetId") String datasetId, @Param("tagId") String tagId);
+ int deleteDatasetTagsByDatasetId(@Param("datasetId") String datasetId);
+ List<Tag> findByDatasetId(@Param("datasetId") String datasetId);
+ void deleteTagsById(@Param("ids") List<String> ids);
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/DatasetFileRepository.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/DatasetFileRepository.java
new file mode 100644
index 0000000..de9880f
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/DatasetFileRepository.java
@@ -0,0 +1,27 @@
+package com.datamate.datamanagement.infrastructure.persistence.repository;
+
+import com.baomidou.mybatisplus.extension.repository.IRepository;
+import com.datamate.datamanagement.domain.model.dataset.DatasetFile;
+import org.apache.ibatis.session.RowBounds;
+
+import java.util.List;
+
+/**
+ * 数据集文件仓储接口
+ *
+ * @author dallas
+ * @since 2025-10-15
+ */
+public interface DatasetFileRepository extends IRepository<DatasetFile> {
+ Long countByDatasetId(String datasetId);
+
+ Long countCompletedByDatasetId(String datasetId);
+
+ Long sumSizeByDatasetId(String datasetId);
+
+ List<DatasetFile> findAllByDatasetId(String datasetId);
+
+ DatasetFile findByDatasetIdAndFileName(String datasetId, String fileName);
+
+ List<DatasetFile> findByCriteria(String datasetId, String fileType, String status, RowBounds bounds);
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/DatasetRepository.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/DatasetRepository.java
new file mode 100644
index 0000000..b257161
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/DatasetRepository.java
@@ -0,0 +1,29 @@
+package com.datamate.datamanagement.infrastructure.persistence.repository;
+
+import com.baomidou.mybatisplus.core.metadata.IPage;
+import com.baomidou.mybatisplus.extension.repository.IRepository;
+import com.datamate.datamanagement.domain.model.dataset.Dataset;
+import com.datamate.datamanagement.interfaces.dto.AllDatasetStatisticsResponse;
+import com.datamate.datamanagement.interfaces.dto.DatasetPagingQuery;
+import org.apache.ibatis.session.RowBounds;
+
+import java.util.List;
+
+
+/**
+ * 数据集仓储层
+ *
+ * @author dallas
+ * @since 2025-10-15
+ */
+public interface DatasetRepository extends IRepository<Dataset> {
+ Dataset findByName(String name);
+
+ List<Dataset> findByCriteria(String type, String status, String keyword, List<String> tagList, RowBounds bounds);
+
+ long countByCriteria(String type, String status, String keyword, List<String> tagList);
+
+ AllDatasetStatisticsResponse getAllDatasetStatistics();
+
+ IPage<Dataset> findByCriteria(IPage<Dataset> page, DatasetPagingQuery query);
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/impl/DatasetFileRepositoryImpl.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/impl/DatasetFileRepositoryImpl.java
new file mode 100644
index 0000000..277e576
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/impl/DatasetFileRepositoryImpl.java
@@ -0,0 +1,54 @@
+package com.datamate.datamanagement.infrastructure.persistence.repository.impl;
+
+import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
+import com.baomidou.mybatisplus.extension.repository.CrudRepository;
+import com.datamate.datamanagement.domain.model.dataset.DatasetFile;
+import com.datamate.datamanagement.infrastructure.persistence.mapper.DatasetFileMapper;
+import com.datamate.datamanagement.infrastructure.persistence.repository.DatasetFileRepository;
+import lombok.RequiredArgsConstructor;
+import org.apache.ibatis.session.RowBounds;
+import org.springframework.stereotype.Repository;
+
+import java.util.List;
+
+/**
+ * 数据集文件仓储实现类
+ *
+ * @author dallas
+ * @since 2025-10-15
+ */
+@Repository
+@RequiredArgsConstructor
+public class DatasetFileRepositoryImpl extends CrudRepository<DatasetFileMapper, DatasetFile> implements DatasetFileRepository {
+ private final DatasetFileMapper datasetFileMapper;
+
+ @Override
+ public Long countByDatasetId(String datasetId) {
+ return datasetFileMapper.selectCount(new LambdaQueryWrapper<DatasetFile>().eq(DatasetFile::getDatasetId, datasetId));
+ }
+
+ @Override
+ public Long countCompletedByDatasetId(String datasetId) {
+ return datasetFileMapper.countCompletedByDatasetId(datasetId);
+ }
+
+ @Override
+ public Long sumSizeByDatasetId(String datasetId) {
+ return datasetFileMapper.sumSizeByDatasetId(datasetId);
+ }
+
+ @Override
+ public List<DatasetFile> findAllByDatasetId(String datasetId) {
+ return datasetFileMapper.findAllByDatasetId(datasetId);
+ }
+
+ @Override
+ public DatasetFile findByDatasetIdAndFileName(String datasetId, String fileName) {
+ return datasetFileMapper.findByDatasetIdAndFileName(datasetId, fileName);
+ }
+
+ @Override
+ public List<DatasetFile> findByCriteria(String datasetId, String fileType, String status, RowBounds bounds) {
+ return datasetFileMapper.findByCriteria(datasetId, fileType, status, bounds);
+ }
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/impl/DatasetRepositoryImpl.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/impl/DatasetRepositoryImpl.java
new file mode 100644
index 0000000..3fc5458
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/impl/DatasetRepositoryImpl.java
@@ -0,0 +1,73 @@
+package com.datamate.datamanagement.infrastructure.persistence.repository.impl;
+
+import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
+import com.baomidou.mybatisplus.core.metadata.IPage;
+import com.baomidou.mybatisplus.extension.repository.CrudRepository;
+import com.datamate.datamanagement.domain.model.dataset.Dataset;
+import com.datamate.datamanagement.infrastructure.persistence.mapper.DatasetMapper;
+import com.datamate.datamanagement.infrastructure.persistence.repository.DatasetRepository;
+import com.datamate.datamanagement.interfaces.dto.AllDatasetStatisticsResponse;
+import com.datamate.datamanagement.interfaces.dto.DatasetPagingQuery;
+import lombok.RequiredArgsConstructor;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.ibatis.session.RowBounds;
+import org.springframework.stereotype.Repository;
+
+import java.util.List;
+
+/**
+ * 数据集仓储层实现类
+ *
+ * @author dallas
+ * @since 2025-10-15
+ */
+@Repository
+@RequiredArgsConstructor
+public class DatasetRepositoryImpl extends CrudRepository<DatasetMapper, Dataset> implements DatasetRepository {
+ private final DatasetMapper datasetMapper;
+
+ @Override
+ public Dataset findByName(String name) {
+ return datasetMapper.selectOne(new LambdaQueryWrapper<Dataset>().eq(Dataset::getName, name));
+ }
+
+ @Override
+ public List<Dataset> findByCriteria(String type, String status, String keyword, List<String> tagList,
+ RowBounds bounds) {
+ return datasetMapper.findByCriteria(type, status, keyword, tagList, bounds);
+ }
+
+ @Override
+ public long countByCriteria(String type, String status, String keyword, List<String> tagList) {
+ return datasetMapper.countByCriteria(type, status, keyword, tagList);
+ }
+
+ @Override
+ public AllDatasetStatisticsResponse getAllDatasetStatistics() {
+ return datasetMapper.getAllDatasetStatistics();
+ }
+
+
+ @Override
+ public IPage<Dataset> findByCriteria(IPage<Dataset> page, DatasetPagingQuery query) {
+ LambdaQueryWrapper<Dataset> wrapper = new LambdaQueryWrapper<Dataset>()
+ .eq(query.getType() != null, Dataset::getDatasetType, query.getType())
+ .eq(query.getStatus() != null, Dataset::getStatus, query.getStatus())
+ // 关键词匹配名称或描述(或关系)
+ .and(StringUtils.isNotBlank(query.getKeyword()), w -> w
+ .like(Dataset::getName, query.getKeyword())
+ .or()
+ .like(Dataset::getDescription, query.getKeyword()));
+
+ /*
+ 标签过滤 {@link Tag}
+ */
+ for (String tagName : query.getTags()) {
+ wrapper.and(w ->
+ w.apply("tags IS NOT NULL " +
+ "AND JSON_VALID(tags) = 1 " +
+ "AND JSON_LENGTH(tags) > 0 " +
+ "AND JSON_SEARCH(tags, 'one', {0}, NULL, '$[*].name') IS NOT NULL", tagName)
+ );
+ }
+ wrapper.orderByDesc(Dataset::getCreatedAt);
+ return datasetMapper.selectPage(page, wrapper);
+ }
+}
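
A usage sketch of the tag-aware paged query above; page size and filter values are placeholders, and `Page` is the MyBatis-Plus pagination implementation:

```java
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.datamate.datamanagement.common.enums.DatasetStatusType;
import com.datamate.datamanagement.domain.model.dataset.Dataset;
import com.datamate.datamanagement.infrastructure.persistence.repository.DatasetRepository;
import com.datamate.datamanagement.interfaces.dto.DatasetPagingQuery;

import java.util.List;

class DatasetQuerySketch {
    /** First page (size 20) of ACTIVE datasets tagged "radiology" whose name or description matches "chest". */
    static IPage<Dataset> firstPage(DatasetRepository repository) {
        DatasetPagingQuery query = new DatasetPagingQuery();
        query.setStatus(DatasetStatusType.ACTIVE);
        query.setKeyword("chest");
        query.setTags(List.of("radiology"));
        return repository.findByCriteria(new Page<>(1, 20), query);
    }
}
```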
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/converter/DatasetConverter.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/converter/DatasetConverter.java
new file mode 100644
index 0000000..0247ffe
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/converter/DatasetConverter.java
@@ -0,0 +1,53 @@
+package com.datamate.datamanagement.interfaces.converter;
+
+import com.datamate.datamanagement.interfaces.dto.CreateDatasetRequest;
+import com.datamate.datamanagement.interfaces.dto.DatasetFileResponse;
+import com.datamate.datamanagement.interfaces.dto.DatasetResponse;
+import com.datamate.datamanagement.interfaces.dto.UploadFileRequest;
+import com.datamate.common.domain.model.ChunkUploadRequest;
+import com.datamate.datamanagement.domain.model.dataset.Dataset;
+import com.datamate.datamanagement.domain.model.dataset.DatasetFile;
+import org.mapstruct.Mapper;
+import org.mapstruct.Mapping;
+import org.mapstruct.factory.Mappers;
+
+import java.util.List;
+
+/**
+ * 数据集文件转换器
+ */
+@Mapper
+public interface DatasetConverter {
+ /** 单例实例 */
+ DatasetConverter INSTANCE = Mappers.getMapper(DatasetConverter.class);
+
+ /**
+ * 将数据集转换为响应
+ */
+ @Mapping(source = "sizeBytes", target = "totalSize")
+ @Mapping(source = "path", target = "targetLocation")
+ DatasetResponse convertToResponse(Dataset dataset);
+
+ /**
+ * 将创建数据集请求转换为数据集实体
+ */
+ @Mapping(target = "tags", ignore = true)
+ Dataset convertToDataset(CreateDatasetRequest createDatasetRequest);
+
+ /**
+ * 将上传文件请求转换为分片上传请求
+ */
+ ChunkUploadRequest toChunkUploadRequest(UploadFileRequest uploadFileRequest);
+
+ /**
+ * 将数据集转换为响应
+ */
+ List<DatasetResponse> convertToResponse(List<Dataset> datasets);
+
+ /**
+ * 将数据集文件转换为响应
+ */
+ DatasetFileResponse convertToResponse(DatasetFile datasetFile);
+}
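
MapStruct generates the converter implementation at build time; the `INSTANCE` field allows use outside of dependency injection:

```java
import com.datamate.datamanagement.domain.model.dataset.Dataset;
import com.datamate.datamanagement.interfaces.converter.DatasetConverter;
import com.datamate.datamanagement.interfaces.dto.DatasetResponse;

class ConverterUsageSketch {
    static DatasetResponse toResponse(Dataset dataset) {
        // sizeBytes -> totalSize and path -> targetLocation per the @Mapping rules above
        return DatasetConverter.INSTANCE.convertToResponse(dataset);
    }
}
```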
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/converter/TagConverter.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/converter/TagConverter.java
new file mode 100644
index 0000000..b5a007c
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/converter/TagConverter.java
@@ -0,0 +1,30 @@
+package com.datamate.datamanagement.interfaces.converter;
+
+import com.datamate.datamanagement.domain.model.dataset.Tag;
+import com.datamate.datamanagement.interfaces.dto.TagResponse;
+import com.datamate.datamanagement.interfaces.dto.UpdateTagRequest;
+import org.mapstruct.Mapper;
+import org.mapstruct.factory.Mappers;
+
+/**
+ * 标签转换器
+ */
+@Mapper
+public interface TagConverter {
+ /** 单例实例 */
+ TagConverter INSTANCE = Mappers.getMapper(TagConverter.class);
+
+ /**
+ * 将 UpdateTagRequest 转换为 Tag 实体
+ * @param request 更新标签请求DTO
+ * @return 标签实体
+ */
+ Tag updateRequestToTag(UpdateTagRequest request);
+
+ /**
+ * 将 Tag 实体转换为 TagResponse DTO
+ * @param tag 标签实体
+ * @return 标签响应DTO
+ */
+ TagResponse convertToResponse(Tag tag);
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/AllDatasetStatisticsResponse.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/AllDatasetStatisticsResponse.java
new file mode 100644
index 0000000..7da863f
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/AllDatasetStatisticsResponse.java
@@ -0,0 +1,20 @@
+package com.datamate.datamanagement.interfaces.dto;
+
+import lombok.Getter;
+import lombok.Setter;
+
+/**
+ * 所有数据集统计信息响应DTO
+ */
+@Getter
+@Setter
+public class AllDatasetStatisticsResponse {
+ /** 总数据集数 */
+ private Integer totalDatasets;
+
+ /** 总大小(字节) */
+ private Long totalSize;
+
+ /** 总文件数 */
+ private Long totalFiles;
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/CreateDatasetRequest.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/CreateDatasetRequest.java
new file mode 100644
index 0000000..7e55657
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/CreateDatasetRequest.java
@@ -0,0 +1,35 @@
+package com.datamate.datamanagement.interfaces.dto;
+
+import com.datamate.datamanagement.common.enums.DatasetType;
+import jakarta.validation.constraints.NotBlank;
+import jakarta.validation.constraints.NotNull;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+import lombok.Setter;
+
+import java.util.List;
+
+/**
+ * 创建数据集请求DTO
+ */
+@Getter
+@Setter
+@NoArgsConstructor
+@AllArgsConstructor
+public class CreateDatasetRequest {
+ /** 数据集名称 */
+ @NotBlank(message = "数据集名称不能为空")
+ private String name;
+ /** 数据集描述 */
+ private String description;
+ /** 数据集类型 */
+ @NotNull(message = "数据集类型不能为空")
+ private DatasetType datasetType;
+ /** 标签列表 */
+ private List<String> tags;
+ /** 数据源 */
+ private String dataSource;
+ /** 目标位置 */
+ private String targetLocation;
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/CreateTagRequest.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/CreateTagRequest.java
new file mode 100644
index 0000000..dca22bb
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/CreateTagRequest.java
@@ -0,0 +1,18 @@
+package com.datamate.datamanagement.interfaces.dto;
+
+import lombok.Getter;
+import lombok.Setter;
+
+/**
+ * 创建标签请求DTO
+ */
+@Getter
+@Setter
+public class CreateTagRequest {
+ /** 标签名称 */
+ private String name;
+ /** 标签颜色 */
+ private String color;
+ /** 标签描述 */
+ private String description;
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetFileResponse.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetFileResponse.java
new file mode 100644
index 0000000..ec06ff4
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetFileResponse.java
@@ -0,0 +1,36 @@
+package com.datamate.datamanagement.interfaces.dto;
+
+import lombok.Getter;
+import lombok.Setter;
+
+import java.time.LocalDateTime;
+
+/**
+ * 数据集文件响应DTO
+ */
+@Getter
+@Setter
+public class DatasetFileResponse {
+ /** 文件ID */
+ private String id;
+ /** 文件名 */
+ private String fileName;
+ /** 原始文件名 */
+ private String originalName;
+ /** 文件类型 */
+ private String fileType;
+ /** 文件大小(字节) */
+ private Long fileSize;
+ /** 文件状态 */
+ private String status;
+ /** 文件描述 */
+ private String description;
+ /** 文件路径 */
+ private String filePath;
+ /** 上传时间 */
+ private LocalDateTime uploadTime;
+ /** 最后访问时间 */
+ private LocalDateTime lastAccessTime;
+ /** 上传者 */
+ private String uploadedBy;
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetPagingQuery.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetPagingQuery.java
new file mode 100644
index 0000000..6016f4d
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetPagingQuery.java
@@ -0,0 +1,42 @@
+package com.datamate.datamanagement.interfaces.dto;
+
+import com.datamate.common.interfaces.PagingQuery;
+import com.datamate.datamanagement.common.enums.DatasetStatusType;
+import com.datamate.datamanagement.common.enums.DatasetType;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+import lombok.Setter;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * 数据集分页查询请求
+ *
+ */
+@Getter
+@Setter
+@NoArgsConstructor
+@AllArgsConstructor
+public class DatasetPagingQuery extends PagingQuery {
+ /**
+ * 数据集类型过滤
+ */
+ private DatasetType type;
+
+ /**
+ * 标签名过滤
+ */
+ private List<String> tags = new ArrayList<>();
+
+ /**
+ * 关键词搜索(名称或描述)
+ */
+ private String keyword;
+
+ /**
+ * 状态过滤
+ */
+ private DatasetStatusType status;
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetResponse.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetResponse.java
new file mode 100644
index 0000000..e7b1779
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetResponse.java
@@ -0,0 +1,47 @@
+package com.datamate.datamanagement.interfaces.dto;
+
+import lombok.Getter;
+import lombok.Setter;
+
+import java.time.LocalDateTime;
+import java.util.List;
+
+/**
+ * 数据集响应DTO
+ */
+@Getter
+@Setter
+public class DatasetResponse {
+ /** 数据集ID */
+ private String id;
+ /** 数据集名称 */
+ private String name;
+ /** 数据集描述 */
+ private String description;
+ /** 数据集类型 */
+ private String datasetType;
+ /** 数据集状态 */
+ private String status;
+ /** 标签列表 */
+ private List<String> tags;
+ /** 数据源 */
+ private String dataSource;
+ /** 目标位置 */
+ private String targetLocation;
+ /** 文件数量 */
+ private Integer fileCount;
+ /** 总大小(字节) */
+ private Long totalSize;
+ /** 完成率(0-100) */
+ private Float completionRate;
+ /** 创建时间 */
+ private LocalDateTime createdAt;
+ /** 更新时间 */
+ private LocalDateTime updatedAt;
+ /** 创建者 */
+ private String createdBy;
+ /**
+ * 更新者
+ */
+ private String updatedBy;
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetStatisticsResponse.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetStatisticsResponse.java
new file mode 100644
index 0000000..6159f80
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetStatisticsResponse.java
@@ -0,0 +1,26 @@
+package com.datamate.datamanagement.interfaces.dto;
+
+import lombok.Getter;
+import lombok.Setter;
+
+import java.util.Map;
+
+/**
+ * 数据集统计信息响应DTO
+ */
+@Getter
+@Setter
+public class DatasetStatisticsResponse {
+ /** 总文件数 */
+ private Integer totalFiles;
+ /** 已完成文件数 */
+ private Integer completedFiles;
+ /** 总大小(字节) */
+ private Long totalSize;
+ /** 完成率(0-100) */
+ private Float completionRate;
+ /** 文件类型分布 */
+ private Map<String, Long> fileTypeDistribution;
+ /** 状态分布 */
+ private Map<String, Long> statusDistribution;
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetTypeResponse.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetTypeResponse.java
new file mode 100644
index 0000000..6f53f7e
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetTypeResponse.java
@@ -0,0 +1,24 @@
+package com.datamate.datamanagement.interfaces.dto;
+
+import lombok.Getter;
+import lombok.Setter;
+
+import java.util.List;
+
+/**
+ * 数据集类型响应DTO
+ */
+@Getter
+@Setter
+public class DatasetTypeResponse {
+ /** 类型编码 */
+ private String code;
+ /** 类型名称 */
+ private String name;
+ /** 类型描述 */
+ private String description;
+ /** 支持的文件格式 */
+ private List<String> supportedFormats;
+ /** 图标 */
+ private String icon;
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/PagedDatasetFileResponse.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/PagedDatasetFileResponse.java
new file mode 100644
index 0000000..9e8100b
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/PagedDatasetFileResponse.java
@@ -0,0 +1,28 @@
+package com.datamate.datamanagement.interfaces.dto;
+
+import lombok.Getter;
+import lombok.Setter;
+
+import java.util.List;
+
+/**
+ * 数据集文件分页响应DTO
+ */
+@Getter
+@Setter
+public class PagedDatasetFileResponse {
+ /** 文件内容列表 */
+ private List<DatasetFileResponse> content;
+ /** 当前页码 */
+ private Integer page;
+ /** 每页大小 */
+ private Integer size;
+ /** 总元素数 */
+ private Integer totalElements;
+ /** 总页数 */
+ private Integer totalPages;
+ /** 是否为第一页 */
+ private Boolean first;
+ /** 是否为最后一页 */
+ private Boolean last;
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/PagedDatasetResponse.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/PagedDatasetResponse.java
new file mode 100644
index 0000000..12d6c64
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/PagedDatasetResponse.java
@@ -0,0 +1,28 @@
+package com.datamate.datamanagement.interfaces.dto;
+
+import lombok.Getter;
+import lombok.Setter;
+
+import java.util.List;
+
+/**
+ * 数据集分页响应DTO
+ */
+@Getter
+@Setter
+public class PagedDatasetResponse {
+ /** 数据集内容列表 */
+ private List<DatasetResponse> content;
+ /** 当前页码 */
+ private Integer page;
+ /** 每页大小 */
+ private Integer size;
+ /** 总元素数 */
+ private Integer totalElements;
+ /** 总页数 */
+ private Integer totalPages;
+ /** 是否为第一页 */
+ private Boolean first;
+ /** 是否为最后一页 */
+ private Boolean last;
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/TagResponse.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/TagResponse.java
new file mode 100644
index 0000000..e8294c3
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/TagResponse.java
@@ -0,0 +1,22 @@
+package com.datamate.datamanagement.interfaces.dto;
+
+import lombok.Getter;
+import lombok.Setter;
+
+/**
+ * 标签响应DTO
+ */
+@Getter
+@Setter
+public class TagResponse {
+ /** 标签ID */
+ private String id;
+ /** 标签名称 */
+ private String name;
+ /** 标签颜色 */
+ private String color;
+ /** 标签描述 */
+ private String description;
+ /** 使用次数 */
+ private Integer usageCount;
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/UpdateDatasetRequest.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/UpdateDatasetRequest.java
new file mode 100644
index 0000000..3aea04f
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/UpdateDatasetRequest.java
@@ -0,0 +1,25 @@
+package com.datamate.datamanagement.interfaces.dto;
+
+import com.datamate.datamanagement.common.enums.DatasetStatusType;
+import lombok.Getter;
+import lombok.Setter;
+
+import java.util.List;
+
+/**
+ * 更新数据集请求DTO
+ */
+@Getter
+@Setter
+public class UpdateDatasetRequest {
+ /** 数据集名称 */
+ private String name;
+ /** 数据集描述 */
+ private String description;
+ /** 归集任务id */
+ private String dataSource;
+ /** 标签列表 */
+ private List<String> tags;
+ /** 数据集状态 */
+ private DatasetStatusType status;
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/UpdateTagRequest.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/UpdateTagRequest.java
new file mode 100644
index 0000000..1fb6d13
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/UpdateTagRequest.java
@@ -0,0 +1,20 @@
+package com.datamate.datamanagement.interfaces.dto;
+
+import lombok.Getter;
+import lombok.Setter;
+
+/**
+ * 更新标签请求DTO
+ */
+@Getter
+@Setter
+public class UpdateTagRequest {
+ /** 标签 ID */
+ private String id;
+ /** 标签名称 */
+ private String name;
+ /** 标签颜色 */
+ private String color;
+ /** 标签描述 */
+ private String description;
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/UploadFileRequest.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/UploadFileRequest.java
new file mode 100644
index 0000000..e8c2b69
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/UploadFileRequest.java
@@ -0,0 +1,34 @@
+package com.datamate.datamanagement.interfaces.dto;
+
+import lombok.Getter;
+import lombok.Setter;
+import org.springframework.web.multipart.MultipartFile;
+
+/**
+ * 上传文件请求
+ * 用于分块上传文件时的请求参数封装,支持大文件分片上传功能
+ */
+@Getter
+@Setter
+public class UploadFileRequest {
+ /** 预上传返回的id,用来确认同一个任务 */
+ private String reqId;
+
+ /** 文件编号,用于标识批量上传中的第几个文件 */
+ private int fileNo;
+
+ /** 文件名称 */
+ private String fileName;
+
+ /** 文件总分块数量 */
+ private int totalChunkNum;
+
+ /** 当前分块编号,从1开始 */
+ private int chunkNo;
+
+ /** 上传的文件分块内容 */
+ private MultipartFile file;
+
+ /** 文件分块的校验和(十六进制字符串),用于验证文件完整性 */
+ private String checkSumHex;
+}
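
A hedged client-side sketch of how these fields could be populated per chunk; the 5 MB chunk size and SHA-256 hex checksum are assumptions (the server defines the real contract), and `MockMultipartFile` from spring-test stands in for a real multipart part:

```java
import com.datamate.datamanagement.interfaces.dto.UploadFileRequest;
import org.springframework.mock.web.MockMultipartFile;

import java.nio.file.Files;
import java.nio.file.Path;
import java.security.MessageDigest;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HexFormat;
import java.util.List;

class ChunkRequestSketch {
    /** Splits a local file into 5 MB chunks and fills one UploadFileRequest per chunk. */
    static List<UploadFileRequest> toChunks(String reqId, int fileNo, Path path) throws Exception {
        byte[] bytes = Files.readAllBytes(path);
        int chunkSize = 5 * 1024 * 1024; // assumed chunk size
        int totalChunkNum = (bytes.length + chunkSize - 1) / chunkSize;
        List<UploadFileRequest> chunks = new ArrayList<>();
        for (int chunkNo = 1; chunkNo <= totalChunkNum; chunkNo++) {
            byte[] part = Arrays.copyOfRange(bytes, (chunkNo - 1) * chunkSize,
                    Math.min(bytes.length, chunkNo * chunkSize));
            UploadFileRequest request = new UploadFileRequest();
            request.setReqId(reqId);
            request.setFileNo(fileNo);
            request.setFileName(path.getFileName().toString());
            request.setTotalChunkNum(totalChunkNum);
            request.setChunkNo(chunkNo); // chunk numbers start at 1
            request.setFile(new MockMultipartFile("file", path.getFileName().toString(),
                    "application/octet-stream", part));
            request.setCheckSumHex(HexFormat.of().formatHex(
                    MessageDigest.getInstance("SHA-256").digest(part)));
            chunks.add(request);
        }
        return chunks;
    }
}
```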
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/UploadFilesPreRequest.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/UploadFilesPreRequest.java
new file mode 100644
index 0000000..1bfcc12
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/UploadFilesPreRequest.java
@@ -0,0 +1,22 @@
+package com.datamate.datamanagement.interfaces.dto;
+
+import jakarta.validation.constraints.Min;
+import lombok.Getter;
+import lombok.Setter;
+
+/**
+ * 切片上传预上传请求
+ */
+@Getter
+@Setter
+public class UploadFilesPreRequest {
+ /** 是否为压缩包上传 */
+ private boolean hasArchive;
+
+ /** 总文件数量 */
+ @Min(1)
+ private int totalFileNum;
+
+ /** 总文件大小 */
+ private long totalSize;
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/rest/DatasetController.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/rest/DatasetController.java
new file mode 100644
index 0000000..173a4bb
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/rest/DatasetController.java
@@ -0,0 +1,115 @@
+package com.datamate.datamanagement.interfaces.rest;
+
+import com.datamate.common.infrastructure.common.Response;
+import com.datamate.common.infrastructure.exception.SystemErrorCode;
+import com.datamate.common.interfaces.PagedResponse;
+import com.datamate.datamanagement.application.DatasetApplicationService;
+import com.datamate.datamanagement.domain.model.dataset.Dataset;
+import com.datamate.datamanagement.interfaces.converter.DatasetConverter;
+import com.datamate.datamanagement.interfaces.dto.*;
+import jakarta.validation.Valid;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.*;
+
+import java.util.Map;
+
+/**
+ * 数据集 REST 控制器
+ */
+@Slf4j
+@RestController
+@RequiredArgsConstructor
+@RequestMapping("/data-management/datasets")
+public class DatasetController {
+ private final DatasetApplicationService datasetApplicationService;
+
+ /**
+ * 获取数据集列表
+ *
+ * @param query 分页查询参数
+ * @return 分页的数据集列表
+ */
+ @GetMapping
+ public PagedResponse<DatasetResponse> getDatasets(DatasetPagingQuery query) {
+ return datasetApplicationService.getDatasets(query);
+ }
+
+ /**
+ * 创建数据集
+ *
+ * @param createDatasetRequest 创建数据集请求参数
+ * @return 创建的数据集响应
+ */
+ @PostMapping
+ public DatasetResponse createDataset(@RequestBody @Valid CreateDatasetRequest createDatasetRequest) {
+ Dataset dataset = datasetApplicationService.createDataset(createDatasetRequest);
+ return DatasetConverter.INSTANCE.convertToResponse(dataset);
+ }
+
+ /**
+ * 根据ID获取数据集详情
+ *
+ * @param datasetId 数据集ID
+ * @return 数据集响应
+ */
+ @GetMapping("/{datasetId}")
+ public DatasetResponse getDatasetById(@PathVariable("datasetId") String datasetId) {
+ Dataset dataset = datasetApplicationService.getDataset(datasetId);
+ return DatasetConverter.INSTANCE.convertToResponse(dataset);
+ }
+
+ /**
+ * 根据ID更新数据集
+ *
+ * @param datasetId 数据集ID
+ * @param updateDatasetRequest 更新数据集请求参数
+ * @return 更新后的数据集响应
+ */
+ @PutMapping("/{datasetId}")
+ public DatasetResponse updateDataset(@PathVariable("datasetId") String datasetId,
+ @RequestBody UpdateDatasetRequest updateDatasetRequest) {
+ Dataset dataset = datasetApplicationService.updateDataset(datasetId, updateDatasetRequest);
+ return DatasetConverter.INSTANCE.convertToResponse(dataset);
+ }
+
+ /**
+ * 根据ID删除数据集
+ *
+ * @param datasetId 数据集ID
+ */
+ @DeleteMapping("/{datasetId}")
+ public void deleteDataset(@PathVariable("datasetId") String datasetId) {
+ datasetApplicationService.deleteDataset(datasetId);
+ }
+
+ @GetMapping("/{datasetId}/statistics")
+ public ResponseEntity<Response<DatasetStatisticsResponse>> getDatasetStatistics(
+ @PathVariable("datasetId") String datasetId) {
+ try {
+ Map<String, Object> stats = datasetApplicationService.getDatasetStatistics(datasetId);
+
+ DatasetStatisticsResponse response = new DatasetStatisticsResponse();
+ response.setTotalFiles((Integer) stats.get("totalFiles"));
+ response.setCompletedFiles((Integer) stats.get("completedFiles"));
+ response.setTotalSize((Long) stats.get("totalSize"));
+ response.setCompletionRate((Float) stats.get("completionRate"));
+ response.setFileTypeDistribution((Map<String, Long>) stats.get("fileTypeDistribution"));
+ response.setStatusDistribution((Map<String, Long>) stats.get("statusDistribution"));
+
+ return ResponseEntity.ok(Response.ok(response));
+ } catch (IllegalArgumentException e) {
+ return ResponseEntity.status(HttpStatus.NOT_FOUND).body(Response.error(SystemErrorCode.UNKNOWN_ERROR, null));
+ } catch (Exception e) {
+ return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(Response.error(SystemErrorCode.UNKNOWN_ERROR, null));
+ }
+ }
+
+ @GetMapping("/statistics")
+ public ResponseEntity<Response<AllDatasetStatisticsResponse>> getAllStatistics() {
+ return ResponseEntity.ok(Response.ok(datasetApplicationService.getAllDatasetStatistics()));
+ }
+}
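
For a quick smoke test of the routes above, assuming the service is reachable directly on port 8080 with no extra context path (the envelope is printed as raw JSON since the `Response` type's shape is not shown in this PR):

```java
import org.springframework.web.client.RestTemplate;

class DatasetApiSmokeTest {
    public static void main(String[] args) {
        String baseUrl = "http://localhost:8080"; // assumed host/port, adjust to the deployment
        // Aggregated statistics across all datasets, wrapped in the common Response envelope.
        String allStats = new RestTemplate()
                .getForObject(baseUrl + "/data-management/datasets/statistics", String.class);
        System.out.println(allStats);
    }
}
```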
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/rest/DatasetFileController.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/rest/DatasetFileController.java
new file mode 100644
index 0000000..24fe4c3
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/rest/DatasetFileController.java
@@ -0,0 +1,163 @@
+package com.datamate.datamanagement.interfaces.rest;
+
+import com.datamate.common.infrastructure.common.IgnoreResponseWrap;
+import com.datamate.common.infrastructure.common.Response;
+import com.datamate.common.infrastructure.exception.SystemErrorCode;
+import com.datamate.datamanagement.application.DatasetFileApplicationService;
+import com.datamate.datamanagement.domain.model.dataset.DatasetFile;
+import com.datamate.datamanagement.interfaces.converter.DatasetConverter;
+import com.datamate.datamanagement.interfaces.dto.DatasetFileResponse;
+import com.datamate.datamanagement.interfaces.dto.PagedDatasetFileResponse;
+import com.datamate.datamanagement.interfaces.dto.UploadFileRequest;
+import com.datamate.datamanagement.interfaces.dto.UploadFilesPreRequest;
+import jakarta.servlet.http.HttpServletResponse;
+import jakarta.validation.Valid;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.core.io.Resource;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.PageRequest;
+import org.springframework.data.domain.Pageable;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.MediaType;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.*;
+import org.springframework.web.multipart.MultipartFile;
+
+import java.util.stream.Collectors;
+
+/**
+ * 数据集文件 REST 控制器(UUID 模式)
+ */
+@Slf4j
+@RestController
+@RequestMapping("/data-management/datasets/{datasetId}/files")
+public class DatasetFileController {
+
+ private final DatasetFileApplicationService datasetFileApplicationService;
+
+ @Autowired
+ public DatasetFileController(DatasetFileApplicationService datasetFileApplicationService) {
+ this.datasetFileApplicationService = datasetFileApplicationService;
+ }
+
+ @GetMapping
+ public ResponseEntity<Response<PagedDatasetFileResponse>> getDatasetFiles(
+ @PathVariable("datasetId") String datasetId,
+ @RequestParam(value = "page", required = false, defaultValue = "0") Integer page,
+ @RequestParam(value = "size", required = false, defaultValue = "20") Integer size,
+ @RequestParam(value = "fileType", required = false) String fileType,
+ @RequestParam(value = "status", required = false) String status) {
+ Pageable pageable = PageRequest.of(page != null ? page : 0, size != null ? size : 20);
+
+ Page<DatasetFile> filesPage = datasetFileApplicationService.getDatasetFiles(
+ datasetId, fileType, status, pageable);
+
+ PagedDatasetFileResponse response = new PagedDatasetFileResponse();
+ response.setContent(filesPage.getContent().stream()
+ .map(DatasetConverter.INSTANCE::convertToResponse)
+ .collect(Collectors.toList()));
+ response.setPage(filesPage.getNumber());
+ response.setSize(filesPage.getSize());
+ response.setTotalElements((int) filesPage.getTotalElements());
+ response.setTotalPages(filesPage.getTotalPages());
+ response.setFirst(filesPage.isFirst());
+ response.setLast(filesPage.isLast());
+
+ return ResponseEntity.ok(Response.ok(response));
+ }
+
+ @PostMapping(consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
+ public ResponseEntity<Response<DatasetFileResponse>> uploadDatasetFile(
+ @PathVariable("datasetId") String datasetId,
+ @RequestPart(value = "file", required = false) MultipartFile file) {
+ try {
+ DatasetFile datasetFile = datasetFileApplicationService.uploadFile(datasetId, file);
+
+ return ResponseEntity.status(HttpStatus.CREATED).body(Response.ok(DatasetConverter.INSTANCE.convertToResponse(datasetFile)));
+ } catch (IllegalArgumentException e) {
+ return ResponseEntity.badRequest().body(Response.error(SystemErrorCode.UNKNOWN_ERROR, null));
+ } catch (Exception e) {
+ log.error("upload fail", e);
+ return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(Response.error(SystemErrorCode.UNKNOWN_ERROR, null));
+ }
+ }
+
+ @GetMapping("/{fileId}")
+ public ResponseEntity<Response<DatasetFileResponse>> getDatasetFileById(
+ @PathVariable("datasetId") String datasetId,
+ @PathVariable("fileId") String fileId) {
+ try {
+ DatasetFile datasetFile = datasetFileApplicationService.getDatasetFile(datasetId, fileId);
+ return ResponseEntity.ok(Response.ok(DatasetConverter.INSTANCE.convertToResponse(datasetFile)));
+ } catch (IllegalArgumentException e) {
+ return ResponseEntity.status(HttpStatus.NOT_FOUND).body(Response.error(SystemErrorCode.UNKNOWN_ERROR, null));
+ }
+ }
+
+ @DeleteMapping("/{fileId}")
+ public ResponseEntity<Response<Void>> deleteDatasetFile(
+ @PathVariable("datasetId") String datasetId,
+ @PathVariable("fileId") String fileId) {
+ try {
+ datasetFileApplicationService.deleteDatasetFile(datasetId, fileId);
+ return ResponseEntity.ok().build();
+ } catch (IllegalArgumentException e) {
+ return ResponseEntity.status(HttpStatus.NOT_FOUND).body(Response.error(SystemErrorCode.UNKNOWN_ERROR, null));
+ }
+ }
+
+ @IgnoreResponseWrap
+ @GetMapping(value = "/{fileId}/download", produces = MediaType.APPLICATION_OCTET_STREAM_VALUE)
+ public ResponseEntity<Resource> downloadDatasetFileById(
+ @PathVariable("datasetId") String datasetId,
+ @PathVariable("fileId") String fileId) {
+ try {
+ DatasetFile datasetFile = datasetFileApplicationService.getDatasetFile(datasetId, fileId);
+ Resource resource = datasetFileApplicationService.downloadFile(datasetId, fileId);
+
+ return ResponseEntity.ok()
+ .contentType(MediaType.APPLICATION_OCTET_STREAM)
+ .header(HttpHeaders.CONTENT_DISPOSITION,
+ "attachment; filename=\"" + datasetFile.getFileName() + "\"")
+ .body(resource);
+ } catch (IllegalArgumentException e) {
+ return ResponseEntity.status(HttpStatus.NOT_FOUND).build();
+ } catch (Exception e) {
+ return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
+ }
+ }
+
+ @IgnoreResponseWrap
+ @GetMapping(value = "/download", produces = MediaType.APPLICATION_OCTET_STREAM_VALUE)
+ public void downloadDatasetFileAsZip(@PathVariable("datasetId") String datasetId, HttpServletResponse response) {
+ datasetFileApplicationService.downloadDatasetFileAsZip(datasetId, response);
+ }
+
+ /**
+ * 批量文件上传预请求
+ *
+ * @param request 批量文件上传请求
+ * @return 批量上传请求id
+ */
+ @PostMapping("/upload/pre-upload")
+ public ResponseEntity<Response<?>> preUpload(@PathVariable("datasetId") String datasetId, @RequestBody @Valid UploadFilesPreRequest request) {
+
+ return ResponseEntity.ok(Response.ok(datasetFileApplicationService.preUpload(request, datasetId)));
+ }
+
+ /**
+ * 分块上传
+ *
+ * @param uploadFileRequest 上传文件请求
+ */
+ @PostMapping("/upload/chunk")
+ public ResponseEntity<Void> chunkUpload(@PathVariable("datasetId") String datasetId, UploadFileRequest uploadFileRequest) {
+ log.info("file upload reqId:{}, fileNo:{}, total chunk num:{}, current chunkNo:{}",
+ uploadFileRequest.getReqId(), uploadFileRequest.getFileNo(), uploadFileRequest.getTotalChunkNum(),
+ uploadFileRequest.getChunkNo());
+ datasetFileApplicationService.chunkUpload(datasetId, uploadFileRequest);
+ return ResponseEntity.ok().build();
+ }
+}
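A rough sketch of the two-step upload flow above: pre-upload announces the batch, then each chunk is posted as multipart form data. The form field names are taken from the getters logged in chunkUpload (reqId, fileNo, totalChunkNum, chunkNo); the chunk part name, the pre-upload body shape, and the envelope parsing are assumptions.

    import org.springframework.core.io.ByteArrayResource;
    import org.springframework.http.MediaType;
    import org.springframework.http.client.MultipartBodyBuilder;
    import org.springframework.web.client.RestClient;

    public class ChunkUploadSketch {
        public static void main(String[] args) {
            RestClient client = RestClient.create("http://localhost:8080/api");
            String datasetId = "dataset-uuid";

            // Step 1: announce the batch; the UploadFilesPreRequest body shape is assumed
            String preUploadResponse = client.post()
                    .uri("/data-management/datasets/{id}/files/upload/pre-upload", datasetId)
                    .contentType(MediaType.APPLICATION_JSON)
                    .body("{\"files\":[{\"fileName\":\"sample.csv\",\"fileSize\":3}]}")
                    .retrieve()
                    .body(String.class);
            System.out.println(preUploadResponse);
            // The request id sits inside the platform Response envelope; JSON parsing is omitted here
            String reqId = "parsed-req-id";

            // Step 2: post each chunk as multipart form data bound onto UploadFileRequest
            byte[] data = "a,b".getBytes();
            int totalChunks = 1;
            for (int chunkNo = 1; chunkNo <= totalChunks; chunkNo++) {
                MultipartBodyBuilder form = new MultipartBodyBuilder();
                form.part("reqId", reqId);
                form.part("fileNo", "1");
                form.part("totalChunkNum", String.valueOf(totalChunks));
                form.part("chunkNo", String.valueOf(chunkNo));
                form.part("file", new ByteArrayResource(data) {
                    @Override public String getFilename() { return "sample.csv"; }
                });
                client.post()
                        .uri("/data-management/datasets/{id}/files/upload/chunk", datasetId)
                        .contentType(MediaType.MULTIPART_FORM_DATA)
                        .body(form.build())
                        .retrieve()
                        .toBodilessEntity();
            }
        }
    }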
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/rest/DatasetTypeController.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/rest/DatasetTypeController.java
new file mode 100644
index 0000000..dfc3600
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/rest/DatasetTypeController.java
@@ -0,0 +1,53 @@
+package com.datamate.datamanagement.interfaces.rest;
+
+import com.datamate.datamanagement.interfaces.dto.DatasetTypeResponse;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * 数据集类型 REST 控制器
+ */
+@RestController
+@RequestMapping("/data-management/dataset-types")
+public class DatasetTypeController {
+
+ /**
+ * 获取所有支持的数据集类型
+ * @return 数据集类型列表
+ */
+ @GetMapping
+ public List<DatasetTypeResponse> getDatasetTypes() {
+ return Arrays.asList(
+ createDatasetType("IMAGE", "图像数据集", "用于机器学习的图像数据集", Arrays.asList("jpg", "jpeg", "png", "bmp", "gif")),
+ createDatasetType("TEXT", "文本数据集", "用于文本分析的文本数据集", Arrays.asList("txt", "csv", "json", "xml")),
+ createDatasetType("AUDIO", "音频数据集", "用于音频处理的音频数据集", Arrays.asList("wav", "mp3", "flac", "aac")),
+ createDatasetType("VIDEO", "视频数据集", "用于视频分析的视频数据集", Arrays.asList("mp4", "avi", "mov", "mkv")),
+ createDatasetType("MULTIMODAL", "多模态数据集", "包含多种数据类型的数据集", List.of("*"))
+ );
+ }
+
+ private DatasetTypeResponse createDatasetType(String code, String name, String description, List<String> supportedFormats) {
+ DatasetTypeResponse response = new DatasetTypeResponse();
+ response.setCode(code);
+ response.setName(name);
+ response.setDescription(description);
+ response.setSupportedFormats(supportedFormats);
+ response.setIcon(getIconForType(code));
+ return response;
+ }
+
+ private String getIconForType(String typeCode) {
+ return switch (typeCode) {
+ case "IMAGE" -> "🖼️";
+ case "TEXT" -> "📄";
+ case "AUDIO" -> "🎵";
+ case "VIDEO" -> "🎬";
+ case "MULTIMODAL" -> "📊";
+ default -> "📁";
+ };
+ }
+}
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/rest/TagController.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/rest/TagController.java
new file mode 100644
index 0000000..476caeb
--- /dev/null
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/rest/TagController.java
@@ -0,0 +1,85 @@
+package com.datamate.datamanagement.interfaces.rest;
+
+import com.datamate.common.infrastructure.common.Response;
+import com.datamate.common.infrastructure.exception.SystemErrorCode;
+import com.datamate.datamanagement.application.TagApplicationService;
+import com.datamate.datamanagement.domain.model.dataset.Tag;
+import com.datamate.datamanagement.interfaces.converter.TagConverter;
+import com.datamate.datamanagement.interfaces.dto.CreateTagRequest;
+import com.datamate.datamanagement.interfaces.dto.TagResponse;
+import com.datamate.datamanagement.interfaces.dto.UpdateTagRequest;
+import jakarta.validation.Valid;
+import jakarta.validation.constraints.Size;
+import org.apache.commons.lang3.StringUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.*;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * 标签 REST 控制器(UUID 模式)
+ */
+@RestController
+@RequestMapping("/data-management/tags")
+public class TagController {
+
+ private final TagApplicationService tagApplicationService;
+
+ @Autowired
+ public TagController(TagApplicationService tagApplicationService) {
+ this.tagApplicationService = tagApplicationService;
+ }
+
+ /**
+ * 查询标签列表
+ */
+ @GetMapping
+ public ResponseEntity<Response<List<TagResponse>>> getTags(@RequestParam(name = "keyword", required = false) String keyword) {
+ List<Tag> tags = tagApplicationService.searchTags(keyword);
+ List<TagResponse> response = tags.stream()
+ .map(TagConverter.INSTANCE::convertToResponse)
+ .collect(Collectors.toList());
+ return ResponseEntity.ok(Response.ok(response));
+ }
+
+ /**
+ * 创建标签
+ */
+ @PostMapping
+ public ResponseEntity<Response<TagResponse>> createTag(@RequestBody CreateTagRequest createTagRequest) {
+ try {
+ Tag tag = tagApplicationService.createTag(
+ createTagRequest.getName(),
+ createTagRequest.getColor(),
+ createTagRequest.getDescription()
+ );
+ return ResponseEntity.ok(Response.ok(TagConverter.INSTANCE.convertToResponse(tag)));
+ } catch (IllegalArgumentException e) {
+ return ResponseEntity.badRequest().body(Response.error(SystemErrorCode.UNKNOWN_ERROR, null));
+ }
+ }
+
+ /**
+ * 更新标签
+ *
+ * @param updateTagRequest 更新参数
+ * @return 更新结果
+ */
+ @PutMapping
+ public ResponseEntity<Response<TagResponse>> updateTag(@RequestBody @Valid UpdateTagRequest updateTagRequest) {
+ Tag tag = tagApplicationService.updateTag(TagConverter.INSTANCE.updateRequestToTag(updateTagRequest));
+ return ResponseEntity.ok(Response.ok(TagConverter.INSTANCE.convertToResponse(tag)));
+ }
+
+ @DeleteMapping
+ public ResponseEntity<Response<Void>> deleteTag(@RequestParam(value = "ids") @Valid @Size(max = 10) List<String> ids) {
+ try {
+ tagApplicationService.deleteTag(ids.stream().filter(StringUtils::isNoneBlank).distinct().toList());
+ return ResponseEntity.ok(Response.ok(null));
+ } catch (IllegalArgumentException e) {
+ return ResponseEntity.badRequest().body(Response.error(SystemErrorCode.UNKNOWN_ERROR, null));
+ }
+ }
+}
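A short sketch of driving the tag endpoints above from the same local base URL; the CreateTagRequest field names are assumptions, and the ids query parameter respects the @Size(max = 10) constraint on deleteTag.

    import org.springframework.http.MediaType;
    import org.springframework.web.client.RestClient;

    import java.util.Map;

    public class TagApiSketch {
        public static void main(String[] args) {
            RestClient client = RestClient.create("http://localhost:8080/api");

            // Create a tag; field names mirror CreateTagRequest and are assumed here
            client.post()
                    .uri("/data-management/tags")
                    .contentType(MediaType.APPLICATION_JSON)
                    .body(Map.of("name", "nlp", "color", "#1890ff", "description", "NLP datasets"))
                    .retrieve()
                    .toBodilessEntity();

            // Search by keyword; the parameter is optional
            String tags = client.get()
                    .uri(uri -> uri.path("/data-management/tags").queryParam("keyword", "nlp").build())
                    .retrieve()
                    .body(String.class);
            System.out.println(tags);

            // Batch delete by id: at most 10 ids per call
            client.delete()
                    .uri(uri -> uri.path("/data-management/tags").queryParam("ids", "tag-id-1", "tag-id-2").build())
                    .retrieve()
                    .toBodilessEntity();
        }
    }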
diff --git a/backend/services/data-management-service/src/main/resources/config/application-datamanagement.yml b/backend/services/data-management-service/src/main/resources/config/application-datamanagement.yml
new file mode 100644
index 0000000..df603c3
--- /dev/null
+++ b/backend/services/data-management-service/src/main/resources/config/application-datamanagement.yml
@@ -0,0 +1,11 @@
+dataMate:
+ datamanagement:
+ file-storage:
+ upload-dir: ${FILE_UPLOAD_DIR:./uploads}
+ max-file-size: 10485760 # 10MB
+ max-request-size: 52428800 # 50MB
+ cache:
+ ttl: 3600
+ max-size: 1000
+# MyBatis is configured centrally in main-application (mapper-locations & aliases)
+# to avoid list overriding issues when importing multiple module configs.
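These file-storage keys are typically consumed through a @ConfigurationProperties binding; the class below is only a sketch of such a binding (relaxed binding maps the dataMate prefix to the canonical lowercase form) and does not exist in this change.

    import org.springframework.boot.context.properties.ConfigurationProperties;
    import org.springframework.stereotype.Component;

    // Hypothetical binding for the keys above; the class and accessor names are illustrative
    @Component
    @ConfigurationProperties(prefix = "datamate.datamanagement.file-storage")
    public class FileStorageProperties {
        private String uploadDir;     // upload-dir, defaults to ./uploads via FILE_UPLOAD_DIR
        private long maxFileSize;     // max-file-size in bytes (10485760 = 10MB)
        private long maxRequestSize;  // max-request-size in bytes (52428800 = 50MB)

        public String getUploadDir() { return uploadDir; }
        public void setUploadDir(String uploadDir) { this.uploadDir = uploadDir; }
        public long getMaxFileSize() { return maxFileSize; }
        public void setMaxFileSize(long maxFileSize) { this.maxFileSize = maxFileSize; }
        public long getMaxRequestSize() { return maxRequestSize; }
        public void setMaxRequestSize(long maxRequestSize) { this.maxRequestSize = maxRequestSize; }
    }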
diff --git a/backend/services/data-management-service/src/main/resources/mappers/DatasetFileMapper.xml b/backend/services/data-management-service/src/main/resources/mappers/DatasetFileMapper.xml
new file mode 100644
index 0000000..f5c6a1e
--- /dev/null
+++ b/backend/services/data-management-service/src/main/resources/mappers/DatasetFileMapper.xml
@@ -0,0 +1,98 @@
+
+
+
+
+ id, dataset_id, file_name, file_path, file_type, file_size, check_sum, tags, metadata, status,
+ upload_time, last_access_time, created_at, updated_at
+
+
+
+ SELECT
+ FROM t_dm_dataset_files
+ WHERE id = #{id}
+
+
+
+ SELECT
+ FROM t_dm_dataset_files
+ WHERE dataset_id = #{datasetId}
+ ORDER BY upload_time DESC
+
+
+
+ SELECT
+ FROM t_dm_dataset_files
+ WHERE dataset_id = #{datasetId}
+ AND status = #{status}
+ ORDER BY upload_time DESC
+
+
+
+ SELECT
+ FROM t_dm_dataset_files
+ WHERE dataset_id = #{datasetId}
+ AND file_type = #{fileType}
+ ORDER BY upload_time DESC
+
+
+
+ SELECT COUNT(*) FROM t_dm_dataset_files WHERE dataset_id = #{datasetId}
+
+
+
+ SELECT COUNT(*) FROM t_dm_dataset_files WHERE dataset_id = #{datasetId} AND status = 'COMPLETED'
+
+
+
+ SELECT COALESCE(SUM(file_size), 0) FROM t_dm_dataset_files WHERE dataset_id = #{datasetId}
+
+
+
+ SELECT
+ FROM t_dm_dataset_files
+ WHERE dataset_id = #{datasetId} AND file_name = #{fileName}
+ LIMIT 1
+
+
+
+ SELECT
+ FROM t_dm_dataset_files
+ WHERE dataset_id = #{datasetId}
+ ORDER BY upload_time DESC
+
+
+
+ SELECT
+ FROM t_dm_dataset_files
+ WHERE dataset_id = #{datasetId}
+
+
+ AND file_type = #{fileType}
+
+
+ AND status = #{status}
+
+ ORDER BY upload_time DESC
+
+
+
+
+ UPDATE t_dm_dataset_files
+ SET file_name = #{fileName},
+ file_path = #{filePath},
+ file_type = #{fileType},
+ file_size = #{fileSize},
+ upload_time = #{uploadTime},
+ last_access_time = #{lastAccessTime},
+ status = #{status}
+ WHERE id = #{id}
+
+
+
+ DELETE FROM t_dm_dataset_files WHERE id = #{id}
+
+
diff --git a/backend/services/data-management-service/src/main/resources/mappers/DatasetMapper.xml b/backend/services/data-management-service/src/main/resources/mappers/DatasetMapper.xml
new file mode 100644
index 0000000..f266894
--- /dev/null
+++ b/backend/services/data-management-service/src/main/resources/mappers/DatasetMapper.xml
@@ -0,0 +1,152 @@
+
+
+
+
+
+ id, name, description, dataset_type, category, path, format, schema_info, size_bytes, file_count, record_count,
+ retention_days, tags, metadata, status, is_public, is_featured, version, created_at, updated_at, created_by, updated_by
+
+
+
+ d.id AS id,
+ d.name AS name,
+ d.description AS description,
+ d.dataset_type AS dataset_type,
+ d.category AS category,
+ d.path AS path,
+ d.format AS format,
+ d.schema_info AS schema_info,
+ d.size_bytes AS size_bytes,
+ d.file_count AS file_count,
+ d.record_count AS record_count,
+ d.retention_days AS retention_days,
+ d.tags AS tags,
+ d.metadata AS metadata,
+ d.status AS status,
+ d.is_public AS is_public,
+ d.is_featured AS is_featured,
+ d.version AS version,
+ d.created_at AS created_at,
+ d.updated_at AS updated_at,
+ d.created_by AS created_by,
+ d.updated_by AS updated_by
+
+
+
+ SELECT
+ FROM t_dm_datasets
+ WHERE id = #{id}
+
+
+
+ SELECT
+ FROM t_dm_datasets
+ WHERE name = #{name}
+ LIMIT 1
+
+
+
+ SELECT
+ FROM t_dm_datasets
+ WHERE status = #{status}
+ ORDER BY updated_at DESC
+
+
+
+ SELECT
+ FROM t_dm_datasets
+ WHERE created_by = #{createdBy}
+ ORDER BY created_at DESC
+
+
+
+ SELECT
+ FROM t_dm_datasets
+ WHERE dataset_type = #{typeCode}
+ ORDER BY created_at DESC
+
+
+
+ SELECT DISTINCT
+ FROM t_dm_datasets d
+ JOIN t_dm_dataset_tags dt ON d.id = dt.dataset_id
+ JOIN t_dm_tags t ON t.id = dt.tag_id
+ WHERE t.name IN
+
+ #{name}
+
+ ORDER BY d.created_at DESC
+
+
+
+ SELECT
+ FROM t_dm_datasets
+ WHERE name LIKE CONCAT('%', #{keyword}, '%')
+ OR description LIKE CONCAT('%', #{keyword}, '%')
+ ORDER BY created_at DESC
+
+
+
+ SELECT DISTINCT
+ FROM t_dm_datasets d
+ LEFT JOIN t_dm_dataset_tags dt ON d.id = dt.dataset_id
+ LEFT JOIN t_dm_tags t ON t.id = dt.tag_id
+
+
+ AND d.dataset_type = #{typeCode}
+
+
+ AND d.status = #{status}
+
+
+ AND (d.name LIKE CONCAT('%', #{keyword}, '%') OR d.description LIKE CONCAT('%', #{keyword}, '%'))
+
+
+ AND t.name IN
+
+ #{n}
+
+
+
+ ORDER BY d.created_at DESC
+
+
+
+ SELECT COUNT(DISTINCT d.id)
+ FROM t_dm_datasets d
+ LEFT JOIN t_dm_dataset_tags dt ON d.id = dt.dataset_id
+ LEFT JOIN t_dm_tags t ON t.id = dt.tag_id
+
+
+ AND d.dataset_type = #{typeCode}
+
+
+ AND d.status = #{status}
+
+
+ AND (d.name LIKE CONCAT('%', #{keyword}, '%') OR d.description LIKE CONCAT('%', #{keyword}, '%'))
+
+
+ AND t.name IN
+
+ #{n}
+
+
+
+
+
+
+ DELETE FROM t_dm_datasets WHERE id = #{id}
+
+
+
+ SELECT
+ COUNT(*) AS total_datasets,
+ SUM(size_bytes) AS total_size,
+ SUM(file_count) AS total_files
+ FROM t_dm_datasets;
+
+
diff --git a/backend/services/data-management-service/src/main/resources/mappers/TagMapper.xml b/backend/services/data-management-service/src/main/resources/mappers/TagMapper.xml
new file mode 100644
index 0000000..accaad8
--- /dev/null
+++ b/backend/services/data-management-service/src/main/resources/mappers/TagMapper.xml
@@ -0,0 +1,111 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ id, name, description, category, color, usage_count, created_at, updated_at
+
+
+
+ SELECT
+ FROM t_dm_tags
+ WHERE id = #{id}
+
+
+
+ SELECT
+ FROM t_dm_tags
+ WHERE name = #{name}
+ LIMIT 1
+
+
+
+ SELECT
+ FROM t_dm_tags
+ WHERE name IN
+
+ #{n}
+
+
+
+
+ SELECT
+ FROM t_dm_tags
+ WHERE name LIKE CONCAT('%', #{keyword}, '%')
+ ORDER BY usage_count DESC, name ASC
+
+
+
+ SELECT
+ FROM t_dm_tags
+ ORDER BY usage_count DESC, name ASC
+
+
+
+ INSERT INTO t_dm_tags (id, name, description, category, color, usage_count)
+ VALUES (#{id}, #{name}, #{description}, #{category}, #{color}, #{usageCount})
+
+
+
+ UPDATE t_dm_tags
+ SET name = #{name},
+ description = #{description},
+ category = #{category},
+ color = #{color},
+ usage_count = #{usageCount}
+ WHERE id = #{id}
+
+
+
+ UPDATE t_dm_tags
+ SET usage_count = #{usageCount}
+ WHERE id = #{id}
+
+
+
+
+ INSERT INTO t_dm_dataset_tags (dataset_id, tag_id)
+ VALUES (#{datasetId}, #{tagId})
+
+
+
+ DELETE FROM t_dm_dataset_tags WHERE dataset_id = #{datasetId}
+
+
+
+ SELECT t.id, t.name, t.description, t.category, t.color, t.usage_count, t.created_at, t.updated_at
+ FROM t_dm_tags t
+ JOIN t_dm_dataset_tags dt ON dt.tag_id = t.id
+ WHERE dt.dataset_id = #{datasetId}
+ ORDER BY t.usage_count DESC, t.name ASC
+
+
+
+ DELETE FROM t_dm_tags WHERE
+ id IN
+
+ #{id}
+
+
+
+
+ SELECT
+ FROM t_dm_tags
+ WHERE id IN
+
+ #{id}
+
+
+
diff --git a/backend/services/data-synthesis-service/pom.xml b/backend/services/data-synthesis-service/pom.xml
new file mode 100644
index 0000000..bc146a4
--- /dev/null
+++ b/backend/services/data-synthesis-service/pom.xml
@@ -0,0 +1,92 @@
+
+
+ 4.0.0
+
+
+ com.datamate
+ data-mate-platform
+ 1.0.0-SNAPSHOT
+ ../../pom.xml
+
+
+ data-synthesis-service
+ Data Synthesis Service
+ 数据合成服务
+
+
+
+ com.datamate
+ domain-common
+ ${project.version}
+
+
+ org.springframework.boot
+ spring-boot-starter-web
+
+
+ mysql
+ mysql-connector-java
+ ${mysql.version}
+
+
+ org.springframework.boot
+ spring-boot-starter-test
+ test
+
+
+ org.springframework.cloud
+ spring-cloud-starter-openfeign
+
+
+ org.springdoc
+ springdoc-openapi-starter-webmvc-ui
+
+
+ org.openapitools
+ jackson-databind-nullable
+
+
+ jakarta.validation
+ jakarta.validation-api
+
+
+
+
+
+
+ org.springframework.boot
+ spring-boot-maven-plugin
+
+
+ org.openapitools
+ openapi-generator-maven-plugin
+ 6.6.0
+
+
+
+ generate
+
+
+ ${project.basedir}/../../openapi/specs/data-synthesis.yaml
+ spring
+ ${project.build.directory}/generated-sources/openapi
+ com.datamate.synthesis.interfaces.api
+ com.datamate.synthesis.interfaces.dto
+
+ true
+ true
+ true
+ springdoc
+
+
+
+
+
+
+
+
+
+
diff --git a/backend/services/execution-engine-service/pom.xml b/backend/services/execution-engine-service/pom.xml
new file mode 100644
index 0000000..42f9484
--- /dev/null
+++ b/backend/services/execution-engine-service/pom.xml
@@ -0,0 +1,96 @@
+
+
+ 4.0.0
+
+
+ com.datamate
+ data-mate-platform
+ 1.0.0-SNAPSHOT
+ ../../pom.xml
+
+
+ execution-engine-service
+ Execution Engine Service
+ 执行引擎服务
+
+
+
+ com.datamate
+ domain-common
+ ${project.version}
+
+
+ org.springframework.boot
+ spring-boot-starter-web
+
+
+ org.springframework.boot
+ spring-boot-starter-data-redis
+
+
+ mysql
+ mysql-connector-java
+ ${mysql.version}
+
+
+ org.springframework.boot
+ spring-boot-starter-test
+ test
+
+
+ org.springframework.cloud
+ spring-cloud-starter-openfeign
+
+
+ org.springdoc
+ springdoc-openapi-starter-webmvc-ui
+
+
+ org.openapitools
+ jackson-databind-nullable
+
+
+ jakarta.validation
+ jakarta.validation-api
+
+
+
+
+
+
+ org.springframework.boot
+ spring-boot-maven-plugin
+
+
+ org.openapitools
+ openapi-generator-maven-plugin
+ 6.6.0
+
+
+
+ generate
+
+
+ ${project.basedir}/../../openapi/specs/execution-engine.yaml
+ spring
+ ${project.build.directory}/generated-sources/openapi
+ com.datamate.execution.interfaces.api
+ com.datamate.execution.interfaces.dto
+
+ true
+ true
+ true
+ springdoc
+
+
+
+
+
+
+
+
+
+
diff --git a/backend/services/main-application/pom.xml b/backend/services/main-application/pom.xml
new file mode 100644
index 0000000..33e4862
--- /dev/null
+++ b/backend/services/main-application/pom.xml
@@ -0,0 +1,169 @@
+
+
+ 4.0.0
+
+
+ com.datamate
+ data-mate-platform
+ 1.0.0-SNAPSHOT
+ ../../pom.xml
+
+
+ main-application
+ jar
+ Data Mate Platform - Main Application
+ 主启动应用,集成所有服务模块
+
+
+
+
+ org.springframework.boot
+ spring-boot-starter-web
+
+
+ org.springframework.boot
+ spring-boot-starter-actuator
+
+
+
+ jakarta.persistence
+ jakarta.persistence-api
+
+
+
+
+ com.datamate
+ domain-common
+ ${project.version}
+
+
+ com.datamate
+ security-common
+ ${project.version}
+
+
+
+ org.apache.commons
+ commons-compress
+ 1.26.1
+
+
+
+
+ com.datamate
+ data-management-service
+ ${project.version}
+
+
+ com.datamate
+ data-collection-service
+ ${project.version}
+
+
+ com.datamate
+ operator-market-service
+ ${project.version}
+
+
+ com.datamate
+ data-cleaning-service
+ ${project.version}
+
+
+ com.datamate
+ data-synthesis-service
+ ${project.version}
+
+
+ com.datamate
+ data-annotation-service
+ ${project.version}
+
+
+ com.datamate
+ data-evaluation-service
+ ${project.version}
+
+
+ com.datamate
+ pipeline-orchestration-service
+ ${project.version}
+
+
+ com.datamate
+ execution-engine-service
+ ${project.version}
+
+
+
+
+ com.datamate
+ rag-indexer-service
+ ${project.version}
+
+
+ com.datamate
+ rag-query-service
+ ${project.version}
+
+
+
+
+ com.baomidou
+ mybatis-plus-spring-boot3-starter
+
+
+
+
+ mysql
+ mysql-connector-java
+ 8.0.33
+ runtime
+
+
+
+
+ org.springframework.boot
+ spring-boot-starter-test
+ test
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-compiler-plugin
+ 3.11.0
+
+ ${maven.compiler.source}
+ ${maven.compiler.target}
+
+ -parameters
+
+
+
+
+
+ org.springframework.boot
+ spring-boot-maven-plugin
+ ${spring-boot.version}
+
+ data-mate
+ com.datamate.main.DataMatePlatformApplication
+
+
+
+
+ repackage
+
+
+
+
+
+
+
+
diff --git a/backend/services/main-application/src/main/java/com/datamate/main/DataMatePlatformApplication.java b/backend/services/main-application/src/main/java/com/datamate/main/DataMatePlatformApplication.java
new file mode 100644
index 0000000..7267967
--- /dev/null
+++ b/backend/services/main-application/src/main/java/com/datamate/main/DataMatePlatformApplication.java
@@ -0,0 +1,49 @@
+package com.datamate.main;
+
+import org.mybatis.spring.annotation.MapperScan;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.context.annotation.ComponentScan;
+import org.springframework.scheduling.annotation.EnableAsync;
+import org.springframework.scheduling.annotation.EnableScheduling;
+import org.springframework.transaction.annotation.EnableTransactionManagement;
+
+/**
+ * 数据引擎平台主应用
+ * 聚合所有业务服务JAR包的微服务启动类
+ *
+ * @author Data Mate Team
+ * @version 1.0.0
+ */
+@SpringBootApplication
+@ComponentScan(basePackages = {
+ "com.datamate.main",
+ "com.datamate.datamanagement",
+ "com.datamate.collection",
+ "com.datamate.operator",
+ "com.datamate.cleaning",
+ "com.datamate.synthesis",
+ "com.datamate.annotation",
+ "com.datamate.evaluation",
+ "com.datamate.pipeline",
+ "com.datamate.execution",
+ "com.datamate.rag",
+ "com.datamate.shared",
+ "com.datamate.common"
+})
+@MapperScan(basePackages = {
+ "com.datamate.collection.infrastructure.persistence.mapper",
+ "com.datamate.datamanagement.infrastructure.persistence.mapper",
+ "com.datamate.operator.infrastructure.persistence.mapper",
+ "com.datamate.cleaning.infrastructure.persistence.mapper",
+ "com.datamate.common.infrastructure.mapper"
+})
+@EnableTransactionManagement
+@EnableAsync
+@EnableScheduling
+public class DataMatePlatformApplication {
+
+ public static void main(String[] args) {
+ SpringApplication.run(DataMatePlatformApplication.class, args);
+ }
+}
diff --git a/backend/services/main-application/src/main/java/com/datamate/main/config/SecurityConfig.java b/backend/services/main-application/src/main/java/com/datamate/main/config/SecurityConfig.java
new file mode 100644
index 0000000..f71e8e8
--- /dev/null
+++ b/backend/services/main-application/src/main/java/com/datamate/main/config/SecurityConfig.java
@@ -0,0 +1,26 @@
+package com.datamate.main.config;
+
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.security.config.annotation.web.builders.HttpSecurity;
+import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
+import org.springframework.security.web.SecurityFilterChain;
+
+/**
+ * 安全配置 - 暂时禁用所有认证
+ * 开发阶段使用,生产环境需要启用认证
+ */
+@Configuration
+@EnableWebSecurity
+public class SecurityConfig {
+
+ @Bean
+ public SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
+ http.csrf(csrf -> csrf.disable())
+ .authorizeHttpRequests(authz -> authz
+ .anyRequest().permitAll() // 允许所有请求无需认证
+ );
+
+ return http.build();
+ }
+}
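As the comment says, this permit-all chain is for development only. A minimal sketch of the hardened direction for production, with the JWT filter wiring left out because it is not part of this diff:

    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.security.config.annotation.web.builders.HttpSecurity;
    import org.springframework.security.config.http.SessionCreationPolicy;
    import org.springframework.security.web.SecurityFilterChain;

    // Hypothetical production variant; the endpoint matchers and the JWT filter are assumptions
    @Configuration
    public class ProductionSecurityConfigSketch {

        @Bean
        public SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
            http.csrf(csrf -> csrf.disable())
                .authorizeHttpRequests(authz -> authz
                    .requestMatchers("/actuator/health").permitAll() // keep liveness probes open
                    .anyRequest().authenticated())                   // everything else needs a principal
                .sessionManagement(session ->
                    session.sessionCreationPolicy(SessionCreationPolicy.STATELESS));
            // http.addFilterBefore(jwtAuthenticationFilter, UsernamePasswordAuthenticationFilter.class);
            return http.build();
        }
    }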
diff --git a/backend/services/main-application/src/main/resources/application.yml b/backend/services/main-application/src/main/resources/application.yml
new file mode 100644
index 0000000..9968949
--- /dev/null
+++ b/backend/services/main-application/src/main/resources/application.yml
@@ -0,0 +1,179 @@
+# 数据引擎平台 - 主应用配置
+spring:
+ application:
+ name: data-mate-platform
+
+ # 暂时排除Spring Security自动配置(开发阶段使用)
+ autoconfigure:
+ exclude:
+ - org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration
+ - org.springframework.boot.autoconfigure.security.servlet.UserDetailsServiceAutoConfiguration
+
+ # 数据源配置
+ datasource:
+ driver-class-name: com.mysql.cj.jdbc.Driver
+ url: jdbc:mysql://mysql:3306/datamate?useUnicode=true&characterEncoding=utf8&useSSL=false&serverTimezone=Asia/Shanghai&allowPublicKeyRetrieval=true
+ username: ${DB_USERNAME:root}
+ password: ${DB_PASSWORD:Huawei@123}
+ hikari:
+ maximum-pool-size: 20
+ minimum-idle: 5
+ connection-timeout: 30000
+ idle-timeout: 600000
+ max-lifetime: 1800000
+
+ # Elasticsearch配置
+ elasticsearch:
+ uris: ${ES_URIS:http://localhost:9200}
+ username: ${ES_USERNAME:}
+ password: ${ES_PASSWORD:}
+ connection-timeout: 10s
+ socket-timeout: 30s
+
+ # Jackson配置
+ jackson:
+ time-zone: Asia/Shanghai
+ date-format: yyyy-MM-dd HH:mm:ss
+ serialization:
+ write-dates-as-timestamps: false
+ deserialization:
+ fail-on-unknown-properties: false
+
+ # 文件上传配置
+ servlet:
+ multipart:
+ max-file-size: 100MB
+ max-request-size: 100MB
+
+ # 任务调度配置
+ task:
+ execution:
+ pool:
+ core-size: ${TASK_EXECUTION_CORE_SIZE:10}
+ max-size: ${TASK_EXECUTION_MAX_SIZE:20}
+ queue-capacity: ${TASK_EXECUTION_QUEUE_CAPACITY:100}
+ keep-alive: ${TASK_EXECUTION_KEEP_ALIVE:60s}
+ scheduling:
+ pool:
+ size: ${TASK_SCHEDULING_POOL_SIZE:5}
+ config:
+ import:
+ - classpath:config/application-datacollection.yml
+ - classpath:config/application-datamanagement.yml
+
+# MyBatis配置(需在顶层,不在 spring 下)
+mybatis-plus:
+ configuration:
+ map-underscore-to-camel-case: true
+ default-fetch-size: 100
+ default-statement-timeout: 30
+ use-generated-keys: true
+ cache-enabled: true
+ lazy-loading-enabled: false
+ multiple-result-sets-enabled: true
+ use-column-label: true
+ auto-mapping-behavior: partial
+ auto-mapping-unknown-column-behavior: none
+ default-executor-type: simple
+ call-setters-on-nulls: false
+ return-instance-for-empty-row: false
+ log-impl: org.apache.ibatis.logging.slf4j.Slf4jImpl
+ mapper-locations:
+ - classpath*:mappers/**/*.xml
+
+# 应用配置
+server:
+ port: ${SERVER_PORT:8080}
+ servlet:
+ context-path: /api
+ encoding:
+ charset: UTF-8
+ enabled: true
+ force: true
+
+# 日志配置
+logging:
+ config: classpath:log4j2.xml
+
+# Actuator配置
+management:
+ endpoints:
+ web:
+ exposure:
+ include: health,info,metrics,prometheus
+ endpoint:
+ health:
+ show-details: when-authorized
+ health:
+ elasticsearch:
+ enabled: false # 禁用Elasticsearch健康检查
+
+
+# 平台配置
+datamate:
+ # JWT配置
+ jwt:
+ secret: ${JWT_SECRET:dataMateSecretKey2024ForJWTTokenGeneration}
+ expiration: ${JWT_EXPIRATION:86400} # 24小时,单位秒
+ header: Authorization
+ prefix: "Bearer "
+
+ # 文件存储配置
+ storage:
+ type: ${STORAGE_TYPE:local} # local, minio, s3
+ local:
+ base-path: ${STORAGE_LOCAL_PATH:./data/storage}
+ minio:
+ endpoint: ${MINIO_ENDPOINT:http://localhost:9000}
+ access-key: ${MINIO_ACCESS_KEY:minioadmin}
+ secret-key: ${MINIO_SECRET_KEY:minioadmin}
+ bucket-name: ${MINIO_BUCKET:data-mate}
+
+ # Ray执行器配置
+ ray:
+ enabled: ${RAY_ENABLED:false}
+ address: ${RAY_ADDRESS:ray://localhost:10001}
+ runtime-env:
+ working-dir: ${RAY_WORKING_DIR:./runtime/python-executor}
+ pip-packages:
+ - "ray[default]==2.7.0"
+ - "pandas"
+ - "numpy"
+ - "data-juicer"
+
+ # 数据归集服务配置(可由模块导入叠加)
+ data-collection: {}
+
+ # 算子市场配置
+ operator-market:
+ repository-path: ${OPERATOR_REPO_PATH:./runtime/operators}
+ registry-url: ${OPERATOR_REGISTRY_URL:}
+ max-upload-size: ${OPERATOR_MAX_UPLOAD_SIZE:50MB}
+
+ # 数据处理配置
+ data-processing:
+ max-file-size: ${MAX_FILE_SIZE:1GB}
+ temp-dir: ${TEMP_DIR:./data/temp}
+ batch-size: ${BATCH_SIZE:1000}
+
+ # 标注配置
+ annotation:
+ auto-annotation:
+ enabled: ${AUTO_ANNOTATION_ENABLED:true}
+ model-endpoint: ${ANNOTATION_MODEL_ENDPOINT:}
+ quality-control:
+ enabled: ${QC_ENABLED:true}
+ threshold: ${QC_THRESHOLD:0.8}
+
+ # RAG配置
+ rag:
+ embedding:
+ model: ${RAG_EMBEDDING_MODEL:text-embedding-ada-002}
+ api-key: ${RAG_API_KEY:}
+ dimension: ${RAG_DIMENSION:1536}
+ chunk:
+ size: ${RAG_CHUNK_SIZE:512}
+ overlap: ${RAG_CHUNK_OVERLAP:50}
+ retrieval:
+ top-k: ${RAG_TOP_K:5}
+ score-threshold: ${RAG_SCORE_THRESHOLD:0.7}
diff --git a/backend/services/main-application/src/main/resources/config/application-datacollection.yml b/backend/services/main-application/src/main/resources/config/application-datacollection.yml
new file mode 100644
index 0000000..4591655
--- /dev/null
+++ b/backend/services/main-application/src/main/resources/config/application-datacollection.yml
@@ -0,0 +1,23 @@
+datamate:
+ data-collection:
+ # DataX配置
+ datax:
+ home-path: ${DATAX_HOME:/opt/datax}
+ python-path: ${DATAX_PYTHON_PATH:python3}
+ job-config-path: ${DATAX_JOB_PATH:./data/temp/datax/jobs}
+ log-path: ${DATAX_LOG_PATH:./logs/datax}
+ max-memory: ${DATAX_MAX_MEMORY:2048}
+ channel-count: ${DATAX_CHANNEL_COUNT:5}
+
+ # 执行配置
+ execution:
+ max-concurrent-tasks: ${DATA_COLLECTION_MAX_CONCURRENT_TASKS:10}
+ task-timeout-minutes: ${DATA_COLLECTION_TASK_TIMEOUT:120}
+ retry-count: ${DATA_COLLECTION_RETRY_COUNT:3}
+ retry-interval-seconds: ${DATA_COLLECTION_RETRY_INTERVAL:30}
+
+ # 监控配置
+ monitoring:
+ status-check-interval-seconds: ${DATA_COLLECTION_STATUS_CHECK_INTERVAL:30}
+ log-retention-days: ${DATA_COLLECTION_LOG_RETENTION:30}
+ enable-metrics: ${DATA_COLLECTION_ENABLE_METRICS:true}
diff --git a/backend/services/main-application/src/main/resources/config/application-datamanagement.yml b/backend/services/main-application/src/main/resources/config/application-datamanagement.yml
new file mode 100644
index 0000000..72fb53d
--- /dev/null
+++ b/backend/services/main-application/src/main/resources/config/application-datamanagement.yml
@@ -0,0 +1,11 @@
+datamate:
+ datamanagement:
+ file-storage:
+ upload-dir: ${FILE_UPLOAD_DIR:./uploads}
+ max-file-size: 10485760 # 10MB
+ max-request-size: 52428800 # 50MB
+ cache:
+ ttl: 3600
+ max-size: 1000
+# MyBatis is configured centrally in main-application (mapper-locations & aliases)
+# to avoid list overriding issues when importing multiple module configs.
diff --git a/backend/services/main-application/src/main/resources/log4j2.xml b/backend/services/main-application/src/main/resources/log4j2.xml
new file mode 100644
index 0000000..f9d0cf3
--- /dev/null
+++ b/backend/services/main-application/src/main/resources/log4j2.xml
@@ -0,0 +1,42 @@
+
+
+
+ /var/log/data-mate/backend
+ %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{50} - %msg%n
+ 100MB
+ 30
+ INFO
+ WARN
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/backend/services/operator-market-service/img.png b/backend/services/operator-market-service/img.png
new file mode 100644
index 0000000..a8ec1f2
Binary files /dev/null and b/backend/services/operator-market-service/img.png differ
diff --git a/backend/services/operator-market-service/img_1.png b/backend/services/operator-market-service/img_1.png
new file mode 100644
index 0000000..ed40d49
Binary files /dev/null and b/backend/services/operator-market-service/img_1.png differ
diff --git a/backend/services/operator-market-service/pom.xml b/backend/services/operator-market-service/pom.xml
new file mode 100644
index 0000000..6543a1e
--- /dev/null
+++ b/backend/services/operator-market-service/pom.xml
@@ -0,0 +1,94 @@
+
+
+ 4.0.0
+
+
+ com.datamate
+ data-mate-platform
+ 1.0.0-SNAPSHOT
+ ../../pom.xml
+
+
+ operator-market-service
+ Operator Market Service
+ 算子市场服务
+
+
+
+ com.datamate
+ domain-common
+ ${project.version}
+
+
+ org.springframework.boot
+ spring-boot-starter-web
+
+
+ org.springframework.boot
+ spring-boot-starter-data-redis
+
+
+ mysql
+ mysql-connector-java
+
+
+ org.springframework.boot
+ spring-boot-starter-test
+ test
+
+
+ org.springframework.cloud
+ spring-cloud-starter-openfeign
+
+
+ org.springdoc
+ springdoc-openapi-starter-webmvc-ui
+
+
+ org.openapitools
+ jackson-databind-nullable
+
+
+ jakarta.validation
+ jakarta.validation-api
+
+
+ com.baomidou
+ mybatis-plus-spring-boot3-starter
+
+
+ org.projectlombok
+ lombok
+ provided
+
+
+ org.apache.commons
+ commons-compress
+ 1.26.1
+
+
+
+ org.mapstruct
+ mapstruct
+
+
+
+ org.mapstruct
+ mapstruct-processor
+ ${mapstruct.version}
+ provided
+
+
+
+
+
+
+ org.springframework.boot
+ spring-boot-maven-plugin
+
+
+
+
diff --git a/backend/services/operator-market-service/src/main/java/com/datamate/operator/OperatorMarketServiceConfiguration.java b/backend/services/operator-market-service/src/main/java/com/datamate/operator/OperatorMarketServiceConfiguration.java
new file mode 100644
index 0000000..e05a2bd
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/java/com/datamate/operator/OperatorMarketServiceConfiguration.java
@@ -0,0 +1,24 @@
+package com.datamate.operator;
+
+import org.springframework.boot.autoconfigure.domain.EntityScan;
+import org.springframework.context.annotation.ComponentScan;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.scheduling.annotation.EnableAsync;
+import org.springframework.scheduling.annotation.EnableScheduling;
+
+/**
+ * Operator Market Service Configuration
+ * 算子市场服务配置类 - 版本、安装、评分、仓库
+ */
+@Configuration
+@EnableAsync
+@EnableScheduling
+@EntityScan(basePackages = "com.datamate.operator.domain.modal")
+@ComponentScan(basePackages = {
+ "com.datamate.operator",
+ "com.datamate.shared"
+})
+public class OperatorMarketServiceConfiguration {
+ // Service configuration class for JAR packaging
+ // 作为jar包形式提供服务的配置类
+}
diff --git a/backend/services/operator-market-service/src/main/java/com/datamate/operator/application/CategoryService.java b/backend/services/operator-market-service/src/main/java/com/datamate/operator/application/CategoryService.java
new file mode 100644
index 0000000..45da8d8
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/java/com/datamate/operator/application/CategoryService.java
@@ -0,0 +1,62 @@
+package com.datamate.operator.application;
+
+
+import com.datamate.operator.domain.modal.Category;
+import com.datamate.operator.domain.modal.CategoryRelation;
+import com.datamate.operator.infrastructure.persistence.mapper.CategoryMapper;
+import com.datamate.operator.infrastructure.persistence.mapper.CategoryRelationMapper;
+import com.datamate.operator.interfaces.dto.CategoryTreeResponse;
+import com.datamate.operator.interfaces.dto.SubCategory;
+import lombok.RequiredArgsConstructor;
+import org.springframework.stereotype.Service;
+
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;
+
+@Service
+@RequiredArgsConstructor
+public class CategoryService {
+ private final CategoryMapper categoryMapper;
+
+ private final CategoryRelationMapper categoryRelationMapper;
+
+ public List<CategoryTreeResponse> getAllCategories() {
+ List<Category> allCategories = categoryMapper.findAllCategories();
+ List<CategoryRelation> allRelations = categoryRelationMapper.findAllRelation();
+
+ Map<Integer, Integer> relationMap = allRelations.stream()
+ .collect(Collectors.groupingBy(
+ CategoryRelation::getCategoryId,
+ Collectors.collectingAndThen(Collectors.counting(), Math::toIntExact)));
+
+ Map<Integer, String> nameMap = allCategories.stream()
+ .collect(Collectors.toMap(Category::getId, Category::getName));
+ Map<Integer, List<Category>> groupedByParentId = allCategories.stream()
+ .filter(relation -> relation.getParentId() > 0)
+ .collect(Collectors.groupingBy(Category::getParentId));
+
+ return groupedByParentId.entrySet().stream()
+ .sorted(Map.Entry.comparingByKey())
+ .map(entry -> {
+ Integer parentId = entry.getKey();
+ List<Category> group = entry.getValue();
+ CategoryTreeResponse response = new CategoryTreeResponse();
+ response.setId(parentId);
+ response.setName(nameMap.get(parentId));
+ AtomicInteger totalCount = new AtomicInteger();
+ response.setCategories(group.stream().map(category -> {
+ SubCategory subCategory = new SubCategory();
+ subCategory.setId(category.getId());
+ subCategory.setName(category.getName());
+ subCategory.setCount(relationMap.getOrDefault(category.getId(), 0));
+ totalCount.getAndAdd(relationMap.getOrDefault(category.getId(), 0));
+ subCategory.setParentId(parentId);
+ return subCategory;
+ }).toList());
+ response.setCount(totalCount.get());
+ return response;
+ }).toList();
+ }
+}
diff --git a/backend/services/operator-market-service/src/main/java/com/datamate/operator/application/LabelService.java b/backend/services/operator-market-service/src/main/java/com/datamate/operator/application/LabelService.java
new file mode 100644
index 0000000..cba6ce4
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/java/com/datamate/operator/application/LabelService.java
@@ -0,0 +1,22 @@
+package com.datamate.operator.application;
+
+import com.datamate.operator.interfaces.dto.Label;
+import com.datamate.operator.interfaces.dto.*;
+import org.springframework.stereotype.Service;
+import java.util.List;
+import java.util.Collections;
+
+@Service
+public class LabelService {
+ public List<Label> getLabels(Integer page, Integer size, String keyword) {
+ // TODO: 查询标签列表
+ return Collections.emptyList();
+ }
+ public void updateLabel(String id, List<Label> updateLabelRequest) {
+ // TODO: 更新标签
+ }
+ public void createLabels(Label labelsPostRequest) {
+ // TODO: 批量创建标签
+ }
+}
+
diff --git a/backend/services/operator-market-service/src/main/java/com/datamate/operator/application/OperatorService.java b/backend/services/operator-market-service/src/main/java/com/datamate/operator/application/OperatorService.java
new file mode 100644
index 0000000..27856ba
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/java/com/datamate/operator/application/OperatorService.java
@@ -0,0 +1,76 @@
+package com.datamate.operator.application;
+
+import com.datamate.operator.domain.converter.OperatorConverter;
+import com.datamate.operator.infrastructure.persistence.mapper.CategoryRelationMapper;
+import com.datamate.operator.infrastructure.persistence.mapper.OperatorMapper;
+import com.datamate.operator.interfaces.dto.CreateOperatorRequest;
+import com.datamate.operator.interfaces.dto.OperatorResponse;
+import com.datamate.operator.interfaces.dto.UpdateOperatorRequest;
+import com.datamate.operator.interfaces.dto.*;
+import com.datamate.operator.domain.modal.Operator;
+import lombok.RequiredArgsConstructor;
+import org.springframework.stereotype.Service;
+import org.springframework.web.multipart.MultipartFile;
+
+import java.util.List;
+
+@Service
+@RequiredArgsConstructor
+public class OperatorService {
+ private final OperatorMapper operatorMapper;
+
+ private final CategoryRelationMapper relationMapper;
+
+ public List<OperatorResponse> getOperators(Integer page, Integer size, List<Integer> categories,
+ String operatorName, Boolean isStar) {
+ Integer offset = page * size;
+ List<Operator> filteredOperators = operatorMapper.findOperatorsByCriteria(size, offset, operatorName,
+ categories, isStar);
+ return filteredOperators.stream()
+ .map(OperatorConverter.INSTANCE::operatorToResponse).toList();
+ }
+
+ public int getOperatorsCount(List<Integer> categories, String operatorName, Boolean isStar) {
+ return operatorMapper.countOperatorsByCriteria(operatorName, categories, isStar);
+ }
+
+ public OperatorResponse getOperatorById(String id) {
+ Operator operator = operatorMapper.findOperatorById(id);
+ return OperatorConverter.INSTANCE.operatorToResponse(operator);
+ }
+
+ public OperatorResponse createOperator(CreateOperatorRequest req) {
+ Operator operator = new Operator();
+ operator.setId(req.getId());
+ operator.setName(req.getName());
+ operator.setDescription(req.getDescription());
+ operator.setVersion(req.getVersion());
+ operator.setInputs(req.getInputs());
+ operator.setOutputs(req.getOutputs());
+ operator.setRuntime(req.getRuntime());
+ operator.setSettings(req.getSettings());
+ operatorMapper.insertOperator(operator);
+ relationMapper.batchInsert(req.getId(), req.getCategories());
+ return OperatorConverter.INSTANCE.operatorToResponse(operator);
+ }
+
+ public OperatorResponse updateOperator(String id, UpdateOperatorRequest req) {
+ Operator operator = new Operator();
+ operator.setId(id);
+ operator.setName(req.getName());
+ operator.setDescription(req.getDescription());
+ operator.setVersion(req.getVersion());
+ operator.setInputs(req.getInputs());
+ operator.setOutputs(req.getOutputs());
+ operator.setRuntime(req.getRuntime());
+ operator.setSettings(req.getSettings());
+ operatorMapper.updateOperator(operator);
+ relationMapper.batchInsert(id, req.getCategories());
+ return getOperatorById(id);
+ }
+
+ public OperatorResponse uploadOperator(MultipartFile file, String description) {
+ // TODO: 文件上传与解析
+ return new OperatorResponse();
+ }
+}
diff --git a/backend/services/operator-market-service/src/main/java/com/datamate/operator/domain/converter/OperatorConverter.java b/backend/services/operator-market-service/src/main/java/com/datamate/operator/domain/converter/OperatorConverter.java
new file mode 100644
index 0000000..13386c5
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/java/com/datamate/operator/domain/converter/OperatorConverter.java
@@ -0,0 +1,28 @@
+package com.datamate.operator.domain.converter;
+
+import com.datamate.operator.domain.modal.Operator;
+import com.datamate.operator.interfaces.dto.OperatorResponse;
+import org.mapstruct.Mapper;
+import org.mapstruct.Mapping;
+import org.mapstruct.Named;
+import org.mapstruct.factory.Mappers;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+@Mapper
+public interface OperatorConverter {
+ OperatorConverter INSTANCE = Mappers.getMapper(OperatorConverter.class);
+
+ @Mapping(target = "categories", source = "categories", qualifiedByName = "stringToList")
+ OperatorResponse operatorToResponse(Operator operator);
+
+ @Named("stringToList")
+ static List<Integer> stringToList(String input) {
+ if (input == null || input.isEmpty()) {
+ return Collections.emptyList();
+ }
+ return Arrays.stream(input.split(",")).map(Integer::valueOf).toList();
+ }
+}
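A quick usage sketch of the converter above: the comma-separated categories string stored on Operator is split into the List<Integer> on the response DTO. The getCategories() accessor on OperatorResponse is assumed, since that DTO is defined outside this file.

    import com.datamate.operator.domain.converter.OperatorConverter;
    import com.datamate.operator.domain.modal.Operator;
    import com.datamate.operator.interfaces.dto.OperatorResponse;

    public class OperatorConverterSketch {
        public static void main(String[] args) {
            Operator operator = new Operator();
            operator.setId("op-1");
            operator.setCategories("1,2,3"); // persisted as a comma-separated string

            OperatorResponse response = OperatorConverter.INSTANCE.operatorToResponse(operator);
            System.out.println(response.getCategories()); // stringToList yields [1, 2, 3]
        }
    }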
diff --git a/backend/services/operator-market-service/src/main/java/com/datamate/operator/domain/modal/Category.java b/backend/services/operator-market-service/src/main/java/com/datamate/operator/domain/modal/Category.java
new file mode 100644
index 0000000..e20afed
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/java/com/datamate/operator/domain/modal/Category.java
@@ -0,0 +1,14 @@
+package com.datamate.operator.domain.modal;
+
+import lombok.Getter;
+import lombok.Setter;
+
+@Setter
+@Getter
+public class Category {
+ private Integer id;
+
+ private String name;
+
+ private Integer parentId;
+}
diff --git a/backend/services/operator-market-service/src/main/java/com/datamate/operator/domain/modal/CategoryRelation.java b/backend/services/operator-market-service/src/main/java/com/datamate/operator/domain/modal/CategoryRelation.java
new file mode 100644
index 0000000..5108816
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/java/com/datamate/operator/domain/modal/CategoryRelation.java
@@ -0,0 +1,12 @@
+package com.datamate.operator.domain.modal;
+
+import lombok.Getter;
+import lombok.Setter;
+
+@Setter
+@Getter
+public class CategoryRelation {
+ private Integer categoryId;
+
+ private String operatorId;
+}
diff --git a/backend/services/operator-market-service/src/main/java/com/datamate/operator/domain/modal/Operator.java b/backend/services/operator-market-service/src/main/java/com/datamate/operator/domain/modal/Operator.java
new file mode 100644
index 0000000..6a5e009
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/java/com/datamate/operator/domain/modal/Operator.java
@@ -0,0 +1,35 @@
+package com.datamate.operator.domain.modal;
+
+import lombok.Getter;
+import lombok.Setter;
+
+import java.time.LocalDateTime;
+
+@Getter
+@Setter
+public class Operator {
+ private String id;
+
+ private String name;
+
+ private String description;
+
+ private String version;
+
+ private String inputs;
+
+ private String outputs;
+
+ private String categories;
+
+ private String runtime;
+
+ private String settings;
+
+ private Boolean isStar;
+
+ private LocalDateTime createdAt;
+
+ private LocalDateTime updatedAt;
+}
+
diff --git a/backend/services/operator-market-service/src/main/java/com/datamate/operator/domain/modal/RelationCategoryDTO.java b/backend/services/operator-market-service/src/main/java/com/datamate/operator/domain/modal/RelationCategoryDTO.java
new file mode 100644
index 0000000..7ab1a4b
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/java/com/datamate/operator/domain/modal/RelationCategoryDTO.java
@@ -0,0 +1,15 @@
+package com.datamate.operator.domain.modal;
+
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import lombok.Setter;
+
+@Setter
+@Getter
+@AllArgsConstructor
+public class RelationCategoryDTO {
+ private Integer categoryId;
+ private String operatorId;
+ private String name;
+ private Integer parentId;
+}
diff --git a/backend/services/operator-market-service/src/main/java/com/datamate/operator/infrastructure/persistence/mapper/CategoryMapper.java b/backend/services/operator-market-service/src/main/java/com/datamate/operator/infrastructure/persistence/mapper/CategoryMapper.java
new file mode 100644
index 0000000..602689d
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/java/com/datamate/operator/infrastructure/persistence/mapper/CategoryMapper.java
@@ -0,0 +1,12 @@
+package com.datamate.operator.infrastructure.persistence.mapper;
+
+import com.datamate.operator.domain.modal.Category;
+import org.apache.ibatis.annotations.Mapper;
+
+import java.util.List;
+
+@Mapper
+public interface CategoryMapper {
+
+ List<Category> findAllCategories();
+}
diff --git a/backend/services/operator-market-service/src/main/java/com/datamate/operator/infrastructure/persistence/mapper/CategoryRelationMapper.java b/backend/services/operator-market-service/src/main/java/com/datamate/operator/infrastructure/persistence/mapper/CategoryRelationMapper.java
new file mode 100644
index 0000000..e85cf33
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/java/com/datamate/operator/infrastructure/persistence/mapper/CategoryRelationMapper.java
@@ -0,0 +1,18 @@
+package com.datamate.operator.infrastructure.persistence.mapper;
+
+import com.datamate.operator.domain.modal.CategoryRelation;
+import com.datamate.operator.domain.modal.RelationCategoryDTO;
+import org.apache.ibatis.annotations.Mapper;
+import org.apache.ibatis.annotations.Param;
+
+import java.util.List;
+
+@Mapper
+public interface CategoryRelationMapper {
+
+ List<RelationCategoryDTO> findAllRelationWithCategory();
+
+ List<CategoryRelation> findAllRelation();
+
+ void batchInsert(@Param("operatorId") String operatorId, @Param("categories") List<Integer> categories);
+}
diff --git a/backend/services/operator-market-service/src/main/java/com/datamate/operator/infrastructure/persistence/mapper/OperatorMapper.java b/backend/services/operator-market-service/src/main/java/com/datamate/operator/infrastructure/persistence/mapper/OperatorMapper.java
new file mode 100644
index 0000000..dbfd2e1
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/java/com/datamate/operator/infrastructure/persistence/mapper/OperatorMapper.java
@@ -0,0 +1,27 @@
+package com.datamate.operator.infrastructure.persistence.mapper;
+
+import com.datamate.operator.domain.modal.Operator;
+import org.apache.ibatis.annotations.Mapper;
+import org.apache.ibatis.annotations.Param;
+
+import java.util.List;
+
+@Mapper
+public interface OperatorMapper {
+ List<Operator> findAllOperators();
+
+ List<Operator> findOperatorsByCriteria(@Param("size") Integer size, @Param("offset") Integer offset,
+ @Param("operatorName") String operatorName,
+ @Param("categories") List<Integer> categories,
+ @Param("isStar") Boolean isStar);
+
+ Integer countOperatorsByCriteria(@Param("operatorName") String operatorName,
+ @Param("categories") List<Integer> categories,
+ @Param("isStar") Boolean isStar);
+
+ Operator findOperatorById(@Param("id") String id);
+
+ void updateOperator(Operator operator);
+
+ void insertOperator(Operator operator);
+}
diff --git a/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/api/CategoryController.java b/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/api/CategoryController.java
new file mode 100644
index 0000000..6291631
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/api/CategoryController.java
@@ -0,0 +1,27 @@
+package com.datamate.operator.interfaces.api;
+
+import com.datamate.common.infrastructure.common.Response;
+import com.datamate.common.interfaces.PagedResponse;
+import com.datamate.operator.application.CategoryService;
+import com.datamate.operator.interfaces.dto.CategoryTreeResponse;
+import lombok.RequiredArgsConstructor;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+import java.util.List;
+
+
+@RestController
+@RequestMapping("/categories")
+@RequiredArgsConstructor
+public class CategoryController {
+ private final CategoryService categoryService;
+
+ @GetMapping("/tree")
+ public ResponseEntity<Response<PagedResponse<CategoryTreeResponse>>> categoryTreeGet() {
+ List<CategoryTreeResponse> allCategories = categoryService.getAllCategories();
+ return ResponseEntity.ok(Response.ok(PagedResponse.of(allCategories)));
+ }
+}
diff --git a/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/api/LabelController.java b/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/api/LabelController.java
new file mode 100644
index 0000000..8210048
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/api/LabelController.java
@@ -0,0 +1,40 @@
+package com.datamate.operator.interfaces.api;
+
+import com.datamate.common.infrastructure.common.Response;
+import com.datamate.common.interfaces.PagedResponse;
+import com.datamate.operator.application.LabelService;
+import com.datamate.operator.interfaces.dto.Label;
+import lombok.RequiredArgsConstructor;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.*;
+
+import java.util.List;
+
+@RestController
+@RequestMapping("/labels")
+@RequiredArgsConstructor
+public class LabelController {
+ private final LabelService labelService;
+
+ @GetMapping
+ public ResponseEntity<Response<PagedResponse<Label>>> labelsGet(@RequestParam("page") Integer page,
+ @RequestParam("size") Integer size,
+ @RequestParam("keyword") String keyword) {
+ return ResponseEntity.ok(Response.ok(PagedResponse.of(labelService.getLabels(page, size, keyword))));
+ }
+
+ @PutMapping("/{id}")
+ public ResponseEntity<Response<Void>> labelsIdPut(@PathVariable("id") String id,
+ @RequestBody List<Label> updateLabelRequest) {
+ labelService.updateLabel(id, updateLabelRequest);
+ return ResponseEntity.ok(Response.ok(null));
+ }
+
+ @PostMapping
+ public ResponseEntity<Response<Void>> labelsPost(@RequestBody Label labelsPostRequest) {
+ labelService.createLabels(labelsPostRequest);
+ return ResponseEntity.ok(Response.ok(null));
+ }
+}
+
+
diff --git a/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/api/OperatorController.java b/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/api/OperatorController.java
new file mode 100644
index 0000000..feb0cf5
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/api/OperatorController.java
@@ -0,0 +1,54 @@
+package com.datamate.operator.interfaces.api;
+
+import com.datamate.common.infrastructure.common.Response;
+import com.datamate.common.interfaces.PagedResponse;
+import com.datamate.operator.application.OperatorService;
+import com.datamate.operator.interfaces.dto.CreateOperatorRequest;
+import com.datamate.operator.interfaces.dto.OperatorResponse;
+import com.datamate.operator.interfaces.dto.OperatorsListPostRequest;
+import com.datamate.operator.interfaces.dto.UpdateOperatorRequest;
+import lombok.RequiredArgsConstructor;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.*;
+import org.springframework.web.multipart.MultipartFile;
+
+import java.util.List;
+
+@RestController
+@RequestMapping("/operators")
+@RequiredArgsConstructor
+public class OperatorController {
+ private final OperatorService operatorService;
+
+ @PostMapping("/list")
+ public ResponseEntity>> operatorsListPost(@RequestBody OperatorsListPostRequest request) {
+ List responses = operatorService.getOperators(request.getPage(), request.getSize(),
+ request.getCategories(), request.getOperatorName(), request.getIsStar());
+ int count = operatorService.getOperatorsCount(request.getCategories(), request.getOperatorName(),
+ request.getIsStar());
+ int totalPages = (count + request.getSize() - 1) / request.getSize();
+ return ResponseEntity.ok(Response.ok(PagedResponse.of(responses, request.getPage(), count, totalPages)));
+ }
+
+ @GetMapping("/{id}")
+ public ResponseEntity<Response<OperatorResponse>> operatorsIdGet(@PathVariable("id") String id) {
+ return ResponseEntity.ok(Response.ok(operatorService.getOperatorById(id)));
+ }
+
+ @PutMapping("/{id}")
+ public ResponseEntity<Response<OperatorResponse>> operatorsIdPut(@PathVariable("id") String id,
+ @RequestBody UpdateOperatorRequest updateOperatorRequest) {
+ return ResponseEntity.ok(Response.ok(operatorService.updateOperator(id, updateOperatorRequest)));
+ }
+
+ @PostMapping("/create")
+ public ResponseEntity<Response<OperatorResponse>> operatorsCreatePost(@RequestBody CreateOperatorRequest createOperatorRequest) {
+ return ResponseEntity.ok(Response.ok(operatorService.createOperator(createOperatorRequest)));
+ }
+
+ @PostMapping("/upload")
+ public ResponseEntity<Response<OperatorResponse>> operatorsUploadPost(@RequestPart(value = "file") MultipartFile file,
+ @RequestParam(value = "description") String description) {
+ return ResponseEntity.ok(Response.ok(operatorService.uploadOperator(file, description)));
+ }
+}
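
The totalPages calculation above uses ceiling division, (count + size - 1) / size. A quick standalone check of the formula, independent of the project types:

    public class PageCountCheck {
        static int totalPages(int count, int size) {
            // Ceiling division: rounds up whenever count is not a multiple of size.
            return (count + size - 1) / size;
        }

        public static void main(String[] args) {
            System.out.println(totalPages(0, 10));  // 0
            System.out.println(totalPages(10, 10)); // 1
            System.out.println(totalPages(11, 10)); // 2
        }
    }
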
diff --git a/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/dto/CategoryTreeResponse.java b/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/dto/CategoryTreeResponse.java
new file mode 100644
index 0000000..be6d6a6
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/dto/CategoryTreeResponse.java
@@ -0,0 +1,22 @@
+package com.datamate.operator.interfaces.dto;
+
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+import lombok.Setter;
+
+import java.util.ArrayList;
+import java.util.List;
+
+
+@Getter
+@Setter
+@NoArgsConstructor
+public class CategoryTreeResponse {
+ private Integer id;
+
+ private String name;
+
+ private Integer count;
+
+ private List<SubCategory> categories = new ArrayList<>();
+}
diff --git a/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/dto/CreateOperatorRequest.java b/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/dto/CreateOperatorRequest.java
new file mode 100644
index 0000000..225f4f3
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/dto/CreateOperatorRequest.java
@@ -0,0 +1,36 @@
+package com.datamate.operator.interfaces.dto;
+
+
+import lombok.Getter;
+import lombok.Setter;
+
+import java.util.List;
+
+/**
+ * CreateOperatorRequest
+ */
+
+@Getter
+@Setter
+public class CreateOperatorRequest {
+ private String id;
+
+ private String name;
+
+ private String description;
+
+ private String version;
+
+ private String inputs;
+
+ private String outputs;
+
+ private List<String> categories;
+
+ private String runtime;
+
+ private String settings;
+
+ private String fileName;
+}
+
diff --git a/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/dto/Label.java b/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/dto/Label.java
new file mode 100644
index 0000000..ff32ff2
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/dto/Label.java
@@ -0,0 +1,13 @@
+package com.datamate.operator.interfaces.dto;
+
+import lombok.Getter;
+import lombok.Setter;
+
+@Getter
+@Setter
+public class Label {
+
+ private String id;
+
+ private String name;
+}
diff --git a/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/dto/OperatorResponse.java b/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/dto/OperatorResponse.java
new file mode 100644
index 0000000..0123743
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/dto/OperatorResponse.java
@@ -0,0 +1,43 @@
+package com.datamate.operator.interfaces.dto;
+
+import lombok.Getter;
+import lombok.Setter;
+import org.springframework.format.annotation.DateTimeFormat;
+
+import java.time.LocalDateTime;
+import java.util.List;
+
+/**
+ * OperatorResponse
+ */
+
+@Getter
+@Setter
+public class OperatorResponse {
+ private String id;
+
+ private String name;
+
+ private String description;
+
+ private String version;
+
+ private String inputs;
+
+ private String outputs;
+
+ private List<String> categories;
+
+ private String runtime;
+
+ private String settings;
+
+ private Boolean isStar;
+
+ @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME)
+ private LocalDateTime createdAt;
+
+ @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME)
+ private LocalDateTime updatedAt;
+}
+
diff --git a/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/dto/OperatorsListPostRequest.java b/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/dto/OperatorsListPostRequest.java
new file mode 100644
index 0000000..9731579
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/dto/OperatorsListPostRequest.java
@@ -0,0 +1,30 @@
+package com.datamate.operator.interfaces.dto;
+
+import java.util.ArrayList;
+import java.util.List;
+
+
+import lombok.Getter;
+import lombok.Setter;
+
+/**
+ * OperatorsListPostRequest
+ */
+
+@Getter
+@Setter
+public class OperatorsListPostRequest {
+
+ private Integer page;
+
+ private Integer size;
+
+ private List<String> categories = new ArrayList<>();
+
+ private String operatorName;
+
+ private String labelName;
+
+ private Boolean isStar;
+}
+
diff --git a/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/dto/SubCategory.java b/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/dto/SubCategory.java
new file mode 100644
index 0000000..dc9408a
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/dto/SubCategory.java
@@ -0,0 +1,18 @@
+package com.datamate.operator.interfaces.dto;
+
+import lombok.Getter;
+import lombok.Setter;
+
+@Getter
+@Setter
+public class SubCategory {
+ private long id;
+
+ private String name;
+
+ private long count;
+
+ private String type;
+
+ private long parentId;
+}
diff --git a/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/dto/UpdateOperatorRequest.java b/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/dto/UpdateOperatorRequest.java
new file mode 100644
index 0000000..2438fbe
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/java/com/datamate/operator/interfaces/dto/UpdateOperatorRequest.java
@@ -0,0 +1,32 @@
+package com.datamate.operator.interfaces.dto;
+
+
+import lombok.Getter;
+import lombok.Setter;
+
+import java.util.List;
+
+/**
+ * UpdateOperatorRequest
+ */
+
+@Getter
+@Setter
+public class UpdateOperatorRequest {
+ private String name;
+
+ private String description;
+
+ private String version;
+
+ private String inputs;
+
+ private String outputs;
+
+ private List<String> categories;
+
+ private String runtime;
+
+ private String settings;
+}
+
diff --git a/backend/services/operator-market-service/src/main/resources/mappers/CategoryMapper.xml b/backend/services/operator-market-service/src/main/resources/mappers/CategoryMapper.xml
new file mode 100644
index 0000000..2207a23
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/resources/mappers/CategoryMapper.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+ SELECT * FROM t_operator_category
+
+
diff --git a/backend/services/operator-market-service/src/main/resources/mappers/CategoryRelationMapper.xml b/backend/services/operator-market-service/src/main/resources/mappers/CategoryRelationMapper.xml
new file mode 100644
index 0000000..7cdab8f
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/resources/mappers/CategoryRelationMapper.xml
@@ -0,0 +1,24 @@
+
+
+
+
+
+
+ SELECT tc.id AS id, tcr.operator_id AS operatorId, tc.name AS name, tc.parent_id AS parentId
+ FROM t_operator_category_relation tcr
+ LEFT JOIN t_operator_category tc ON tcr.category_id = tc.id
+
+
+
+ SELECT category_id, operator_id FROM t_operator_category_relation
+
+
+
+ INSERT INTO t_operator_category_relation (operator_id, category_id)
+ VALUES
+
+ (#{operatorId}, #{categoryId})
+
+
+
+
diff --git a/backend/services/operator-market-service/src/main/resources/mappers/OperatorMapper.xml b/backend/services/operator-market-service/src/main/resources/mappers/OperatorMapper.xml
new file mode 100644
index 0000000..7c5e448
--- /dev/null
+++ b/backend/services/operator-market-service/src/main/resources/mappers/OperatorMapper.xml
@@ -0,0 +1,96 @@
+
+
+
+
+
+ SELECT id, name, description, version, inputs, outputs, runtime, settings, is_star, created_at, updated_at FROM
+ t_operator
+
+
+
+ SELECT operator_id as id, operator_name as name, description, version, inputs, outputs, runtime, settings,
+ is_star, created_at, updated_at, GROUP_CONCAT(category_id ORDER BY created_at DESC SEPARATOR ',') AS categories
+ FROM v_operator
+
+
+ AND operator_name LIKE CONCAT('%', #{operatorName}, '%')
+
+
+ AND category_id IN
+
+ #{category}
+
+
+
+ AND is_star = #{isStar}
+
+
+ GROUP BY operator_id
+
+ LIMIT ${size} OFFSET ${offset}
+
+
+
+
+ SELECT COUNT(DISTINCT operator_id) AS count
+ FROM v_operator
+
+
+ AND operator_name LIKE CONCAT('%', #{operatorName}, '%')
+
+
+ AND category_id IN
+
+ #{category}
+
+
+
+ AND is_star = #{isStar}
+
+
+
+
+
+ SELECT operator_id as id, operator_name as name, description, version, inputs, outputs, runtime, settings,
+ is_star, created_at, updated_at, GROUP_CONCAT(category_id ORDER BY created_at DESC SEPARATOR ',') AS categories
+ FROM v_operator
+ WHERE operator_id = #{id}
+
+
+
+ INSERT INTO t_operator (id, name, description, version, inputs, outputs, runtime, settings, is_star, created_at, updated_at)
+ VALUES (#{id}, #{name}, #{description}, #{version}, #{inputs}, #{outputs}, #{runtime}, #{settings}, #{isStar}, NOW(), NOW())
+
+
+
+ UPDATE t_operator
+
+
+ name = #{name},
+
+
+ description = #{description},
+
+
+ version = #{version},
+
+
+ inputs = #{inputs},
+
+
+ outputs = #{outputs},
+
+
+ runtime = #{runtime},
+
+
+ settings = #{settings},
+
+
+ is_star = #{isStar},
+
+ updated_at = NOW()
+
+ WHERE id = #{id}
+
+
diff --git a/backend/services/pipeline-orchestration-service/pom.xml b/backend/services/pipeline-orchestration-service/pom.xml
new file mode 100644
index 0000000..d5dfe1d
--- /dev/null
+++ b/backend/services/pipeline-orchestration-service/pom.xml
@@ -0,0 +1,96 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>com.datamate</groupId>
+        <artifactId>data-mate-platform</artifactId>
+        <version>1.0.0-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>pipeline-orchestration-service</artifactId>
+    <name>Pipeline Orchestration Service</name>
+    <description>Pipeline orchestration service</description>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.datamate</groupId>
+            <artifactId>domain-common</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-web</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-data-redis</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>mysql</groupId>
+            <artifactId>mysql-connector-java</artifactId>
+            <version>${mysql.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-test</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.cloud</groupId>
+            <artifactId>spring-cloud-starter-openfeign</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springdoc</groupId>
+            <artifactId>springdoc-openapi-starter-webmvc-ui</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.openapitools</groupId>
+            <artifactId>jackson-databind-nullable</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>jakarta.validation</groupId>
+            <artifactId>jakarta.validation-api</artifactId>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.springframework.boot</groupId>
+                <artifactId>spring-boot-maven-plugin</artifactId>
+            </plugin>
+            <plugin>
+                <groupId>org.openapitools</groupId>
+                <artifactId>openapi-generator-maven-plugin</artifactId>
+                <version>6.6.0</version>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>generate</goal>
+                        </goals>
+                        <configuration>
+                            <inputSpec>${project.basedir}/../../openapi/specs/pipeline-orchestration.yaml</inputSpec>
+                            <generatorName>spring</generatorName>
+                            <output>${project.build.directory}/generated-sources/openapi</output>
+                            <apiPackage>com.datamate.pipeline.interfaces.api</apiPackage>
+                            <modelPackage>com.datamate.pipeline.interfaces.dto</modelPackage>
+                            <configOptions>
+                                <interfaceOnly>true</interfaceOnly>
+                                <useSpringBoot3>true</useSpringBoot3>
+                                <useTags>true</useTags>
+                                <documentationProvider>springdoc</documentationProvider>
+                            </configOptions>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+</project>
diff --git a/backend/services/rag-indexer-service/pom.xml b/backend/services/rag-indexer-service/pom.xml
new file mode 100644
index 0000000..f9e0441
--- /dev/null
+++ b/backend/services/rag-indexer-service/pom.xml
@@ -0,0 +1,96 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>com.datamate</groupId>
+        <artifactId>data-mate-platform</artifactId>
+        <version>1.0.0-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>rag-indexer-service</artifactId>
+    <name>RAG Indexer Service</name>
+    <description>RAG document indexing service</description>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.datamate</groupId>
+            <artifactId>domain-common</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-web</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-data-elasticsearch</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>mysql</groupId>
+            <artifactId>mysql-connector-java</artifactId>
+            <version>${mysql.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-test</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.cloud</groupId>
+            <artifactId>spring-cloud-starter-openfeign</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springdoc</groupId>
+            <artifactId>springdoc-openapi-starter-webmvc-ui</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.openapitools</groupId>
+            <artifactId>jackson-databind-nullable</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>jakarta.validation</groupId>
+            <artifactId>jakarta.validation-api</artifactId>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.springframework.boot</groupId>
+                <artifactId>spring-boot-maven-plugin</artifactId>
+            </plugin>
+        </plugins>
+    </build>
+</project>
diff --git a/backend/services/rag-indexer-service/src/main/java/com/dataengine/rag/indexer/RagApplication.java b/backend/services/rag-indexer-service/src/main/java/com/dataengine/rag/indexer/RagApplication.java
new file mode 100644
index 0000000..4f5af38
--- /dev/null
+++ b/backend/services/rag-indexer-service/src/main/java/com/dataengine/rag/indexer/RagApplication.java
@@ -0,0 +1,17 @@
+package com.dataengine.rag.indexer;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+
+/**
+ * RAG indexer service application entry point.
+ *
+ * @author dallas
+ * @since 2025-10-13
+ */
+@SpringBootApplication
+public class RagApplication {
+ public static void main(String[] args) {
+ SpringApplication.run(RagApplication.class, args);
+ }
+}
diff --git a/backend/services/rag-query-service/pom.xml b/backend/services/rag-query-service/pom.xml
new file mode 100644
index 0000000..cc407ef
--- /dev/null
+++ b/backend/services/rag-query-service/pom.xml
@@ -0,0 +1,72 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>com.datamate</groupId>
+        <artifactId>data-mate-platform</artifactId>
+        <version>1.0.0-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>rag-query-service</artifactId>
+    <name>RAG Query Service</name>
+    <description>RAG query service</description>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.datamate</groupId>
+            <artifactId>domain-common</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-web</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-data-elasticsearch</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>mysql</groupId>
+            <artifactId>mysql-connector-java</artifactId>
+            <version>${mysql.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-test</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.cloud</groupId>
+            <artifactId>spring-cloud-starter-openfeign</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springdoc</groupId>
+            <artifactId>springdoc-openapi-starter-webmvc-ui</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.openapitools</groupId>
+            <artifactId>jackson-databind-nullable</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>jakarta.validation</groupId>
+            <artifactId>jakarta.validation-api</artifactId>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.springframework.boot</groupId>
+                <artifactId>spring-boot-maven-plugin</artifactId>
+            </plugin>
+        </plugins>
+    </build>
+</project>
diff --git a/backend/shared/domain-common/pom.xml b/backend/shared/domain-common/pom.xml
new file mode 100644
index 0000000..bf1e933
--- /dev/null
+++ b/backend/shared/domain-common/pom.xml
@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>com.datamate</groupId>
+        <artifactId>data-mate-platform</artifactId>
+        <version>1.0.0-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>domain-common</artifactId>
+    <name>Domain Common</name>
+    <description>Common DDD domain components</description>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-validation</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-databind</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.fasterxml.jackson.datatype</groupId>
+            <artifactId>jackson-datatype-jsr310</artifactId>
+        </dependency>
+    </dependencies>
+</project>
diff --git a/backend/shared/domain-common/src/main/java/com/datamate/common/domain/AggregateRoot.java b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/AggregateRoot.java
new file mode 100644
index 0000000..1cfdab4
--- /dev/null
+++ b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/AggregateRoot.java
@@ -0,0 +1,27 @@
+package com.datamate.common.domain;
+
+import com.datamate.common.domain.model.base.BaseEntity;
+
+/**
+ * DDD aggregate root base class.
+ */
+public abstract class AggregateRoot<ID> extends BaseEntity<ID> {
+
+ protected AggregateRoot() {
+ super();
+ }
+
+ protected AggregateRoot(ID id) {
+ super(id);
+ }
+
+ /**
+ * Gets the aggregate version (used for optimistic locking).
+ */
+ public abstract Long getVersion();
+
+ /**
+ * Sets the aggregate version.
+ */
+ public abstract void setVersion(Long version);
+}
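
A minimal sketch of a concrete aggregate built on this base class, assuming the generic AggregateRoot<ID> signature above; the class and field names are hypothetical and only illustrate how getVersion/setVersion back an optimistic-lock column.

    import com.datamate.common.domain.AggregateRoot;

    // Hypothetical aggregate; the name and fields are illustrative only.
    public class OperatorAggregate extends AggregateRoot<String> {
        private Long version;
        private String name;

        public OperatorAggregate(String id, String name) {
            super(id);
            this.name = name;
        }

        @Override
        public Long getVersion() {
            return version;
        }

        @Override
        public void setVersion(Long version) {
            this.version = version;
        }

        public String getName() {
            return name;
        }
    }
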
diff --git a/backend/shared/domain-common/src/main/java/com/datamate/common/domain/ValueObject.java b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/ValueObject.java
new file mode 100644
index 0000000..f897bf5
--- /dev/null
+++ b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/ValueObject.java
@@ -0,0 +1,16 @@
+package com.datamate.common.domain;
+
+/**
+ * DDD value object base class.
+ */
+public abstract class ValueObject {
+
+ @Override
+ public abstract boolean equals(Object obj);
+
+ @Override
+ public abstract int hashCode();
+
+ @Override
+ public abstract String toString();
+}
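
A sketch of a value object honouring the equals/hashCode/toString contract enforced here; FileChecksum is a hypothetical example, not a type from this change.

    import com.datamate.common.domain.ValueObject;

    import java.util.Locale;
    import java.util.Objects;

    // Hypothetical value object: equality is based solely on the normalized hex value.
    public final class FileChecksum extends ValueObject {
        private final String hex;

        public FileChecksum(String hex) {
            this.hex = Objects.requireNonNull(hex).toLowerCase(Locale.ROOT);
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (!(obj instanceof FileChecksum)) {
                return false;
            }
            return hex.equals(((FileChecksum) obj).hex);
        }

        @Override
        public int hashCode() {
            return hex.hashCode();
        }

        @Override
        public String toString() {
            return "FileChecksum(" + hex + ")";
        }
    }
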
diff --git a/backend/shared/domain-common/src/main/java/com/datamate/common/domain/model/ChunkUploadPreRequest.java b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/model/ChunkUploadPreRequest.java
new file mode 100644
index 0000000..2e1b039
--- /dev/null
+++ b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/model/ChunkUploadPreRequest.java
@@ -0,0 +1,49 @@
+package com.datamate.common.domain.model;
+
+import lombok.Builder;
+import lombok.Getter;
+import lombok.Setter;
+
+import java.time.LocalDateTime;
+
+/**
+ * Chunked-upload pre-request entity (aligned with database table t_chunk_upload_request).
+ */
+@Getter
+@Setter
+@Builder
+public class ChunkUploadPreRequest {
+
+ private String id; // UUID
+ private Integer totalFileNum; // Total number of files
+ private Integer uploadedFileNum; // Number of files already uploaded
+ private String uploadPath; // Upload target path
+ private LocalDateTime timeout; // Expiry time of the upload request
+ private String serviceId; // Owning service of this upload request, e.g. DATA-MANAGEMENT
+ private String checkInfo; // Business check info
+
+ /**
+ * Increments the number of uploaded files.
+ */
+ public void incrementUploadedFileNum() {
+ if (this.uploadedFileNum == null) {
+ this.uploadedFileNum = 1;
+ return;
+ }
+ this.uploadedFileNum++;
+ }
+
+ /**
+ * Checks whether all files have been uploaded.
+ */
+ public boolean isUploadComplete() {
+ return this.uploadedFileNum != null && this.uploadedFileNum.equals(this.totalFileNum);
+ }
+
+ /**
+ * Checks whether the request has timed out.
+ */
+ public boolean isRequestTimeout() {
+ return this.timeout != null && LocalDateTime.now().isAfter(this.timeout);
+ }
+}
diff --git a/backend/shared/domain-common/src/main/java/com/datamate/common/domain/model/ChunkUploadRequest.java b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/model/ChunkUploadRequest.java
new file mode 100644
index 0000000..78bb7ff
--- /dev/null
+++ b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/model/ChunkUploadRequest.java
@@ -0,0 +1,36 @@
+package com.datamate.common.domain.model;
+
+import lombok.Getter;
+import lombok.Setter;
+import org.springframework.web.multipart.MultipartFile;
+
+/**
+ * Chunked file upload request carrying a single uploaded chunk.
+ */
+@Getter
+@Setter
+public class ChunkUploadRequest {
+ /** Id returned by the pre-upload call, used to correlate chunks of the same task */
+ private String reqId;
+
+ /** File number, identifying which file of the batch this chunk belongs to */
+ private int fileNo;
+
+ /** File name */
+ private String fileName;
+
+ /** Total number of chunks of the file */
+ private int totalChunkNum;
+
+ /** Current chunk number, starting from 1 */
+ private int chunkNo;
+
+ /** Uploaded chunk content */
+ private MultipartFile file;
+
+ /** Chunk size in bytes */
+ private Long fileSize;
+
+ /** Checksum of the chunk (hex string), used to verify file integrity */
+ private String checkSumHex;
+}
diff --git a/backend/shared/domain-common/src/main/java/com/datamate/common/domain/model/FileUploadResult.java b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/model/FileUploadResult.java
new file mode 100644
index 0000000..8c2b98b
--- /dev/null
+++ b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/model/FileUploadResult.java
@@ -0,0 +1,24 @@
+package com.datamate.common.domain.model;
+
+import lombok.Builder;
+import lombok.Getter;
+import lombok.Setter;
+
+import java.io.File;
+
+@Getter
+@Setter
+@Builder
+public class FileUploadResult {
+ /** Whether all files of the request have been uploaded */
+ boolean isAllFilesUploaded;
+
+ /** Business check info */
+ String checkInfo;
+
+ /** Saved file */
+ File savedFile;
+
+ /** Original file name */
+ String fileName;
+}
diff --git a/backend/shared/domain-common/src/main/java/com/datamate/common/domain/model/UploadCheckInfo.java b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/model/UploadCheckInfo.java
new file mode 100644
index 0000000..bb32e3a
--- /dev/null
+++ b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/model/UploadCheckInfo.java
@@ -0,0 +1,7 @@
+package com.datamate.common.domain.model;
+
+/**
+ * Base class for business check info carried with an upload.
+ */
+public abstract class UploadCheckInfo {
+}
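
UploadCheckInfo is an empty marker base class, so each service defines its own payload to round-trip through the checkInfo field. A hedged sketch of what such a subclass might look like; the class and field are assumptions, not part of this change.

    import com.datamate.common.domain.model.UploadCheckInfo;

    // Hypothetical payload: which dataset the uploaded files should be attached to.
    public class DatasetUploadCheckInfo extends UploadCheckInfo {
        private String datasetId;

        public String getDatasetId() {
            return datasetId;
        }

        public void setDatasetId(String datasetId) {
            this.datasetId = datasetId;
        }
    }
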
diff --git a/backend/shared/domain-common/src/main/java/com/datamate/common/domain/model/base/BaseEntity.java b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/model/base/BaseEntity.java
new file mode 100644
index 0000000..5abd323
--- /dev/null
+++ b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/model/base/BaseEntity.java
@@ -0,0 +1,61 @@
+package com.datamate.common.domain.model.base;
+
+import com.baomidou.mybatisplus.annotation.FieldFill;
+import com.baomidou.mybatisplus.annotation.IdType;
+import com.baomidou.mybatisplus.annotation.TableField;
+import com.baomidou.mybatisplus.annotation.TableId;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+import lombok.Setter;
+
+import java.io.Serial;
+import java.io.Serializable;
+import java.time.LocalDateTime;
+
+/**
+ * Entity base class.
+ *
+ * @param <ID> the entity ID type
+ */
+@Getter
+@Setter
+@NoArgsConstructor
+public abstract class BaseEntity<ID> implements Serializable {
+ @Serial
+ private static final long serialVersionUID = 1L;
+
+ /**
+ * Entity ID
+ */
+ @TableId(type = IdType.ASSIGN_ID)
+ protected ID id;
+
+ /**
+ * Creation time
+ */
+ @TableField(fill = FieldFill.INSERT)
+ protected LocalDateTime createdAt;
+
+ /**
+ * Last update time
+ */
+ @TableField(fill = FieldFill.INSERT_UPDATE)
+ protected LocalDateTime updatedAt;
+
+ /**
+ * Created by
+ */
+ @TableField(fill = FieldFill.INSERT)
+ protected String createdBy;
+
+ /**
+ * Updated by
+ */
+ @TableField(fill = FieldFill.INSERT_UPDATE)
+ protected String updatedBy;
+
+ public BaseEntity(ID id) {
+ super();
+ this.id = id;
+ }
+}
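
The FieldFill annotations on createdAt/updatedAt/createdBy/updatedBy only take effect if a MetaObjectHandler is registered; none appears in this part of the diff, so the following is only a sketch of the usual MyBatis-Plus 3.x wiring (the class name is an assumption, and the audit-user lookup is left out).

    import com.baomidou.mybatisplus.core.handlers.MetaObjectHandler;
    import org.apache.ibatis.reflection.MetaObject;
    import org.springframework.stereotype.Component;

    import java.time.LocalDateTime;

    // Fills the audit timestamps declared on BaseEntity during inserts and updates.
    @Component
    public class AuditMetaObjectHandler implements MetaObjectHandler {

        @Override
        public void insertFill(MetaObject metaObject) {
            this.strictInsertFill(metaObject, "createdAt", LocalDateTime.class, LocalDateTime.now());
            this.strictInsertFill(metaObject, "updatedAt", LocalDateTime.class, LocalDateTime.now());
            // createdBy/updatedBy would be filled the same way once a current-user source exists.
        }

        @Override
        public void updateFill(MetaObject metaObject) {
            this.strictUpdateFill(metaObject, "updatedAt", LocalDateTime.class, LocalDateTime.now());
        }
    }
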
diff --git a/backend/shared/domain-common/src/main/java/com/datamate/common/domain/service/FileService.java b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/service/FileService.java
new file mode 100644
index 0000000..8aa059e
--- /dev/null
+++ b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/service/FileService.java
@@ -0,0 +1,91 @@
+package com.datamate.common.domain.service;
+
+import com.datamate.common.domain.model.ChunkUploadPreRequest;
+import com.datamate.common.domain.model.ChunkUploadRequest;
+import com.datamate.common.domain.model.FileUploadResult;
+import com.datamate.common.domain.utils.ChunksSaver;
+import com.datamate.common.infrastructure.mapper.ChunkUploadRequestMapper;
+import org.springframework.stereotype.Component;
+import org.springframework.transaction.annotation.Transactional;
+
+import java.io.File;
+import java.time.LocalDateTime;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.UUID;
+
+/**
+ * File service for chunked file uploads.
+ */
+@Component
+public class FileService {
+ private static final int DEFAULT_TIMEOUT = 120;
+
+ private final ChunkUploadRequestMapper chunkUploadRequestMapper;
+
+ public FileService(ChunkUploadRequestMapper chunkUploadRequestMapper) {
+ this.chunkUploadRequestMapper = chunkUploadRequestMapper;
+ }
+
+ /**
+ * Pre-upload: registers the upload request and returns its id.
+ */
+ @Transactional
+ public String preUpload(ChunkUploadPreRequest chunkUploadPreRequest) {
+ chunkUploadPreRequest.setId(UUID.randomUUID().toString());
+ chunkUploadPreRequest.setTimeout(LocalDateTime.now().plusSeconds(DEFAULT_TIMEOUT));
+ chunkUploadRequestMapper.insert(chunkUploadPreRequest);
+ return chunkUploadPreRequest.getId();
+ }
+
+ /**
+ * Chunked upload: saves one chunk and finalizes the request once all files are in.
+ */
+ @Transactional
+ public FileUploadResult chunkUpload(ChunkUploadRequest uploadFileRequest) {
+ uploadFileRequest.setFileSize(uploadFileRequest.getFile().getSize());
+ ChunkUploadPreRequest preRequest = chunkUploadRequestMapper.findById(uploadFileRequest.getReqId());
+ if (preRequest == null || preRequest.isUploadComplete() || preRequest.isRequestTimeout()) {
+ throw new IllegalArgumentException("预上传请求不存在");
+ }
+ File savedFile;
+ if (uploadFileRequest.getTotalChunkNum() > 1) {
+ savedFile = uploadChunk(uploadFileRequest, preRequest);
+ } else {
+ savedFile = uploadFile(uploadFileRequest, preRequest);
+ }
+ if (chunkUploadRequestMapper.update(preRequest) == 0) {
+ throw new IllegalArgumentException("预上传请求不存在");
+ }
+ boolean isFinish = Objects.equals(preRequest.getUploadedFileNum(), preRequest.getTotalFileNum());
+ if (isFinish) {
+ // Delete the temporary directory used to store the chunks
+ ChunksSaver.deleteFiles(new File(preRequest.getUploadPath(),
+ String.format(ChunksSaver.TEMP_DIR_NAME_FORMAT, preRequest.getId())).getPath());
+ chunkUploadRequestMapper.deleteById(preRequest.getId());
+ }
+ return FileUploadResult.builder()
+ .isAllFilesUploaded(isFinish)
+ .checkInfo(preRequest.getCheckInfo())
+ .savedFile(savedFile)
+ .fileName(uploadFileRequest.getFileName())
+ .build();
+ }
+
+ private File uploadFile(ChunkUploadRequest fileUploadRequest, ChunkUploadPreRequest preRequest) {
+ File savedFile = ChunksSaver.saveFile(fileUploadRequest, preRequest);
+ preRequest.setTimeout(LocalDateTime.now().plusSeconds(DEFAULT_TIMEOUT));
+ preRequest.incrementUploadedFileNum();
+ return savedFile;
+ }
+
+ private File uploadChunk(ChunkUploadRequest fileUploadRequest, ChunkUploadPreRequest preRequest) {
+ Optional<File> savedFile = ChunksSaver.save(fileUploadRequest, preRequest);
+ if (savedFile.isPresent()) {
+ preRequest.incrementUploadedFileNum();
+ return savedFile.get();
+ }
+ preRequest.setTimeout(LocalDateTime.now().plusSeconds(DEFAULT_TIMEOUT));
+ return null;
+ }
+}
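
A hedged sketch of how a caller drives FileService end to end; only the pre-upload-then-chunk-upload order and the FileUploadResult handling come from the code above, while the wrapper class, upload path and service id are assumptions.

    import com.datamate.common.domain.model.ChunkUploadPreRequest;
    import com.datamate.common.domain.model.ChunkUploadRequest;
    import com.datamate.common.domain.model.FileUploadResult;
    import com.datamate.common.domain.service.FileService;

    import java.util.List;

    // Hypothetical caller showing the expected call order for a single multi-chunk file.
    public class ChunkUploadFlowSketch {
        private final FileService fileService;

        public ChunkUploadFlowSketch(FileService fileService) {
            this.fileService = fileService;
        }

        public void upload(List<ChunkUploadRequest> chunksInOrder) {
            // 1. Pre-upload: register how many files will arrive and where to store them.
            ChunkUploadPreRequest pre = ChunkUploadPreRequest.builder()
                    .totalFileNum(1)
                    .uploadedFileNum(0)
                    .uploadPath("/data/upload")   // assumed target directory
                    .serviceId("DATA_MANAGEMENT") // assumed owning service id
                    .build();
            String reqId = fileService.preUpload(pre);

            // 2. Upload the chunks strictly in order, tagging each with the returned request id.
            for (ChunkUploadRequest chunk : chunksInOrder) {
                chunk.setReqId(reqId);
                FileUploadResult result = fileService.chunkUpload(chunk);
                if (result.isAllFilesUploaded()) {
                    // 3. Everything for this request is on disk; hand off to business logic.
                    System.out.println("saved: " + result.getSavedFile());
                }
            }
        }
    }
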
diff --git a/backend/shared/domain-common/src/main/java/com/datamate/common/domain/utils/AnalyzerUtils.java b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/utils/AnalyzerUtils.java
new file mode 100644
index 0000000..5dd50c2
--- /dev/null
+++ b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/utils/AnalyzerUtils.java
@@ -0,0 +1,40 @@
+package com.datamate.common.domain.utils;
+
+import org.springframework.util.StringUtils;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Locale;
+
+/**
+ * File name parsing utilities.
+ */
+public class AnalyzerUtils {
+ /** File extension of zip archives */
+ public static final String TYPE_ZIP = "zip";
+
+ /** File extension of tar.gz archives */
+ public static final String TYPE_TAR_GZ = "tar.gz";
+
+ private static final List<String> SPECIAL_EXTENSIONS = Collections.singletonList(TYPE_TAR_GZ);
+
+ /**
+ * Gets the file extension from a file path.
+ *
+ * @param filePath file path
+ * @return file extension (lower-cased), or an empty string if there is none
+ */
+ public static String getExtension(final String filePath) {
+ String filename = CommonUtils.trimFilePath(filePath);
+ for (String ext : SPECIAL_EXTENSIONS) {
+ if (StringUtils.endsWithIgnoreCase(filename, "." + ext)) {
+ return ext;
+ }
+ }
+ int firstDotIndex = filename.lastIndexOf(".");
+ if (firstDotIndex == -1) {
+ return "";
+ }
+ return filename.substring(firstDotIndex + 1).toLowerCase(Locale.ROOT);
+ }
+}
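
Expected behaviour of getExtension on a few inputs, with outputs shown in the comments:

    import com.datamate.common.domain.utils.AnalyzerUtils;

    public class AnalyzerUtilsExample {
        public static void main(String[] args) {
            // Special-cased multi-part extension:
            System.out.println(AnalyzerUtils.getExtension("/data/upload/corpus.tar.gz")); // tar.gz
            // Regular extension, lower-cased:
            System.out.println(AnalyzerUtils.getExtension("/data/upload/Image.PNG"));     // png
            // No extension at all:
            System.out.println(AnalyzerUtils.getExtension("/data/upload/README"));        // (empty string)
        }
    }
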
diff --git a/backend/shared/domain-common/src/main/java/com/datamate/common/domain/utils/ChunksSaver.java b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/utils/ChunksSaver.java
new file mode 100644
index 0000000..539cbb6
--- /dev/null
+++ b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/utils/ChunksSaver.java
@@ -0,0 +1,134 @@
+package com.datamate.common.domain.utils;
+
+import com.datamate.common.domain.model.ChunkUploadPreRequest;
+import com.datamate.common.domain.model.ChunkUploadRequest;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.io.FileUtils;
+import org.springframework.web.multipart.MultipartFile;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.time.LocalDateTime;
+import java.time.temporal.ChronoUnit;
+import java.util.Optional;
+
+@Slf4j
+public class ChunksSaver {
+ /**
+ * Name format of the temporary directory used to store chunks
+ */
+ public static final String TEMP_DIR_NAME_FORMAT = "req_%s_chunks";
+
+ /**
+ * Saves one chunk.
+ *
+ * @param fileUploadRequest chunk upload request
+ * @param preUploadReq pre-upload request
+ * @return the complete file once all chunks have been saved, otherwise empty
+ */
+ public static Optional<File> save(ChunkUploadRequest fileUploadRequest, ChunkUploadPreRequest preUploadReq) {
+ Path uploadPath = Paths.get(preUploadReq.getUploadPath(),
+ String.format(TEMP_DIR_NAME_FORMAT, preUploadReq.getId()));
+ LocalDateTime startTime = LocalDateTime.now();
+ // The temporary file is named after the file number
+ File targetFile = new File(uploadPath.toString(), String.valueOf(fileUploadRequest.getFileNo()));
+
+ // Append the chunk to the end of the target file
+ appendToTargetFile(targetFile, getFileInputStream(fileUploadRequest.getFile()));
+
+ // If not all chunks have arrived yet, return empty; once the last chunk is in, move the reassembled file to the target path
+ if (fileUploadRequest.getTotalChunkNum() != fileUploadRequest.getChunkNo()) {
+ log.debug("save chunk {} cost {}", fileUploadRequest.getChunkNo(),
+ ChronoUnit.MILLIS.between(startTime, LocalDateTime.now()));
+ return Optional.empty();
+ }
+
+ File finalFile = new File(preUploadReq.getUploadPath(), fileUploadRequest.getFileName());
+ if (!targetFile.renameTo(finalFile)) {
+ log.error("failed to mv file:{}, req Id:{}", targetFile.getName(), fileUploadRequest.getReqId());
+ throw new IllegalArgumentException("failed to move file to target dir");
+ }
+ log.debug("save chunk {} cost {}", fileUploadRequest.getChunkNo(),
+ ChronoUnit.MILLIS.between(startTime, LocalDateTime.now()));
+ return Optional.of(finalFile);
+ }
+
+ private static InputStream getFileInputStream(MultipartFile file) {
+ try {
+ return file.getInputStream();
+ } catch (IOException e) {
+ log.error("get uploaded file input stream failed", e);
+ throw new IllegalArgumentException();
+ }
+ }
+
+ /**
+ * Saves a file uploaded as a single chunk.
+ *
+ * @param fileUploadRequest chunk upload request
+ * @param preUploadReq pre-upload request
+ */
+ public static File saveFile(ChunkUploadRequest fileUploadRequest, ChunkUploadPreRequest preUploadReq) {
+ // Save the file directly
+ File targetFile = new File(preUploadReq.getUploadPath(), fileUploadRequest.getFileName());
+ try {
+ log.info("file path {}, file size {}", targetFile.toPath(), fileUploadRequest.getFile().getSize());
+ FileUtils.copyInputStreamToFile(getFileInputStream(fileUploadRequest.getFile()), targetFile);
+ } catch (IOException e) {
+ throw new IllegalArgumentException();
+ }
+ return targetFile;
+ }
+
+ /**
+ * Appends a chunk to the end of the target file.
+ *
+ * @param targetFile target file
+ * @param inputStream chunk input stream
+ */
+ public static void appendToTargetFile(File targetFile, InputStream inputStream) {
+ try (InputStream in = inputStream) {
+ byte[] buffer = new byte[1024 * 1024];
+ int byteRead;
+ while ((byteRead = in.read(buffer)) != -1) {
+ FileUtils.writeByteArrayToFile(targetFile, buffer, 0, byteRead, true);
+ }
+ } catch (IOException e) {
+ throw new IllegalArgumentException("failed to append chunk to target file", e);
+ }
+ }
+
+ /**
+ * Deletes all files under the given path recursively.
+ *
+ * @param uploadPath directory path
+ */
+ public static void deleteFiles(String uploadPath) {
+ File dic = new File(uploadPath);
+ if (!dic.exists()) {
+ return;
+ }
+ File[] files = dic.listFiles();
+ if (files == null || files.length == 0) {
+ dic.delete();
+ return;
+ }
+ try {
+ for (File file : files) {
+ if (file.isDirectory()) {
+ deleteFiles(file.getPath());
+ } else {
+ file.delete();
+ }
+ }
+ if (dic.exists()) {
+ dic.delete();
+ }
+ } catch (SecurityException e) {
+ log.warn("Fail to delete file", e);
+ }
+ }
+}
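
save() appends each arriving chunk to one temporary file and only finalizes when chunkNo equals totalChunkNum, so the chunks of a given file must be sent strictly in order rather than in parallel. A client-side splitter sketch that produces chunks in that order; the chunk size and class name are arbitrary choices, not values from this change.

    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;

    // Splits a local file into fixed-size chunks numbered from 1, ready to feed the uploader.
    public class ChunkSplitterSketch {
        private static final int CHUNK_SIZE = 5 * 1024 * 1024; // 5 MB, arbitrary

        public static void split(Path source) throws IOException {
            long totalChunks = (Files.size(source) + CHUNK_SIZE - 1) / CHUNK_SIZE;
            try (InputStream in = Files.newInputStream(source)) {
                int chunkNo = 1;
                byte[] chunk;
                while ((chunk = in.readNBytes(CHUNK_SIZE)).length > 0) {
                    // Each iteration yields the payload of one ChunkUploadRequest.
                    System.out.printf("chunk %d/%d: %d bytes%n", chunkNo++, totalChunks, chunk.length);
                }
            }
        }
    }
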
diff --git a/backend/shared/domain-common/src/main/java/com/datamate/common/domain/utils/CommonUtils.java b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/utils/CommonUtils.java
new file mode 100644
index 0000000..4a1fe47
--- /dev/null
+++ b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/utils/CommonUtils.java
@@ -0,0 +1,24 @@
+package com.datamate.common.domain.utils;
+
+import java.io.File;
+
+/**
+ * Common utilities.
+ */
+public class CommonUtils {
+ /**
+ * Gets the file name (with extension) from a file path.
+ *
+ * @param filePath file path
+ * @return file name with extension
+ */
+ public static String trimFilePath(String filePath) {
+ int lastSlashIndex = filePath.lastIndexOf(File.separator);
+
+ String filename = filePath;
+ if (lastSlashIndex != -1) {
+ filename = filePath.substring(lastSlashIndex + 1);
+ }
+ return filename;
+ }
+}
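
trimFilePath splits on File.separator, so it is platform dependent: on a POSIX JVM it strips the directory part of slash-separated paths, while a backslash-separated path passed to a POSIX JVM is returned unchanged. A small illustration, with expected POSIX output in the comments:

    import com.datamate.common.domain.utils.CommonUtils;

    public class CommonUtilsExample {
        public static void main(String[] args) {
            System.out.println(CommonUtils.trimFilePath("/data/upload/report.pdf")); // report.pdf
            System.out.println(CommonUtils.trimFilePath("report.pdf"));              // report.pdf
        }
    }
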
diff --git a/backend/shared/domain-common/src/main/java/com/datamate/common/infrastructure/common/IgnoreResponseWrap.java b/backend/shared/domain-common/src/main/java/com/datamate/common/infrastructure/common/IgnoreResponseWrap.java
new file mode 100644
index 0000000..fac5ecb
--- /dev/null
+++ b/backend/shared/domain-common/src/main/java/com/datamate/common/infrastructure/common/IgnoreResponseWrap.java
@@ -0,0 +1,15 @@
+package com.datamate.common.infrastructure.common;
+
+import java.lang.annotation.*;
+
+/**
+ * Marks a controller method or class whose response should not be wrapped.
+ *
+ * When global response wrapping is enabled, annotate the endpoint or class with this annotation to skip the wrapping.
+ *
+ */
+@Target({ElementType.METHOD, ElementType.TYPE})
+@Retention(RetentionPolicy.RUNTIME)
+@Documented
+public @interface IgnoreResponseWrap {
+}
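
The wrapping advice that honours this marker is not part of this portion of the diff, so the following only illustrates where the annotation would sit; the endpoint itself is hypothetical.

    import com.datamate.common.infrastructure.common.IgnoreResponseWrap;
    import org.springframework.web.bind.annotation.GetMapping;
    import org.springframework.web.bind.annotation.RestController;

    // Hypothetical endpoint returning a plain-text body that must not be wrapped in Response<T>.
    @RestController
    public class HealthProbeController {

        @IgnoreResponseWrap
        @GetMapping("/probe/ready")
        public String ready() {
            return "OK";
        }
    }
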
diff --git a/backend/shared/domain-common/src/main/java/com/datamate/common/infrastructure/common/Response.java b/backend/shared/domain-common/src/main/java/com/datamate/common/infrastructure/common/Response.java
new file mode 100644
index 0000000..2451fe6
--- /dev/null
+++ b/backend/shared/domain-common/src/main/java/com/datamate/common/infrastructure/common/Response.java
@@ -0,0 +1,63 @@
+package com.datamate.common.infrastructure.common;
+
+import com.datamate.common.infrastructure.exception.ErrorCode;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+import lombok.Setter;
+
+import java.io.Serial;
+import java.io.Serializable;
+
+/**
+ * Generic response wrapper.
+ */
+@Getter
+@Setter
+@NoArgsConstructor
+@AllArgsConstructor
+public class Response<T> implements Serializable {
+ @Serial
+ private static final long serialVersionUID = 1L;
+ /**
+ * Status code
+ */
+ private String code;
+
+ /**
+ * Message
+ */
+ private String message;
+
+ /**
+ * Data payload
+ */
+ private T data;
+
+ /**
+ * Builds a success response.
+ *
+ * @param data response data
+ * @param <T> response data type
+ * @return the response body
+ */
+ public static <T> Response<T> ok(T data) {
+ return new Response<>("0", "success", data);
+ }
+
+ /**
+ * Builds an error response.
+ *
+ * @param errorCode error code
+ * @param data response data
+ * @param <T> response data type
+ * @return the response body
+ */
+ public static