feat(data-management): 添加数据集父子层级结构功能

- 在OpenAPI规范中新增parentDatasetId字段用于层级过滤
- 实现数据集父子关系的创建、更新和删除逻辑
- 添加数据集移动时的路径重命名和文件路径前缀更新
- 增加子数据集数量验证防止误删父数据集
- 更新前端界面支持选择父数据集和导航显示
- 优化Python后端自动标注任务的路径处理逻辑
- 修改数据库表结构添加外键约束确保数据一致性
This commit is contained in:
2026-01-20 13:34:50 +08:00
parent bde00c3c38
commit 79371ba078
26 changed files with 394 additions and 133 deletions

View File

@@ -6,6 +6,9 @@ import com.datamate.common.domain.utils.ChunksSaver;
import com.datamate.common.setting.application.SysParamApplicationService;
import com.datamate.datamanagement.interfaces.dto.*;
import com.datamate.common.infrastructure.exception.BusinessAssert;
import com.datamate.common.infrastructure.exception.BusinessException;
import com.datamate.common.infrastructure.exception.CommonErrorCode;
import com.datamate.common.infrastructure.exception.SystemErrorCode;
import com.datamate.common.interfaces.PagedResponse;
import com.datamate.datamanagement.domain.model.dataset.Dataset;
import com.datamate.datamanagement.domain.model.dataset.DatasetFile;
@@ -27,6 +30,7 @@ import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.StringUtils;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -62,7 +66,9 @@ public class DatasetApplicationService {
BusinessAssert.isTrue(datasetRepository.findByName(createDatasetRequest.getName()) == null, DataManagementErrorCode.DATASET_ALREADY_EXISTS);
// 创建数据集对象
Dataset dataset = DatasetConverter.INSTANCE.convertToDataset(createDatasetRequest);
dataset.initCreateParam(datasetBasePath);
Dataset parentDataset = resolveParentDataset(createDatasetRequest.getParentDatasetId(), dataset.getId());
dataset.setParentDatasetId(parentDataset == null ? null : parentDataset.getId());
dataset.initCreateParam(datasetBasePath, parentDataset == null ? null : parentDataset.getPath());
// 处理标签
Set<Tag> processedTags = Optional.ofNullable(createDatasetRequest.getTags())
.filter(CollectionUtils::isNotEmpty)
@@ -98,6 +104,9 @@ public class DatasetApplicationService {
if (Objects.nonNull(updateDatasetRequest.getStatus())) {
dataset.setStatus(updateDatasetRequest.getStatus());
}
if (updateDatasetRequest.getParentDatasetId() != null) {
handleParentChange(dataset, updateDatasetRequest.getParentDatasetId());
}
if (StringUtils.hasText(updateDatasetRequest.getDataSource())) {
// 数据源id不为空,使用异步线程进行文件扫盘落库
processDataSourceAsync(dataset.getId(), updateDatasetRequest.getDataSource());
@@ -112,10 +121,11 @@ public class DatasetApplicationService {
/**
 * Deletes a dataset by id: removes the database row and then its on-disk folder.
 * <p>Fails when the dataset does not exist or still has child datasets.</p>
 *
 * @param datasetId id of the dataset to delete
 */
@Transactional
public void deleteDataset(String datasetId) {
    // Load first so we can validate existence and capture the storage path
    // before the row is removed.
    Dataset dataset = datasetRepository.getById(datasetId);
    BusinessAssert.notNull(dataset, DataManagementErrorCode.DATASET_NOT_FOUND);
    // A parent dataset must not be deleted while children still reference it.
    long childCount = datasetRepository.countByParentId(datasetId);
    BusinessAssert.isTrue(childCount == 0, DataManagementErrorCode.DATASET_HAS_CHILDREN);
    datasetRepository.removeById(datasetId);
    // dataset is guaranteed non-null here (asserted above); the previous
    // null-guarded duplicate of this call was leftover merge residue and is removed.
    ChunksSaver.deleteFolder(dataset.getPath());
}
/**
@@ -164,6 +174,73 @@ public class DatasetApplicationService {
return tags;
}
/**
 * Resolves and validates the requested parent dataset.
 *
 * @param parentDatasetId raw parent id from the request; blank/null means "no parent"
 * @param currentDatasetId id of the dataset being created/updated (self-parenting is rejected)
 * @return the validated parent dataset, or null when no parent was requested
 */
private Dataset resolveParentDataset(String parentDatasetId, String currentDatasetId) {
    String parentId = normalizeParentId(parentDatasetId);
    if (parentId == null) {
        return null;
    }
    // A dataset may not be its own parent.
    BusinessAssert.isTrue(!parentId.equals(currentDatasetId), CommonErrorCode.PARAM_ERROR);
    Dataset parent = datasetRepository.getById(parentId);
    BusinessAssert.notNull(parent, DataManagementErrorCode.DATASET_NOT_FOUND);
    // Hierarchy is limited to one level: the chosen parent must itself be a root dataset.
    BusinessAssert.isTrue(parent.getParentDatasetId() == null, CommonErrorCode.PARAM_ERROR);
    return parent;
}
/**
 * Applies a parent change for an existing dataset: validates the move,
 * relocates the dataset folder under the new parent (or the base path),
 * and records the new parent id.
 *
 * @param dataset the dataset being updated (mutated in place)
 * @param parentDatasetId requested parent id; blank/null detaches the dataset to root level
 */
private void handleParentChange(Dataset dataset, String parentDatasetId) {
    String targetParentId = normalizeParentId(parentDatasetId);
    if (Objects.equals(dataset.getParentDatasetId(), targetParentId)) {
        // No-op: requested parent is already the current one.
        return;
    }
    long children = datasetRepository.countByParentId(dataset.getId());
    if (children > 0 && targetParentId != null) {
        // A dataset that has children cannot be nested under another parent
        // (hierarchy is limited to one level).
        throw BusinessException.of(DataManagementErrorCode.DATASET_HAS_CHILDREN);
    }
    Dataset newParent = targetParentId == null
            ? null
            : resolveParentDataset(targetParentId, dataset.getId());
    String parentBase = newParent == null ? datasetBasePath : newParent.getPath();
    // Move the folder on disk first; only then record the new parent id.
    moveDatasetPath(dataset, buildDatasetPath(parentBase, dataset.getId()));
    dataset.setParentDatasetId(newParent == null ? null : newParent.getId());
}
/**
 * Normalizes a raw parent-dataset id: null or whitespace-only input means "no parent".
 *
 * @param parentDatasetId raw id as received from the API; may be null or blank
 * @return the trimmed id, or null when no parent was specified
 */
private String normalizeParentId(String parentDatasetId) {
    return (parentDatasetId == null || parentDatasetId.isBlank())
            ? null
            : parentDatasetId.trim();
}
/**
 * Joins a base directory and a dataset id into a filesystem path,
 * first stripping any trailing separators from the base.
 *
 * @param basePath parent directory (may carry trailing separators)
 * @param datasetId dataset id used as the folder name
 * @return {@code basePath + separator + datasetId} with a single separator between them
 */
private String buildDatasetPath(String basePath, String datasetId) {
    int end = basePath.length();
    while (end > 0 && basePath.charAt(end - 1) == File.separatorChar) {
        end--;
    }
    return basePath.substring(0, end) + File.separator + datasetId;
}
/**
 * Physically relocates a dataset's folder to {@code newPath}, then rewrites the
 * stored file-path prefixes in the database and updates the entity's path.
 * <p>No-op when old and new paths are equal. If the source folder does not
 * exist, the target directory is simply created. Fails when the target
 * already exists (treated as a parameter error).</p>
 * <p>NOTE(review): the filesystem move and the DB prefix update are not
 * atomic — a failure after {@code Files.move} leaves DB rows pointing at the
 * old path until the transaction logic compensates; confirm rollback story.</p>
 *
 * @param dataset dataset whose folder is moved (its path field is mutated on success)
 * @param newPath absolute target path for the dataset folder
 */
private void moveDatasetPath(Dataset dataset, String newPath) {
String oldPath = dataset.getPath();
// Nothing to do when the path is unchanged (also covers both-null).
if (Objects.equals(oldPath, newPath)) {
return;
}
Path sourcePath = Paths.get(oldPath);
Path targetPath = Paths.get(newPath);
try {
if (Files.exists(sourcePath)) {
// Refuse to overwrite an existing target folder.
if (Files.exists(targetPath)) {
throw BusinessException.of(CommonErrorCode.PARAM_ERROR);
}
// NOTE(review): targetPath.getParent() may be null for a root-level
// path; createDirectories(null) would NPE — confirm newPath always nested.
Files.createDirectories(targetPath.getParent());
Files.move(sourcePath, targetPath);
} else {
// Source folder missing (never materialized): just create the target.
Files.createDirectories(targetPath);
}
} catch (IOException e) {
log.error("move dataset path error, from {} to {}", oldPath, newPath, e);
throw BusinessException.of(SystemErrorCode.FILE_SYSTEM_ERROR);
}
// Keep DB file paths consistent with the new on-disk location.
datasetFileRepository.updateFilePathPrefix(dataset.getId(), oldPath, newPath);
dataset.setPath(newPath);
}
/**
* 获取数据集统计信息
*/

View File

@@ -323,10 +323,11 @@ public class DatasetFileApplicationService {
* @return 请求id
*/
@Transactional
public String preUpload(UploadFilesPreRequest chunkUploadRequest, String datasetId) {
if (Objects.isNull(datasetRepository.getById(datasetId))) {
throw BusinessException.of(DataManagementErrorCode.DATASET_NOT_FOUND);
}
public String preUpload(UploadFilesPreRequest chunkUploadRequest, String datasetId) {
Dataset dataset = datasetRepository.getById(datasetId);
if (Objects.isNull(dataset)) {
throw BusinessException.of(DataManagementErrorCode.DATASET_NOT_FOUND);
}
// 构建上传路径,如果有 prefix 则追加到路径中
String prefix = Optional.ofNullable(chunkUploadRequest.getPrefix()).orElse("").trim();
@@ -335,10 +336,13 @@ public class DatasetFileApplicationService {
prefix = prefix.substring(1);
}
String uploadPath = datasetBasePath + File.separator + datasetId;
if (!prefix.isEmpty()) {
uploadPath = uploadPath + File.separator + prefix.replace("/", File.separator);
}
String uploadPath = dataset.getPath();
if (uploadPath == null || uploadPath.isBlank()) {
uploadPath = datasetBasePath + File.separator + datasetId;
}
if (!prefix.isEmpty()) {
uploadPath = uploadPath + File.separator + prefix.replace("/", File.separator);
}
ChunkUploadPreRequest request = ChunkUploadPreRequest.builder().build();
request.setUploadPath(uploadPath);

View File

@@ -28,6 +28,10 @@ public class Dataset extends BaseEntity<String> {
* 数据集描述
*/
private String description;
/**
* 父数据集ID
*/
private String parentDatasetId;
/**
* 数据集类型
*/
@@ -110,14 +114,23 @@ public class Dataset extends BaseEntity<String> {
this.updatedAt = LocalDateTime.now();
}
public void initCreateParam(String datasetBasePath) {
public void initCreateParam(String datasetBasePath, String parentPath) {
this.id = UUID.randomUUID().toString();
this.path = datasetBasePath + File.separator + this.id;
String basePath = normalizeBasePath(parentPath != null && !parentPath.isBlank() ? parentPath : datasetBasePath);
this.path = basePath + File.separator + this.id;
if (this.status == null) {
this.status = DatasetStatusType.DRAFT;
}
}
/**
 * Strips all trailing path separators from a base directory string.
 *
 * @param basePath directory path, possibly ending in one or more separators
 * @return the same path without trailing separators
 */
private String normalizeBasePath(String basePath) {
    int length = basePath.length();
    while (length > 0 && basePath.charAt(length - 1) == File.separatorChar) {
        length--;
    }
    return basePath.substring(0, length);
}
public void updateBasicInfo(String name, String description, String category) {
if (name != null && !name.isEmpty()) this.name = name;
if (description != null) this.description = description;

View File

@@ -37,10 +37,14 @@ public enum DataManagementErrorCode implements ErrorCode {
* 数据集文件已存在
*/
DATASET_FILE_ALREADY_EXISTS("data_management.0006", "数据集文件已存在"),
/**
* 目录不存在
*/
DIRECTORY_NOT_FOUND("data_management.0007", "目录不存在");
/**
* 目录不存在
*/
DIRECTORY_NOT_FOUND("data_management.0007", "目录不存在"),
/**
* 存在子数据集
*/
DATASET_HAS_CHILDREN("data_management.0008", "存在子数据集,禁止删除或移动");
private final String code;
private final String message;

View File

@@ -26,4 +26,7 @@ public interface DatasetFileMapper extends BaseMapper<DatasetFile> {
int update(DatasetFile file);
int deleteById(@Param("id") String id);
int updateFilePathPrefix(@Param("datasetId") String datasetId,
@Param("oldPrefix") String oldPrefix,
@Param("newPrefix") String newPrefix);
}

View File

@@ -25,4 +25,6 @@ public interface DatasetFileRepository extends IRepository<DatasetFile> {
IPage<DatasetFile> findByCriteria(String datasetId, String fileType, String status, String name,
IPage<DatasetFile> page);
int updateFilePathPrefix(String datasetId, String oldPrefix, String newPrefix);
}

View File

@@ -26,4 +26,6 @@ public interface DatasetRepository extends IRepository<Dataset> {
AllDatasetStatisticsResponse getAllDatasetStatistics();
IPage<Dataset> findByCriteria(IPage<Dataset> page, DatasetPagingQuery query);
long countByParentId(String parentDatasetId);
}

View File

@@ -56,4 +56,9 @@ public class DatasetFileRepositoryImpl extends CrudRepository<DatasetFileMapper,
.eq(StringUtils.hasText(status), DatasetFile::getStatus, status)
.like(StringUtils.hasText(name), DatasetFile::getFileName, name));
}
@Override
public int updateFilePathPrefix(String datasetId, String oldPrefix, String newPrefix) {
// Bulk-rewrites the stored file_path prefix for every file of the dataset
// after its folder has been moved; returns the number of rows updated.
return datasetFileMapper.updateFilePathPrefix(datasetId, oldPrefix, newPrefix);
}
}

View File

@@ -54,6 +54,14 @@ public class DatasetRepositoryImpl extends CrudRepository<DatasetMapper, Dataset
.eq(query.getType() != null, Dataset::getDatasetType, query.getType())
.eq(query.getStatus() != null, Dataset::getStatus, query.getStatus());
if (query.getParentDatasetId() != null) {
if (StringUtils.isBlank(query.getParentDatasetId())) {
wrapper.isNull(Dataset::getParentDatasetId);
} else {
wrapper.eq(Dataset::getParentDatasetId, query.getParentDatasetId());
}
}
if (StringUtils.isNotBlank(query.getKeyword())) {
wrapper.and(w ->
w.like(Dataset::getName, query.getKeyword()).or()
@@ -74,4 +82,10 @@ public class DatasetRepositoryImpl extends CrudRepository<DatasetMapper, Dataset
wrapper.orderByDesc(Dataset::getCreatedAt);
return datasetMapper.selectPage(page, wrapper);
}
/**
 * Counts the datasets whose parent_dataset_id equals the given id.
 *
 * @param parentDatasetId id of the prospective/current parent dataset
 * @return number of direct child datasets
 */
@Override
public long countByParentId(String parentDatasetId) {
    LambdaQueryWrapper<Dataset> byParent = new LambdaQueryWrapper<Dataset>()
            .eq(Dataset::getParentDatasetId, parentDatasetId);
    return datasetMapper.selectCount(byParent);
}
}

View File

@@ -29,6 +29,8 @@ public class CreateDatasetRequest {
/** 数据集类型 */
@NotNull(message = "数据集类型不能为空")
private DatasetType datasetType;
/** 父数据集ID */
private String parentDatasetId;
/** 标签列表 */
private List<String> tags;
/** 数据源 */

View File

@@ -39,4 +39,9 @@ public class DatasetPagingQuery extends PagingQuery {
* 状态过滤
*/
private DatasetStatusType status;
/**
* 父数据集ID过滤
*/
private String parentDatasetId;
}

View File

@@ -15,6 +15,8 @@ import java.util.Map;
public class DatasetResponse {
/** 数据集ID */
private String id;
/** 父数据集ID */
private String parentDatasetId;
/** 数据集名称 */
private String name;
/** 数据集描述 */

View File

@@ -23,6 +23,8 @@ public class UpdateDatasetRequest {
private String description;
/** 归集任务id */
private String dataSource;
/** 父数据集ID */
private String parentDatasetId;
/** 标签列表 */
private List<String> tags;
/** 数据集状态 */

View File

@@ -95,4 +95,11 @@
<delete id="deleteById" parameterType="string">
DELETE FROM t_dm_dataset_files WHERE id = #{id}
</delete>
<update id="updateFilePathPrefix">
    <!-- Rewrites the stored path prefix after a dataset folder move.
         CHAR_LENGTH (characters) is required instead of LENGTH (bytes):
         MySQL SUBSTRING positions are character-based, so LENGTH would slice
         the path at the wrong offset for multi-byte (e.g. Chinese) prefixes.
         NOTE(review): oldPrefix is a filesystem path; if it can ever contain
         the LIKE wildcards '%' or '_' they should be escaped before matching. -->
    UPDATE t_dm_dataset_files
    SET file_path = CONCAT(#{newPrefix}, SUBSTRING(file_path, CHAR_LENGTH(#{oldPrefix}) + 1))
    WHERE dataset_id = #{datasetId}
    AND file_path LIKE CONCAT(#{oldPrefix}, '%')
</update>
</mapper>

View File

@@ -4,12 +4,13 @@
<mapper namespace="com.datamate.datamanagement.infrastructure.persistence.mapper.DatasetMapper">
<sql id="Base_Column_List">
id, name, description, dataset_type, category, path, format, schema_info, size_bytes, file_count, record_count,
id, parent_dataset_id, name, description, dataset_type, category, path, format, schema_info, size_bytes, file_count, record_count,
retention_days, tags, metadata, status, is_public, is_featured, version, created_at, updated_at, created_by, updated_by
</sql>
<sql id="Alias_D_Column_List">
d.id AS id,
d.parent_dataset_id AS parent_dataset_id,
d.name AS name,
d.description AS description,
d.dataset_type AS dataset_type,