feat(data-management): add parent-child dataset hierarchy

- Add a parentDatasetId field to the OpenAPI spec for hierarchy filtering
- Implement creation, update, and deletion logic for dataset parent-child relationships
- Move the dataset directory and rewrite file path prefixes when a dataset is re-parented
- Validate the child dataset count to prevent accidentally deleting a parent dataset
- Update the frontend to support selecting a parent dataset and showing it in navigation
- Adjust output path handling for auto-annotation tasks in the Python backend
- Alter the database schema with a foreign key constraint to keep data consistent
commit 79371ba078
parent bde00c3c38
date   2026-01-20 13:34:50 +08:00

26 changed files with 394 additions and 133 deletions

View File

@@ -61,12 +61,17 @@ paths:
           schema:
             type: string
           description: Keyword search (name, description)
         - name: status
           in: query
           schema:
             type: string
             enum: [DRAFT, ACTIVE, PROCESSING, ARCHIVED, PUBLISHED, DEPRECATED]
           description: Filter by dataset status
+        - name: parentDatasetId
+          in: query
+          schema:
+            type: string
+          description: Filter by parent dataset ID (an empty string selects root datasets)
       responses:
         '200':
           description: Success
@@ -538,15 +543,18 @@ components:
           type: boolean
           description: Whether this is the last page
     DatasetResponse:
       type: object
       properties:
         id:
           type: string
           description: Dataset ID
+        parentDatasetId:
+          type: string
+          description: Parent dataset ID
         name:
           type: string
           description: Dataset name
         description:
           type: string
           description: Dataset description
@@ -590,29 +598,32 @@ components:
           type: string
           description: Creator
     CreateDatasetRequest:
       type: object
       required:
         - name
         - type
       properties:
         name:
           type: string
           description: Dataset name
           minLength: 1
           maxLength: 100
         description:
           type: string
           description: Dataset description
           maxLength: 500
         type:
           type: string
           description: Dataset type
+        parentDatasetId:
+          type: string
+          description: Parent dataset ID
         tags:
           type: array
           items:
             type: string
           description: Tag list
         dataSource:
           type: string
           description: Data source
@@ -620,22 +631,25 @@ components:
           type: string
           description: Target location
     UpdateDatasetRequest:
       type: object
       properties:
         name:
           type: string
           description: Dataset name
           maxLength: 100
         description:
           type: string
           description: Dataset description
           maxLength: 500
+        parentDatasetId:
+          type: string
+          description: Parent dataset ID
         tags:
           type: array
           items:
             type: string
           description: Tag list
         status:
           type: string
           enum: [DRAFT, ACTIVE, PROCESSING, ARCHIVED, PUBLISHED, DEPRECATED]
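Note: taken together with the repository change further down, the new parentDatasetId query parameter is three-valued — omitted means no hierarchy filter, an empty string selects root datasets, and a concrete UUID selects that dataset's children. A minimal client-side sketch, assuming a hypothetical deployment at http://localhost:8080/api/datasets (base URL and paging parameters are illustrative, not from this commit):

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ListRootDatasets {
    public static void main(String[] args) throws Exception {
        // parentDatasetId= (empty) asks for root datasets only; omit the parameter
        // to disable hierarchy filtering, or pass a UUID to list that dataset's children.
        URI uri = URI.create("http://localhost:8080/api/datasets?parentDatasetId=&page=1&size=20");
        HttpRequest request = HttpRequest.newBuilder(uri).GET().build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode());
        System.out.println(response.body());
    }
}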

View File

@@ -6,6 +6,9 @@ import com.datamate.common.domain.utils.ChunksSaver;
 import com.datamate.common.setting.application.SysParamApplicationService;
 import com.datamate.datamanagement.interfaces.dto.*;
 import com.datamate.common.infrastructure.exception.BusinessAssert;
+import com.datamate.common.infrastructure.exception.BusinessException;
+import com.datamate.common.infrastructure.exception.CommonErrorCode;
+import com.datamate.common.infrastructure.exception.SystemErrorCode;
 import com.datamate.common.interfaces.PagedResponse;
 import com.datamate.datamanagement.domain.model.dataset.Dataset;
 import com.datamate.datamanagement.domain.model.dataset.DatasetFile;
@@ -27,6 +30,7 @@ import org.springframework.stereotype.Service;
 import org.springframework.transaction.annotation.Transactional;
 import org.springframework.util.StringUtils;
+import java.io.File;
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -62,7 +66,9 @@ public class DatasetApplicationService {
         BusinessAssert.isTrue(datasetRepository.findByName(createDatasetRequest.getName()) == null, DataManagementErrorCode.DATASET_ALREADY_EXISTS);
         // build the dataset entity
         Dataset dataset = DatasetConverter.INSTANCE.convertToDataset(createDatasetRequest);
-        dataset.initCreateParam(datasetBasePath);
+        Dataset parentDataset = resolveParentDataset(createDatasetRequest.getParentDatasetId(), dataset.getId());
+        dataset.setParentDatasetId(parentDataset == null ? null : parentDataset.getId());
+        dataset.initCreateParam(datasetBasePath, parentDataset == null ? null : parentDataset.getPath());
         // process tags
         Set<Tag> processedTags = Optional.ofNullable(createDatasetRequest.getTags())
                 .filter(CollectionUtils::isNotEmpty)
@@ -98,6 +104,9 @@ public class DatasetApplicationService {
         if (Objects.nonNull(updateDatasetRequest.getStatus())) {
             dataset.setStatus(updateDatasetRequest.getStatus());
         }
+        if (updateDatasetRequest.getParentDatasetId() != null) {
+            handleParentChange(dataset, updateDatasetRequest.getParentDatasetId());
+        }
         if (StringUtils.hasText(updateDatasetRequest.getDataSource())) {
             // data source id present: scan the disk and persist files asynchronously
             processDataSourceAsync(dataset.getId(), updateDatasetRequest.getDataSource());
@@ -112,10 +121,11 @@ public class DatasetApplicationService {
     @Transactional
     public void deleteDataset(String datasetId) {
         Dataset dataset = datasetRepository.getById(datasetId);
+        BusinessAssert.notNull(dataset, DataManagementErrorCode.DATASET_NOT_FOUND);
+        long childCount = datasetRepository.countByParentId(datasetId);
+        BusinessAssert.isTrue(childCount == 0, DataManagementErrorCode.DATASET_HAS_CHILDREN);
         datasetRepository.removeById(datasetId);
-        if (dataset != null) {
-            ChunksSaver.deleteFolder(dataset.getPath());
-        }
+        ChunksSaver.deleteFolder(dataset.getPath());
     }
 
     /**
@@ -164,6 +174,73 @@ public class DatasetApplicationService {
         return tags;
     }
 
+    private Dataset resolveParentDataset(String parentDatasetId, String currentDatasetId) {
+        String normalized = normalizeParentId(parentDatasetId);
+        if (normalized == null) {
+            return null;
+        }
+        BusinessAssert.isTrue(!normalized.equals(currentDatasetId), CommonErrorCode.PARAM_ERROR);
+        Dataset parent = datasetRepository.getById(normalized);
+        BusinessAssert.notNull(parent, DataManagementErrorCode.DATASET_NOT_FOUND);
+        BusinessAssert.isTrue(parent.getParentDatasetId() == null, CommonErrorCode.PARAM_ERROR);
+        return parent;
+    }
+
+    private void handleParentChange(Dataset dataset, String parentDatasetId) {
+        String normalized = normalizeParentId(parentDatasetId);
+        if (Objects.equals(dataset.getParentDatasetId(), normalized)) {
+            return;
+        }
+        long childCount = datasetRepository.countByParentId(dataset.getId());
+        if (childCount > 0 && normalized != null) {
+            throw BusinessException.of(DataManagementErrorCode.DATASET_HAS_CHILDREN);
+        }
+        Dataset parent = normalized == null ? null : resolveParentDataset(normalized, dataset.getId());
+        String newPath = buildDatasetPath(parent == null ? datasetBasePath : parent.getPath(), dataset.getId());
+        moveDatasetPath(dataset, newPath);
+        dataset.setParentDatasetId(parent == null ? null : parent.getId());
+    }
+
+    private String normalizeParentId(String parentDatasetId) {
+        if (!StringUtils.hasText(parentDatasetId)) {
+            return null;
+        }
+        return parentDatasetId.trim();
+    }
+
+    private String buildDatasetPath(String basePath, String datasetId) {
+        String normalized = basePath;
+        while (normalized.endsWith(File.separator)) {
+            normalized = normalized.substring(0, normalized.length() - 1);
+        }
+        return normalized + File.separator + datasetId;
+    }
+
+    private void moveDatasetPath(Dataset dataset, String newPath) {
+        String oldPath = dataset.getPath();
+        if (Objects.equals(oldPath, newPath)) {
+            return;
+        }
+        Path sourcePath = Paths.get(oldPath);
+        Path targetPath = Paths.get(newPath);
+        try {
+            if (Files.exists(sourcePath)) {
+                if (Files.exists(targetPath)) {
+                    throw BusinessException.of(CommonErrorCode.PARAM_ERROR);
+                }
+                Files.createDirectories(targetPath.getParent());
+                Files.move(sourcePath, targetPath);
+            } else {
+                Files.createDirectories(targetPath);
+            }
+        } catch (IOException e) {
+            log.error("move dataset path error, from {} to {}", oldPath, newPath, e);
+            throw BusinessException.of(SystemErrorCode.FILE_SYSTEM_ERROR);
+        }
+        datasetFileRepository.updateFilePathPrefix(dataset.getId(), oldPath, newPath);
+        dataset.setPath(newPath);
+    }
+
     /**
      * Get dataset statistics
      */
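Note: the net effect of resolveParentDataset and handleParentChange is a strict one-level hierarchy — a parent must itself be a root dataset, a dataset that already has children cannot be re-parented, and a dataset cannot be its own parent. A standalone sketch of just that rule set (the method and parameter names here are illustrative, not part of the service):

public class ReparentRuleSketch {
    // Mirrors the checks in resolveParentDataset/handleParentChange:
    // returns true only when moving `datasetId` under `newParentId` is legal.
    static boolean canReparent(String datasetId, String newParentId,
                               long childCount, String newParentsOwnParentId) {
        if (newParentId == null) {
            return true;                        // detaching to root is always allowed
        }
        if (newParentId.equals(datasetId)) {
            return false;                       // a dataset cannot be its own parent
        }
        if (childCount > 0) {
            return false;                       // DATASET_HAS_CHILDREN: a parent cannot move under another dataset
        }
        return newParentsOwnParentId == null;   // only root datasets may serve as parents (one level)
    }

    public static void main(String[] args) {
        System.out.println(canReparent("a", null, 2, null));  // true: detach to root
        System.out.println(canReparent("a", "a", 0, null));   // false: self-parenting
        System.out.println(canReparent("a", "p", 3, null));   // false: "a" has children
        System.out.println(canReparent("a", "p", 0, "g"));    // false: "p" is itself a child
        System.out.println(canReparent("a", "p", 0, null));   // true
    }
}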

View File

@@ -323,10 +323,11 @@ public class DatasetFileApplicationService {
      * @return request id
      */
     @Transactional
     public String preUpload(UploadFilesPreRequest chunkUploadRequest, String datasetId) {
-        if (Objects.isNull(datasetRepository.getById(datasetId))) {
-            throw BusinessException.of(DataManagementErrorCode.DATASET_NOT_FOUND);
-        }
+        Dataset dataset = datasetRepository.getById(datasetId);
+        if (Objects.isNull(dataset)) {
+            throw BusinessException.of(DataManagementErrorCode.DATASET_NOT_FOUND);
+        }
 
         // build the upload path; if a prefix is provided, append it
         String prefix = Optional.ofNullable(chunkUploadRequest.getPrefix()).orElse("").trim();
@@ -335,10 +336,13 @@ public class DatasetFileApplicationService {
             prefix = prefix.substring(1);
         }
-        String uploadPath = datasetBasePath + File.separator + datasetId;
+        String uploadPath = dataset.getPath();
+        if (uploadPath == null || uploadPath.isBlank()) {
+            uploadPath = datasetBasePath + File.separator + datasetId;
+        }
         if (!prefix.isEmpty()) {
             uploadPath = uploadPath + File.separator + prefix.replace("/", File.separator);
         }
 
         ChunkUploadPreRequest request = ChunkUploadPreRequest.builder().build();
         request.setUploadPath(uploadPath);

View File

@@ -28,6 +28,10 @@ public class Dataset extends BaseEntity<String> {
      * dataset description
      */
     private String description;
+    /**
+     * parent dataset ID
+     */
+    private String parentDatasetId;
     /**
      * dataset type
      */
@@ -110,14 +114,23 @@ public class Dataset extends BaseEntity<String> {
         this.updatedAt = LocalDateTime.now();
     }
 
-    public void initCreateParam(String datasetBasePath) {
+    public void initCreateParam(String datasetBasePath, String parentPath) {
         this.id = UUID.randomUUID().toString();
-        this.path = datasetBasePath + File.separator + this.id;
+        String basePath = normalizeBasePath(parentPath != null && !parentPath.isBlank() ? parentPath : datasetBasePath);
+        this.path = basePath + File.separator + this.id;
         if (this.status == null) {
             this.status = DatasetStatusType.DRAFT;
         }
     }
 
+    private String normalizeBasePath(String basePath) {
+        String normalized = basePath;
+        while (normalized.endsWith(File.separator)) {
+            normalized = normalized.substring(0, normalized.length() - 1);
+        }
+        return normalized;
+    }
+
     public void updateBasicInfo(String name, String description, String category) {
         if (name != null && !name.isEmpty()) this.name = name;
         if (description != null) this.description = description;
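Note: with the extra parentPath argument, initCreateParam nests a child dataset's directory under its parent instead of under the global base path. A quick sketch of the resulting on-disk layout, assuming datasetBasePath is /dataset (the UUID placeholders are illustrative):

import java.io.File;

public class DatasetPathLayoutSketch {
    // Same normalization as Dataset.normalizeBasePath: drop trailing separators, append the id.
    static String childPath(String basePath, String id) {
        String normalized = basePath;
        while (normalized.endsWith(File.separator)) {
            normalized = normalized.substring(0, normalized.length() - 1);
        }
        return normalized + File.separator + id;
    }

    public static void main(String[] args) {
        String base = File.separator + "dataset";       // assumed datasetBasePath
        String root = childPath(base, "parent-uuid");   // /dataset/parent-uuid
        String nested = childPath(root, "child-uuid");  // /dataset/parent-uuid/child-uuid
        System.out.println(root);
        System.out.println(nested);
    }
}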

View File

@@ -37,10 +37,14 @@ public enum DataManagementErrorCode implements ErrorCode {
      * Dataset file already exists
      */
     DATASET_FILE_ALREADY_EXISTS("data_management.0006", "dataset file already exists"),
 
     /**
      * Directory not found
      */
-    DIRECTORY_NOT_FOUND("data_management.0007", "directory not found");
+    DIRECTORY_NOT_FOUND("data_management.0007", "directory not found"),
+    /**
+     * Child datasets exist
+     */
+    DATASET_HAS_CHILDREN("data_management.0008", "child datasets exist; deleting or moving is forbidden");
 
     private final String code;
     private final String message;

View File

@@ -26,4 +26,7 @@ public interface DatasetFileMapper extends BaseMapper<DatasetFile> {
     int update(DatasetFile file);
 
     int deleteById(@Param("id") String id);
+
+    int updateFilePathPrefix(@Param("datasetId") String datasetId,
+                             @Param("oldPrefix") String oldPrefix,
+                             @Param("newPrefix") String newPrefix);
 }

View File

@@ -25,4 +25,6 @@ public interface DatasetFileRepository extends IRepository<DatasetFile> {
     IPage<DatasetFile> findByCriteria(String datasetId, String fileType, String status, String name,
                                       IPage<DatasetFile> page);
+
+    int updateFilePathPrefix(String datasetId, String oldPrefix, String newPrefix);
 }

View File

@@ -26,4 +26,6 @@ public interface DatasetRepository extends IRepository<Dataset> {
     AllDatasetStatisticsResponse getAllDatasetStatistics();
 
     IPage<Dataset> findByCriteria(IPage<Dataset> page, DatasetPagingQuery query);
+
+    long countByParentId(String parentDatasetId);
 }

View File

@@ -56,4 +56,9 @@ public class DatasetFileRepositoryImpl extends CrudRepository<DatasetFileMapper,
                 .eq(StringUtils.hasText(status), DatasetFile::getStatus, status)
                 .like(StringUtils.hasText(name), DatasetFile::getFileName, name));
     }
+
+    @Override
+    public int updateFilePathPrefix(String datasetId, String oldPrefix, String newPrefix) {
+        return datasetFileMapper.updateFilePathPrefix(datasetId, oldPrefix, newPrefix);
+    }
 }

View File

@@ -54,6 +54,14 @@ public class DatasetRepositoryImpl extends CrudRepository<DatasetMapper, Dataset
                 .eq(query.getType() != null, Dataset::getDatasetType, query.getType())
                 .eq(query.getStatus() != null, Dataset::getStatus, query.getStatus());
+        if (query.getParentDatasetId() != null) {
+            if (StringUtils.isBlank(query.getParentDatasetId())) {
+                wrapper.isNull(Dataset::getParentDatasetId);
+            } else {
+                wrapper.eq(Dataset::getParentDatasetId, query.getParentDatasetId());
+            }
+        }
         if (StringUtils.isNotBlank(query.getKeyword())) {
             wrapper.and(w ->
                     w.like(Dataset::getName, query.getKeyword()).or()
@@ -74,4 +82,10 @@ public class DatasetRepositoryImpl extends CrudRepository<DatasetMapper, Dataset
         wrapper.orderByDesc(Dataset::getCreatedAt);
         return datasetMapper.selectPage(page, wrapper);
     }
+
+    @Override
+    public long countByParentId(String parentDatasetId) {
+        return datasetMapper.selectCount(new LambdaQueryWrapper<Dataset>()
+                .eq(Dataset::getParentDatasetId, parentDatasetId));
+    }
 }
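Note: findByCriteria treats the hierarchy filter in three ways — a null parentDatasetId applies no condition, a blank one matches parent_dataset_id IS NULL (root datasets), and anything else matches an exact parent. A plain-Java sketch of the same branching, with the MyBatis-Plus wrapper elided (illustrative only; the real code appends conditions to a LambdaQueryWrapper):

public class ParentFilterSketch {
    static String condition(String parentDatasetId) {
        if (parentDatasetId == null) {
            return "";                                  // no hierarchy filter
        }
        if (parentDatasetId.isBlank()) {
            return "AND parent_dataset_id IS NULL";     // root datasets only
        }
        return "AND parent_dataset_id = ?";             // children of one dataset
    }

    public static void main(String[] args) {
        System.out.println(condition(null));
        System.out.println(condition(""));
        System.out.println(condition("parent-uuid"));
    }
}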

View File

@@ -29,6 +29,8 @@ public class CreateDatasetRequest {
     /** dataset type */
     @NotNull(message = "dataset type must not be empty")
     private DatasetType datasetType;
+    /** parent dataset ID */
+    private String parentDatasetId;
     /** tag list */
     private List<String> tags;
     /** data source */

View File

@@ -39,4 +39,9 @@ public class DatasetPagingQuery extends PagingQuery {
      * status filter
      */
     private DatasetStatusType status;
+
+    /**
+     * parent dataset ID filter
+     */
+    private String parentDatasetId;
 }

View File

@@ -15,6 +15,8 @@ import java.util.Map;
 public class DatasetResponse {
     /** dataset ID */
     private String id;
+    /** parent dataset ID */
+    private String parentDatasetId;
     /** dataset name */
     private String name;
     /** dataset description */

View File

@@ -23,6 +23,8 @@ public class UpdateDatasetRequest {
     private String description;
     /** collection task id */
     private String dataSource;
+    /** parent dataset ID */
+    private String parentDatasetId;
     /** tag list */
     private List<String> tags;
     /** dataset status */

View File

@@ -95,4 +95,11 @@
     <delete id="deleteById" parameterType="string">
         DELETE FROM t_dm_dataset_files WHERE id = #{id}
     </delete>
+
+    <update id="updateFilePathPrefix">
+        UPDATE t_dm_dataset_files
+        SET file_path = CONCAT(#{newPrefix}, SUBSTRING(file_path, LENGTH(#{oldPrefix}) + 1))
+        WHERE dataset_id = #{datasetId}
+          AND file_path LIKE CONCAT(#{oldPrefix}, '%')
+    </update>
 </mapper>
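Note: the updateFilePathPrefix statement swaps the leading directory of every file path under the dataset; the LIKE guard leaves non-matching rows untouched. The string arithmetic, expressed as an equivalent Java sketch (paths are example values):

public class PrefixRewriteSketch {
    // Java equivalent of CONCAT(#{newPrefix}, SUBSTRING(file_path, LENGTH(#{oldPrefix}) + 1)),
    // guarded the same way as file_path LIKE CONCAT(#{oldPrefix}, '%').
    static String rewrite(String filePath, String oldPrefix, String newPrefix) {
        if (!filePath.startsWith(oldPrefix)) {
            return filePath;
        }
        return newPrefix + filePath.substring(oldPrefix.length());
    }

    public static void main(String[] args) {
        System.out.println(rewrite(
                "/dataset/child-uuid/images/a.png",
                "/dataset/child-uuid",
                "/dataset/parent-uuid/child-uuid"));
        // -> /dataset/parent-uuid/child-uuid/images/a.png
    }
}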

View File

@@ -4,12 +4,13 @@
 <mapper namespace="com.datamate.datamanagement.infrastructure.persistence.mapper.DatasetMapper">
     <sql id="Base_Column_List">
-        id, name, description, dataset_type, category, path, format, schema_info, size_bytes, file_count, record_count,
+        id, parent_dataset_id, name, description, dataset_type, category, path, format, schema_info, size_bytes, file_count, record_count,
         retention_days, tags, metadata, status, is_public, is_featured, version, created_at, updated_at, created_by, updated_by
     </sql>
 
     <sql id="Alias_D_Column_List">
         d.id AS id,
+        d.parent_dataset_id AS parent_dataset_id,
         d.name AS name,
         d.description AS description,
         d.dataset_type AS dataset_type,

View File

@@ -17,6 +17,7 @@ export default function DatasetCreate() {
description: "", description: "",
datasetType: DatasetType.TEXT, datasetType: DatasetType.TEXT,
tags: [], tags: [],
parentDatasetId: "",
}); });
const handleSubmit = async () => { const handleSubmit = async () => {

View File

@@ -26,6 +26,7 @@ export default function EditDataset({
description: "", description: "",
datasetType: DatasetType.TEXT, datasetType: DatasetType.TEXT,
tags: [], tags: [],
parentDatasetId: "",
}); });
const fetchDataset = async () => { const fetchDataset = async () => {
if (!open) return; if (!open) return;
@@ -36,6 +37,7 @@ export default function EditDataset({
         ...newData,
         type: newData.type,
         tags: newData.tags.map((tag) => tag.name) || [],
+        parentDatasetId: newData.parentDatasetId || "",
       };
       setNewDataset(updatedDataset);
       form.setFieldsValue(updatedDataset);

View File

@@ -2,7 +2,7 @@ import RadioCard from "@/components/RadioCard";
 import { Input, Select, Form } from "antd";
 import { datasetTypes } from "../../dataset.const";
 import { useEffect, useState } from "react";
-import { queryDatasetTagsUsingGet } from "../../dataset.api";
+import { queryDatasetTagsUsingGet, queryDatasetsUsingGet } from "../../dataset.api";
 import { queryTasksUsingGet } from "@/pages/DataCollection/collection.apis.ts";
 
 export default function BasicInformation({
@@ -22,6 +22,9 @@ export default function BasicInformation({
     }[]
   >([]);
   const [collectionOptions, setCollectionOptions] = useState([]);
+  const [parentDatasetOptions, setParentDatasetOptions] = useState<
+    { label: string; value: string }[]
+  >([]);
 
   // fetch tags
   const fetchTags = async () => {
@@ -52,10 +55,36 @@ export default function BasicInformation({
     }
   };
 
+  const fetchParentDatasets = async () => {
+    if (hidden.includes("parentDatasetId")) return;
+    try {
+      const { data: resData } = await queryDatasetsUsingGet({
+        parentDatasetId: "",
+        page: 1,
+        size: 1000,
+      });
+      const currentDatasetId = data?.id;
+      const rootDatasets = resData?.content || [];
+      const options = rootDatasets
+        .filter((dataset) => dataset.id !== currentDatasetId)
+        .map((dataset) => ({
+          label: dataset.name,
+          value: dataset.id,
+        }));
+      setParentDatasetOptions([
+        { label: "Root dataset", value: "" },
+        ...options,
+      ]);
+    } catch (error) {
+      console.error("Error fetching parent datasets:", error);
+    }
+  };
+
   useEffect(() => {
     fetchTags();
     fetchCollectionTasks();
-  }, []);
+    fetchParentDatasets();
+  }, [data?.id, hidden.join(",")]);
 
   return (
     <>
       <Form.Item
@@ -70,6 +99,15 @@ export default function BasicInformation({
         <Input.TextArea placeholder="Describe the dataset's purpose and contents" rows={3} />
       </Form.Item>
       )}
+      {!hidden.includes("parentDatasetId") && (
+        <Form.Item name="parentDatasetId" label="Parent dataset">
+          <Select
+            className="w-full"
+            options={parentDatasetOptions}
+            placeholder="Select a parent dataset (only one level supported)"
+          />
+        </Form.Item>
+      )}
       {/* dataset type selection, rendered as cards */}
       {!hidden.includes("datasetType") && (

View File

@@ -37,29 +37,51 @@ const tabList = [
 export default function DatasetDetail() {
   const { id } = useParams(); // get the dynamic route param
   const navigate = useNavigate();
   const [activeTab, setActiveTab] = useState("overview");
   const { message } = App.useApp();
   const [showEditDialog, setShowEditDialog] = useState(false);
   const [dataset, setDataset] = useState<Dataset>({} as Dataset);
+  const [parentDataset, setParentDataset] = useState<Dataset | null>(null);
   const filesOperation = useFilesOperation(dataset);
   const [showUploadDialog, setShowUploadDialog] = useState(false);
 
-  const navigateItems = useMemo(
-    () => [
-      {
-        title: <Link to="/data/management"></Link>,
-      },
-      {
-        title: dataset.name || "Dataset details",
-      },
-    ],
-    [dataset]
-  );
+  const navigateItems = useMemo(() => {
+    const items = [
+      {
+        title: <Link to="/data/management"></Link>,
+      },
+    ];
+    if (parentDataset) {
+      items.push({
+        title: (
+          <Link to={`/data/management/detail/${parentDataset.id}`}>
+            {parentDataset.name}
+          </Link>
+        ),
+      });
+    }
+    items.push({
+      title: dataset.name || "Dataset details",
+    });
+    return items;
+  }, [dataset, parentDataset]);
 
-  const fetchDataset = async () => {
-    const { data } = await queryDatasetByIdUsingGet(id as unknown as number);
-    setDataset(mapDataset(data));
-  };
+  const fetchDataset = async () => {
+    if (!id) {
+      return;
+    }
+    const { data } = await queryDatasetByIdUsingGet(id);
+    const mapped = mapDataset(data);
+    setDataset(mapped);
+    if (data?.parentDatasetId) {
+      const { data: parentData } = await queryDatasetByIdUsingGet(
+        data.parentDatasetId
+      );
+      setParentDataset(mapDataset(parentData));
+    } else {
+      setParentDataset(null);
+    }
+  };
 
   useEffect(() => {
     fetchDataset();

View File

@@ -21,7 +21,7 @@ export function useFilesOperation(dataset: Dataset) {
     // file-related state
     const [fileList, setFileList] = useState<DatasetFile[]>([]);
-    const [selectedFiles, setSelectedFiles] = useState<number[]>([]);
+    const [selectedFiles, setSelectedFiles] = useState<string[]>([]);
     const [pagination, setPagination] = useState<{
         current: number;
         pageSize: number;

View File

@@ -135,9 +135,9 @@ export default function DatasetManagementPage() {
         message.success("Dataset downloaded successfully");
     };
 
-    const handleDeleteDataset = async (id: number) => {
+    const handleDeleteDataset = async (id: string) => {
         if (!id) return;
         await deleteDatasetByIdUsingDelete(id);
         fetchData({ pageOffset: 0 });
         message.success("Dataset deleted successfully");
     };

View File

@@ -33,7 +33,7 @@ export enum DataSource {
 }
 
 export interface DatasetFile {
-  id: number;
+  id: string;
   fileName: string;
   size: string;
   uploadDate: string;
@@ -41,10 +41,10 @@ export interface DatasetFile {
 }
 
 export interface Dataset {
-  id: number;
+  id: string;
   name: string;
   description: string;
-  parentId?: number;
+  parentDatasetId?: string;
   datasetType: DatasetType;
   status: DatasetStatus;
   size?: string;
@@ -77,7 +77,7 @@ export interface ScheduleConfig {
 }
 
 export interface DatasetTask {
-  id: number;
+  id: string;
   name: string;
   description: string;
   type: string;

View File

@@ -14,6 +14,7 @@ class Dataset(Base):
     __tablename__ = "t_dm_datasets"
 
     id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()), comment="UUID")
+    parent_dataset_id = Column(String(36), nullable=True, comment="parent dataset ID (UUID)")
     name = Column(String(255), nullable=False, comment="dataset name")
     description = Column(Text, nullable=True, comment="dataset description")
     dataset_type = Column(String(50), nullable=False, comment="dataset type: IMAGE/TEXT/QA/MULTIMODAL/OTHER")

View File

@@ -195,8 +195,8 @@ def _update_task_status(
         conn.execute(sql, params)
 
 
 def _load_dataset_files(dataset_id: str) -> List[Tuple[str, str, str]]:
     """Load all completed files under the given dataset."""
     sql = text(
         """
@@ -208,9 +208,45 @@ def _load_dataset_files(dataset_id: str) -> List[Tuple[str, str, str]]:
""" """
) )
with SQLManager.create_connect() as conn: with SQLManager.create_connect() as conn:
rows = conn.execute(sql, {"dataset_id": dataset_id}).fetchall() rows = conn.execute(sql, {"dataset_id": dataset_id}).fetchall()
return [(str(r[0]), str(r[1]), str(r[2])) for r in rows] return [(str(r[0]), str(r[1]), str(r[2])) for r in rows]
def _load_dataset_meta(dataset_id: str) -> Optional[Dict[str, Any]]:
"""加载数据集基础信息(含父ID与路径)。"""
sql = text(
"""
SELECT id, name, parent_dataset_id, path
FROM t_dm_datasets
WHERE id = :dataset_id
"""
)
with SQLManager.create_connect() as conn:
row = conn.execute(sql, {"dataset_id": dataset_id}).fetchone()
if not row:
return None
return dict(row._mapping) # type: ignore[attr-defined]
def _resolve_output_parent(source_dataset_id: str) -> Tuple[Optional[str], str]:
"""根据源数据集确定产出数据集的父级与基路径(产出挂在父级下)。"""
base_path = DEFAULT_OUTPUT_ROOT.rstrip("/") or "/dataset"
source_meta = _load_dataset_meta(source_dataset_id)
if not source_meta:
return None, base_path
parent_dataset_id = source_meta.get("parent_dataset_id")
if not parent_dataset_id:
return None, base_path
parent_meta = _load_dataset_meta(str(parent_dataset_id))
parent_path = parent_meta.get("path") if parent_meta else None
if not parent_path:
return None, base_path
return str(parent_dataset_id), str(parent_path)
def _load_files_by_ids(file_ids: List[str]) -> List[Tuple[str, str, str]]: def _load_files_by_ids(file_ids: List[str]) -> List[Tuple[str, str, str]]:
@@ -245,34 +281,35 @@ def _ensure_output_dir(output_dir: str) -> str:
     return output_dir
 
 
 def _create_output_dataset(
     source_dataset_id: str,
     source_dataset_name: str,
     output_dataset_name: str,
 ) -> Tuple[str, str]:
     """Create a new dataset for the auto-annotation results and return (dataset_id, path)."""
     new_dataset_id = str(uuid.uuid4())
-    dataset_base_path = DEFAULT_OUTPUT_ROOT.rstrip("/") or "/dataset"
+    parent_dataset_id, dataset_base_path = _resolve_output_parent(source_dataset_id)
     output_dir = os.path.join(dataset_base_path, new_dataset_id)
     description = (
         f"Auto annotations for dataset {source_dataset_name or source_dataset_id}"[:255]
     )
     sql = text(
         """
-        INSERT INTO t_dm_datasets (id, name, description, dataset_type, path, status)
-        VALUES (:id, :name, :description, :dataset_type, :path, :status)
+        INSERT INTO t_dm_datasets (id, parent_dataset_id, name, description, dataset_type, path, status)
+        VALUES (:id, :parent_dataset_id, :name, :description, :dataset_type, :path, :status)
         """
     )
     params = {
         "id": new_dataset_id,
+        "parent_dataset_id": parent_dataset_id,
         "name": output_dataset_name,
         "description": description,
         "dataset_type": "IMAGE",
         "path": output_dir,
         "status": "ACTIVE",
     }
     with SQLManager.create_connect() as conn:

View File

@@ -15,6 +15,7 @@ USE datamate;
 -- dataset table (supports medical imaging, text, QA, and other types)
 CREATE TABLE IF NOT EXISTS t_dm_datasets (
     id VARCHAR(36) PRIMARY KEY COMMENT 'UUID',
+    parent_dataset_id VARCHAR(36) NULL COMMENT 'parent dataset ID (UUID)',
     name VARCHAR(255) NOT NULL COMMENT 'dataset name',
     description TEXT COMMENT 'dataset description',
     dataset_type VARCHAR(50) NOT NULL COMMENT 'dataset type: IMAGE/TEXT/QA/MULTIMODAL/OTHER',
@@ -36,13 +37,15 @@ CREATE TABLE IF NOT EXISTS t_dm_datasets (
     updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
     created_by VARCHAR(255) COMMENT 'created by',
     updated_by VARCHAR(255) COMMENT 'updated by',
+    INDEX idx_dm_parent_dataset_id (parent_dataset_id),
     INDEX idx_dm_dataset_type (dataset_type),
     INDEX idx_dm_category (category),
     INDEX idx_dm_format (format),
     INDEX idx_dm_status (status),
     INDEX idx_dm_public (is_public),
     INDEX idx_dm_featured (is_featured),
-    INDEX idx_dm_created_at (created_at)
+    INDEX idx_dm_created_at (created_at),
+    FOREIGN KEY (parent_dataset_id) REFERENCES t_dm_datasets(id) ON DELETE RESTRICT
 ) COMMENT='dataset table (UUID primary key)';
 
 -- dataset file table