Merge branch 'dataset_parent' into lsf

2026-01-21 00:17:20 +08:00
26 changed files with 574 additions and 249 deletions

View File

@@ -61,12 +61,17 @@ paths:
schema:
type: string
description: 关键词搜索(名称、描述)
- name: status
in: query
schema:
type: string
enum: [DRAFT, ACTIVE, PROCESSING, ARCHIVED, PUBLISHED, DEPRECATED]
description: 数据集状态过滤
- name: status
in: query
schema:
type: string
enum: [DRAFT, ACTIVE, PROCESSING, ARCHIVED, PUBLISHED, DEPRECATED]
description: 数据集状态过滤
- name: parentDatasetId
in: query
schema:
type: string
description: 父数据集ID过滤(传空字符串表示根数据集)
responses:
'200':
description: 成功
@@ -538,15 +543,18 @@ components:
type: boolean
description: 是否为最后一页
DatasetResponse:
type: object
properties:
id:
type: string
description: 数据集ID
name:
type: string
description: 数据集名称
DatasetResponse:
type: object
properties:
id:
type: string
description: 数据集ID
parentDatasetId:
type: string
description: 父数据集ID
name:
type: string
description: 数据集名称
description:
type: string
description: 数据集描述
@@ -590,29 +598,32 @@ components:
type: string
description: 创建者
CreateDatasetRequest:
type: object
required:
- name
- type
properties:
name:
type: string
description: 数据集名称
minLength: 1
maxLength: 100
description:
type: string
description: 数据集描述
maxLength: 500
type:
type: string
description: 数据集类型
tags:
type: array
items:
type: string
description: 标签列表
CreateDatasetRequest:
type: object
required:
- name
- type
properties:
name:
type: string
description: 数据集名称
minLength: 1
maxLength: 100
description:
type: string
description: 数据集描述
maxLength: 500
type:
type: string
description: 数据集类型
parentDatasetId:
type: string
description: 父数据集ID
tags:
type: array
items:
type: string
description: 标签列表
dataSource:
type: string
description: 数据源
@@ -620,22 +631,25 @@ components:
type: string
description: 目标位置
UpdateDatasetRequest:
type: object
properties:
name:
type: string
description: 数据集名称
maxLength: 100
description:
type: string
description: 数据集描述
maxLength: 500
tags:
type: array
items:
type: string
description: 标签列表
UpdateDatasetRequest:
type: object
properties:
name:
type: string
description: 数据集名称
maxLength: 100
description:
type: string
description: 数据集描述
maxLength: 500
parentDatasetId:
type: string
description: 父数据集ID
tags:
type: array
items:
type: string
description: 标签列表
status:
type: string
enum: [DRAFT, ACTIVE, PROCESSING, ARCHIVED, PUBLISHED, DEPRECATED]
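
A minimal client-side sketch of the new filter, assuming the listing endpoint is GET /api/datasets on a local instance (the base URL and port are placeholders, not part of this commit): sending parentDatasetId with an empty value restricts the listing to root datasets, a concrete id lists that dataset's children, and omitting the parameter disables the filter.

// Hypothetical caller of the dataset listing API; base URL and port are assumptions.
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ListRootDatasetsExample {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        // parentDatasetId= (empty value) => only root datasets; a concrete id => children of that dataset
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/api/datasets?parentDatasetId=&page=1&size=20"))
                .GET()
                .build();
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body());
    }
}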

View File

@@ -6,6 +6,9 @@ import com.datamate.common.domain.utils.ChunksSaver;
import com.datamate.common.setting.application.SysParamApplicationService;
import com.datamate.datamanagement.interfaces.dto.*;
import com.datamate.common.infrastructure.exception.BusinessAssert;
import com.datamate.common.infrastructure.exception.BusinessException;
import com.datamate.common.infrastructure.exception.CommonErrorCode;
import com.datamate.common.infrastructure.exception.SystemErrorCode;
import com.datamate.common.interfaces.PagedResponse;
import com.datamate.datamanagement.domain.model.dataset.Dataset;
import com.datamate.datamanagement.domain.model.dataset.DatasetFile;
@@ -27,6 +30,7 @@ import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.StringUtils;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -62,7 +66,9 @@ public class DatasetApplicationService {
BusinessAssert.isTrue(datasetRepository.findByName(createDatasetRequest.getName()) == null, DataManagementErrorCode.DATASET_ALREADY_EXISTS);
// 创建数据集对象
Dataset dataset = DatasetConverter.INSTANCE.convertToDataset(createDatasetRequest);
dataset.initCreateParam(datasetBasePath);
Dataset parentDataset = resolveParentDataset(createDatasetRequest.getParentDatasetId(), dataset.getId());
dataset.setParentDatasetId(parentDataset == null ? null : parentDataset.getId());
dataset.initCreateParam(datasetBasePath, parentDataset == null ? null : parentDataset.getPath());
// 处理标签
Set<Tag> processedTags = Optional.ofNullable(createDatasetRequest.getTags())
.filter(CollectionUtils::isNotEmpty)
@@ -98,6 +104,9 @@ public class DatasetApplicationService {
if (Objects.nonNull(updateDatasetRequest.getStatus())) {
dataset.setStatus(updateDatasetRequest.getStatus());
}
if (updateDatasetRequest.getParentDatasetId() != null) {
handleParentChange(dataset, updateDatasetRequest.getParentDatasetId());
}
if (StringUtils.hasText(updateDatasetRequest.getDataSource())) {
// 数据源id不为空,使用异步线程进行文件扫盘落库
processDataSourceAsync(dataset.getId(), updateDatasetRequest.getDataSource());
@@ -112,10 +121,11 @@ public class DatasetApplicationService {
@Transactional
public void deleteDataset(String datasetId) {
Dataset dataset = datasetRepository.getById(datasetId);
BusinessAssert.notNull(dataset, DataManagementErrorCode.DATASET_NOT_FOUND);
long childCount = datasetRepository.countByParentId(datasetId);
BusinessAssert.isTrue(childCount == 0, DataManagementErrorCode.DATASET_HAS_CHILDREN);
datasetRepository.removeById(datasetId);
if (dataset != null) {
ChunksSaver.deleteFolder(dataset.getPath());
}
ChunksSaver.deleteFolder(dataset.getPath());
}
/**
@@ -164,6 +174,73 @@ public class DatasetApplicationService {
return tags;
}
private Dataset resolveParentDataset(String parentDatasetId, String currentDatasetId) {
String normalized = normalizeParentId(parentDatasetId);
if (normalized == null) {
return null;
}
BusinessAssert.isTrue(!normalized.equals(currentDatasetId), CommonErrorCode.PARAM_ERROR);
Dataset parent = datasetRepository.getById(normalized);
BusinessAssert.notNull(parent, DataManagementErrorCode.DATASET_NOT_FOUND);
BusinessAssert.isTrue(parent.getParentDatasetId() == null, CommonErrorCode.PARAM_ERROR);
return parent;
}
private void handleParentChange(Dataset dataset, String parentDatasetId) {
String normalized = normalizeParentId(parentDatasetId);
if (Objects.equals(dataset.getParentDatasetId(), normalized)) {
return;
}
long childCount = datasetRepository.countByParentId(dataset.getId());
if (childCount > 0 && normalized != null) {
throw BusinessException.of(DataManagementErrorCode.DATASET_HAS_CHILDREN);
}
Dataset parent = normalized == null ? null : resolveParentDataset(normalized, dataset.getId());
String newPath = buildDatasetPath(parent == null ? datasetBasePath : parent.getPath(), dataset.getId());
moveDatasetPath(dataset, newPath);
dataset.setParentDatasetId(parent == null ? null : parent.getId());
}
private String normalizeParentId(String parentDatasetId) {
if (!StringUtils.hasText(parentDatasetId)) {
return null;
}
return parentDatasetId.trim();
}
private String buildDatasetPath(String basePath, String datasetId) {
String normalized = basePath;
while (normalized.endsWith(File.separator)) {
normalized = normalized.substring(0, normalized.length() - 1);
}
return normalized + File.separator + datasetId;
}
private void moveDatasetPath(Dataset dataset, String newPath) {
String oldPath = dataset.getPath();
if (Objects.equals(oldPath, newPath)) {
return;
}
Path sourcePath = Paths.get(oldPath);
Path targetPath = Paths.get(newPath);
try {
if (Files.exists(sourcePath)) {
if (Files.exists(targetPath)) {
throw BusinessException.of(CommonErrorCode.PARAM_ERROR);
}
Files.createDirectories(targetPath.getParent());
Files.move(sourcePath, targetPath);
} else {
Files.createDirectories(targetPath);
}
} catch (IOException e) {
log.error("move dataset path error, from {} to {}", oldPath, newPath, e);
throw BusinessException.of(SystemErrorCode.FILE_SYSTEM_ERROR);
}
datasetFileRepository.updateFilePathPrefix(dataset.getId(), oldPath, newPath);
dataset.setPath(newPath);
}
/**
* 获取数据集统计信息
*/
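
To make the hierarchy rules enforced by resolveParentDataset and handleParentChange above easier to scan, here is a standalone restatement as a small sketch (simplified names and exceptions; not the production code): only one level of nesting is allowed, a dataset cannot be its own parent, and a dataset that still has children cannot be moved under a parent.

// Illustrative restatement of the one-level hierarchy rules above; simplified sketch, not the service code.
final class ParentRules {
    /** Throws IllegalArgumentException when the requested parent is not allowed. */
    static void check(String datasetId, String requestedParentId,
                      String parentOfRequestedParent, long childCountOfDataset) {
        if (requestedParentId == null || requestedParentId.isBlank()) {
            return; // null / empty => the dataset becomes (or stays) a root dataset
        }
        if (requestedParentId.equals(datasetId)) {
            throw new IllegalArgumentException("a dataset cannot be its own parent");
        }
        if (parentOfRequestedParent != null) {
            throw new IllegalArgumentException("only one level of nesting: the parent must be a root dataset");
        }
        if (childCountOfDataset > 0) {
            throw new IllegalArgumentException("a dataset that has children cannot be moved under a parent");
        }
    }
}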

View File

@@ -323,10 +323,11 @@ public class DatasetFileApplicationService {
* @return 请求id
*/
@Transactional
public String preUpload(UploadFilesPreRequest chunkUploadRequest, String datasetId) {
if (Objects.isNull(datasetRepository.getById(datasetId))) {
throw BusinessException.of(DataManagementErrorCode.DATASET_NOT_FOUND);
}
public String preUpload(UploadFilesPreRequest chunkUploadRequest, String datasetId) {
Dataset dataset = datasetRepository.getById(datasetId);
if (Objects.isNull(dataset)) {
throw BusinessException.of(DataManagementErrorCode.DATASET_NOT_FOUND);
}
// 构建上传路径,如果有 prefix 则追加到路径中
String prefix = Optional.ofNullable(chunkUploadRequest.getPrefix()).orElse("").trim();
@@ -335,10 +336,13 @@ public class DatasetFileApplicationService {
prefix = prefix.substring(1);
}
String uploadPath = datasetBasePath + File.separator + datasetId;
if (!prefix.isEmpty()) {
uploadPath = uploadPath + File.separator + prefix.replace("/", File.separator);
}
String uploadPath = dataset.getPath();
if (uploadPath == null || uploadPath.isBlank()) {
uploadPath = datasetBasePath + File.separator + datasetId;
}
if (!prefix.isEmpty()) {
uploadPath = uploadPath + File.separator + prefix.replace("/", File.separator);
}
ChunkUploadPreRequest request = ChunkUploadPreRequest.builder().build();
request.setUploadPath(uploadPath);
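
A worked example of the path resolution above with illustrative values (ids and base path are placeholders): for a child dataset whose stored path already sits under its parent, uploads land inside that nested directory, and the old flat datasetBasePath + datasetId layout is only a fallback when the stored path is blank.

// Illustrative only: mirrors the uploadPath resolution in preUpload with sample values.
import java.io.File;

public class UploadPathExample {
    public static void main(String[] args) {
        String datasetBasePath = "/dataset";                     // assumed base path
        String datasetId = "child-id";                           // hypothetical dataset id
        String datasetPath = "/dataset/parent-id/child-id";      // dataset.getPath() for a nested dataset
        String prefix = "images/raw";                            // request prefix, already trimmed

        String uploadPath = (datasetPath == null || datasetPath.isBlank())
                ? datasetBasePath + File.separator + datasetId   // fallback when no path is stored
                : datasetPath;
        if (!prefix.isEmpty()) {
            uploadPath = uploadPath + File.separator + prefix.replace("/", File.separator);
        }
        System.out.println(uploadPath);                          // /dataset/parent-id/child-id/images/raw on POSIX
    }
}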

View File

@@ -28,6 +28,10 @@ public class Dataset extends BaseEntity<String> {
* 数据集描述
*/
private String description;
/**
* 父数据集ID
*/
private String parentDatasetId;
/**
* 数据集类型
*/
@@ -110,14 +114,23 @@ public class Dataset extends BaseEntity<String> {
this.updatedAt = LocalDateTime.now();
}
public void initCreateParam(String datasetBasePath) {
public void initCreateParam(String datasetBasePath, String parentPath) {
this.id = UUID.randomUUID().toString();
this.path = datasetBasePath + File.separator + this.id;
String basePath = normalizeBasePath(parentPath != null && !parentPath.isBlank() ? parentPath : datasetBasePath);
this.path = basePath + File.separator + this.id;
if (this.status == null) {
this.status = DatasetStatusType.DRAFT;
}
}
private String normalizeBasePath(String basePath) {
String normalized = basePath;
while (normalized.endsWith(File.separator)) {
normalized = normalized.substring(0, normalized.length() - 1);
}
return normalized;
}
public void updateBasicInfo(String name, String description, String category) {
if (name != null && !name.isEmpty()) this.name = name;
if (description != null) this.description = description;

View File

@@ -37,10 +37,14 @@ public enum DataManagementErrorCode implements ErrorCode {
* 数据集文件已存在
*/
DATASET_FILE_ALREADY_EXISTS("data_management.0006", "数据集文件已存在"),
/**
* 目录不存在
*/
DIRECTORY_NOT_FOUND("data_management.0007", "目录不存在");
/**
* 目录不存在
*/
DIRECTORY_NOT_FOUND("data_management.0007", "目录不存在"),
/**
* 存在子数据集
*/
DATASET_HAS_CHILDREN("data_management.0008", "存在子数据集,禁止删除或移动");
private final String code;
private final String message;

View File

@@ -26,4 +26,7 @@ public interface DatasetFileMapper extends BaseMapper<DatasetFile> {
int update(DatasetFile file);
int deleteById(@Param("id") String id);
int updateFilePathPrefix(@Param("datasetId") String datasetId,
@Param("oldPrefix") String oldPrefix,
@Param("newPrefix") String newPrefix);
}

View File

@@ -25,4 +25,6 @@ public interface DatasetFileRepository extends IRepository<DatasetFile> {
IPage<DatasetFile> findByCriteria(String datasetId, String fileType, String status, String name,
IPage<DatasetFile> page);
int updateFilePathPrefix(String datasetId, String oldPrefix, String newPrefix);
}

View File

@@ -26,4 +26,6 @@ public interface DatasetRepository extends IRepository<Dataset> {
AllDatasetStatisticsResponse getAllDatasetStatistics();
IPage<Dataset> findByCriteria(IPage<Dataset> page, DatasetPagingQuery query);
long countByParentId(String parentDatasetId);
}

View File

@@ -56,4 +56,9 @@ public class DatasetFileRepositoryImpl extends CrudRepository<DatasetFileMapper,
.eq(StringUtils.hasText(status), DatasetFile::getStatus, status)
.like(StringUtils.hasText(name), DatasetFile::getFileName, name));
}
@Override
public int updateFilePathPrefix(String datasetId, String oldPrefix, String newPrefix) {
return datasetFileMapper.updateFilePathPrefix(datasetId, oldPrefix, newPrefix);
}
}

View File

@@ -54,6 +54,14 @@ public class DatasetRepositoryImpl extends CrudRepository<DatasetMapper, Dataset
.eq(query.getType() != null, Dataset::getDatasetType, query.getType())
.eq(query.getStatus() != null, Dataset::getStatus, query.getStatus());
if (query.getParentDatasetId() != null) {
if (StringUtils.isBlank(query.getParentDatasetId())) {
wrapper.isNull(Dataset::getParentDatasetId);
} else {
wrapper.eq(Dataset::getParentDatasetId, query.getParentDatasetId());
}
}
if (StringUtils.isNotBlank(query.getKeyword())) {
wrapper.and(w ->
w.like(Dataset::getName, query.getKeyword()).or()
@@ -74,4 +82,10 @@ public class DatasetRepositoryImpl extends CrudRepository<DatasetMapper, Dataset
wrapper.orderByDesc(Dataset::getCreatedAt);
return datasetMapper.selectPage(page, wrapper);
}
@Override
public long countByParentId(String parentDatasetId) {
return datasetMapper.selectCount(new LambdaQueryWrapper<Dataset>()
.eq(Dataset::getParentDatasetId, parentDatasetId));
}
}
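
A small usage sketch for the new filter (assuming DatasetPagingQuery exposes a Lombok-style setter, which this diff does not show, and that the caller lives in the same project packages): an empty string selects root datasets, a concrete id selects that dataset's children, and null skips the filter.

// Hypothetical caller of findByCriteria; the setter name and surrounding wiring are assumptions.
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;

public class RootDatasetQueryExample {
    public static IPage<Dataset> listRootDatasets(DatasetRepository datasetRepository) {
        DatasetPagingQuery query = new DatasetPagingQuery();
        query.setParentDatasetId("");            // "" => WHERE parent_dataset_id IS NULL
        // query.setParentDatasetId("some-id");  // => WHERE parent_dataset_id = 'some-id'
        // query.setParentDatasetId(null);       // => no parent filter at all
        return datasetRepository.findByCriteria(new Page<>(1, 20), query);
    }
}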

View File

@@ -29,6 +29,8 @@ public class CreateDatasetRequest {
/** 数据集类型 */
@NotNull(message = "数据集类型不能为空")
private DatasetType datasetType;
/** 父数据集ID */
private String parentDatasetId;
/** 标签列表 */
private List<String> tags;
/** 数据源 */

View File

@@ -39,4 +39,9 @@ public class DatasetPagingQuery extends PagingQuery {
* 状态过滤
*/
private DatasetStatusType status;
/**
* 父数据集ID过滤
*/
private String parentDatasetId;
}

View File

@@ -15,6 +15,8 @@ import java.util.Map;
public class DatasetResponse {
/** 数据集ID */
private String id;
/** 父数据集ID */
private String parentDatasetId;
/** 数据集名称 */
private String name;
/** 数据集描述 */

View File

@@ -23,6 +23,8 @@ public class UpdateDatasetRequest {
private String description;
/** 归集任务id */
private String dataSource;
/** 父数据集ID */
private String parentDatasetId;
/** 标签列表 */
private List<String> tags;
/** 数据集状态 */

View File

@@ -95,4 +95,11 @@
<delete id="deleteById" parameterType="string">
DELETE FROM t_dm_dataset_files WHERE id = #{id}
</delete>
<update id="updateFilePathPrefix">
UPDATE t_dm_dataset_files
SET file_path = CONCAT(#{newPrefix}, SUBSTRING(file_path, LENGTH(#{oldPrefix}) + 1))
WHERE dataset_id = #{datasetId}
AND file_path LIKE CONCAT(#{oldPrefix}, '%')
</update>
</mapper>
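
The UPDATE above rewrites stored file paths when a dataset directory is moved under (or out of) a parent; the same string rule as a standalone sketch with illustrative values:

// Mirrors CONCAT(#{newPrefix}, SUBSTRING(file_path, LENGTH(#{oldPrefix}) + 1)) in Java,
// applied only to paths that start with the old prefix. Illustrative values only.
public class PathPrefixRewriteExample {
    static String rewrite(String filePath, String oldPrefix, String newPrefix) {
        if (!filePath.startsWith(oldPrefix)) {
            return filePath;                              // LIKE CONCAT(#{oldPrefix}, '%') guard
        }
        return newPrefix + filePath.substring(oldPrefix.length());
    }

    public static void main(String[] args) {
        String oldPrefix = "/dataset/child-id";
        String newPrefix = "/dataset/parent-id/child-id";
        System.out.println(rewrite("/dataset/child-id/images/scan-001.png", oldPrefix, newPrefix));
        // -> /dataset/parent-id/child-id/images/scan-001.png
    }
}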

View File

@@ -4,12 +4,13 @@
<mapper namespace="com.datamate.datamanagement.infrastructure.persistence.mapper.DatasetMapper">
<sql id="Base_Column_List">
id, name, description, dataset_type, category, path, format, schema_info, size_bytes, file_count, record_count,
id, parent_dataset_id, name, description, dataset_type, category, path, format, schema_info, size_bytes, file_count, record_count,
retention_days, tags, metadata, status, is_public, is_featured, version, created_at, updated_at, created_by, updated_by
</sql>
<sql id="Alias_D_Column_List">
d.id AS id,
d.parent_dataset_id AS parent_dataset_id,
d.name AS name,
d.description AS description,
d.dataset_type AS dataset_type,

View File

@@ -17,6 +17,7 @@ export default function DatasetCreate() {
description: "",
datasetType: DatasetType.TEXT,
tags: [],
parentDatasetId: "",
});
const handleSubmit = async () => {

View File

@@ -3,7 +3,7 @@ import {
queryDatasetByIdUsingGet,
updateDatasetByIdUsingPut,
} from "../dataset.api";
import { useEffect, useState } from "react";
import { useCallback, useEffect, useState } from "react";
import { Dataset, DatasetType } from "../dataset.model";
import { App, Button, Form, Modal } from "antd";
@@ -26,8 +26,9 @@ export default function EditDataset({
description: "",
datasetType: DatasetType.TEXT,
tags: [],
parentDatasetId: "",
});
const fetchDataset = async () => {
const fetchDataset = useCallback(async () => {
if (!open) return;
// 如果有id,说明是编辑模式
if (data && data.id) {
@@ -36,18 +37,19 @@ export default function EditDataset({
...newData,
type: newData.type,
tags: newData.tags.map((tag) => tag.name) || [],
parentDatasetId: newData.parentDatasetId || "",
};
setNewDataset(updatedDataset);
form.setFieldsValue(updatedDataset);
}
};
}, [data, form, open]);
useEffect(() => {
fetchDataset();
}, [data]);
}, [fetchDataset]);
const handleValuesChange = (_, allValues) => {
setNewDataset({ ...newDataset, ...allValues });
setNewDataset((prev) => ({ ...prev, ...allValues }));
};
const handleSubmit = async () => {

View File

@@ -1,34 +1,33 @@
import RadioCard from "@/components/RadioCard";
import { Input, Select, Form } from "antd";
import { datasetTypes } from "../../dataset.const";
import { useEffect, useState } from "react";
import { queryDatasetTagsUsingGet } from "../../dataset.api";
import {queryTasksUsingGet} from "@/pages/DataCollection/collection.apis.ts";
import { useCallback, useEffect, useState } from "react";
import type { Dispatch, SetStateAction } from "react";
import { queryDatasetTagsUsingGet, queryDatasetsUsingGet } from "../../dataset.api";
import { queryTasksUsingGet } from "@/pages/DataCollection/collection.apis.ts";
import type { Dataset, TagItem } from "../../dataset.model";
export default function BasicInformation({
data,
setData,
hidden = [],
}: {
data: any;
setData: any;
data: DatasetFormData;
setData: Dispatch<SetStateAction<DatasetFormData>>;
hidden?: string[];
}) {
const [tagOptions, setTagOptions] = useState<
{
label: JSX.Element;
title: string;
options: { label: JSX.Element; value: string }[];
}[]
const [tagOptions, setTagOptions] = useState<DatasetTagOption[]>([]);
const [collectionOptions, setCollectionOptions] = useState<SelectOption[]>([]);
const [parentDatasetOptions, setParentDatasetOptions] = useState<
{ label: string; value: string }[]
>([]);
const [collectionOptions, setCollectionOptions] = useState([]);
// 获取标签
const fetchTags = async () => {
const fetchTags = useCallback(async () => {
if (hidden.includes("tags")) return;
try {
const { data } = await queryDatasetTagsUsingGet();
const customTags = data.map((tag) => ({
const tags = Array.isArray(data) ? (data as TagItem[]) : [];
const customTags = tags.map((tag) => ({
label: tag.name,
value: tag.name,
}));
@@ -36,13 +35,16 @@ export default function BasicInformation({
} catch (error) {
console.error("Error fetching tags: ", error);
}
};
}, [hidden]);
// 获取归集任务
const fetchCollectionTasks = async () => {
const fetchCollectionTasks = useCallback(async () => {
try {
const res = await queryTasksUsingGet({ page: 0, size: 100 });
const options = res.data.content.map((task: any) => ({
const tasks = Array.isArray(res?.data?.content)
? (res.data.content as CollectionTask[])
: [];
const options = tasks.map((task) => ({
label: task.name,
value: task.id,
}));
@@ -50,12 +52,40 @@ export default function BasicInformation({
} catch (error) {
console.error("Error fetching collection tasks:", error);
}
};
}, []);
const fetchParentDatasets = useCallback(async () => {
if (hidden.includes("parentDatasetId")) return;
try {
const { data: resData } = await queryDatasetsUsingGet({
parentDatasetId: "",
page: 1,
size: 1000,
});
const currentDatasetId = data?.id;
const rootDatasets = Array.isArray(resData?.content)
? (resData.content as DatasetSummary[])
: [];
const options = rootDatasets
.filter((dataset) => dataset.id !== currentDatasetId)
.map((dataset) => ({
label: dataset.name,
value: dataset.id,
}));
setParentDatasetOptions([
{ label: "根数据集", value: "" },
...options,
]);
} catch (error) {
console.error("Error fetching parent datasets:", error);
}
}, [data?.id, hidden]);
useEffect(() => {
fetchTags();
fetchCollectionTasks();
}, []);
fetchParentDatasets();
}, [fetchTags, fetchCollectionTasks, fetchParentDatasets]);
return (
<>
<Form.Item
@@ -70,6 +100,15 @@ export default function BasicInformation({
<Input.TextArea placeholder="描述数据集的用途和内容" rows={3} />
</Form.Item>
)}
{!hidden.includes("parentDatasetId") && (
<Form.Item name="parentDatasetId" label="父数据集">
<Select
className="w-full"
options={parentDatasetOptions}
placeholder="选择父数据集(仅支持一层)"
/>
</Form.Item>
)}
{/* 数据集类型选择 - 使用卡片形式 */}
{!hidden.includes("datasetType") && (
@@ -103,3 +142,28 @@ export default function BasicInformation({
</>
);
}
type DatasetFormData = Partial<Dataset> & {
type?: string;
parentDatasetId?: string;
};
type DatasetTagOption = {
label: string;
value: string;
};
type SelectOption = {
label: string;
value: string;
};
type CollectionTask = {
id: string;
name: string;
};
type DatasetSummary = {
id: string;
name: string;
};

View File

@@ -37,29 +37,51 @@ const tabList = [
export default function DatasetDetail() {
const { id } = useParams(); // 获取动态路由参数
const navigate = useNavigate();
const [activeTab, setActiveTab] = useState("overview");
const { message } = App.useApp();
const [showEditDialog, setShowEditDialog] = useState(false);
const [dataset, setDataset] = useState<Dataset>({} as Dataset);
const filesOperation = useFilesOperation(dataset);
const [activeTab, setActiveTab] = useState("overview");
const { message } = App.useApp();
const [showEditDialog, setShowEditDialog] = useState(false);
const [dataset, setDataset] = useState<Dataset>({} as Dataset);
const [parentDataset, setParentDataset] = useState<Dataset | null>(null);
const filesOperation = useFilesOperation(dataset);
const [showUploadDialog, setShowUploadDialog] = useState(false);
const navigateItems = useMemo(
() => [
{
title: <Link to="/data/management"></Link>,
},
{
title: dataset.name || "数据集详情",
},
],
[dataset]
);
const fetchDataset = async () => {
const { data } = await queryDatasetByIdUsingGet(id as unknown as number);
setDataset(mapDataset(data));
};
const navigateItems = useMemo(() => {
const items = [
{
title: <Link to="/data/management"></Link>,
},
];
if (parentDataset) {
items.push({
title: (
<Link to={`/data/management/detail/${parentDataset.id}`}>
{parentDataset.name}
</Link>
),
});
}
items.push({
title: dataset.name || "数据集详情",
});
return items;
}, [dataset, parentDataset]);
const fetchDataset = async () => {
if (!id) {
return;
}
const { data } = await queryDatasetByIdUsingGet(id);
const mapped = mapDataset(data);
setDataset(mapped);
if (data?.parentDatasetId) {
const { data: parentData } = await queryDatasetByIdUsingGet(
data.parentDatasetId
);
setParentDataset(mapDataset(parentData));
} else {
setParentDataset(null);
}
};
useEffect(() => {
fetchDataset();

View File

@@ -3,7 +3,7 @@ import type {
DatasetFile,
} from "@/pages/DataManagement/dataset.model";
import { App } from "antd";
import { useState } from "react";
import { useState } from "react";
import {
deleteDatasetFileUsingDelete,
downloadFileByIdUsingGet,
@@ -21,7 +21,7 @@ export function useFilesOperation(dataset: Dataset) {
// 文件相关状态
const [fileList, setFileList] = useState<DatasetFile[]>([]);
const [selectedFiles, setSelectedFiles] = useState<number[]>([]);
const [selectedFiles, setSelectedFiles] = useState<string[]>([]);
const [pagination, setPagination] = useState<{
current: number;
pageSize: number;
@@ -34,16 +34,20 @@ export function useFilesOperation(dataset: Dataset) {
const [previewContent, setPreviewContent] = useState("");
const [previewFileName, setPreviewFileName] = useState("");
const fetchFiles = async (prefix?: string, current?, pageSize?) => {
// 如果明确传了 prefix(包括空字符串),使用传入的值;否则使用当前 pagination.prefix
const targetPrefix = prefix !== undefined ? prefix : (pagination.prefix || '');
const params: any = {
page: current !== undefined ? current : pagination.current,
size: pageSize !== undefined ? pageSize : pagination.pageSize,
isWithDirectory: true,
prefix: targetPrefix,
};
const fetchFiles = async (
prefix?: string,
current?: number,
pageSize?: number
) => {
// 如果明确传了 prefix(包括空字符串),使用传入的值;否则使用当前 pagination.prefix
const targetPrefix = prefix !== undefined ? prefix : (pagination.prefix || '');
const params: DatasetFilesQueryParams = {
page: current !== undefined ? current : pagination.current,
size: pageSize !== undefined ? pageSize : pagination.pageSize,
isWithDirectory: true,
prefix: targetPrefix,
};
const { data } = await queryDatasetFilesUsingGet(id!, params);
setFileList(data.content || []);
@@ -86,28 +90,28 @@ export function useFilesOperation(dataset: Dataset) {
setSelectedFiles([]); // 清空选中状态
};
const handleShowFile = (file: any) => async () => {
// 请求文件内容并弹窗预览
try {
const res = await fetch(`/api/datasets/${dataset.id}/file/${file.id}`);
const data = await res.text();
setPreviewFileName(file.fileName);
setPreviewContent(data);
setPreviewVisible(true);
} catch (err) {
message.error({ content: "文件预览失败" });
}
};
const handleDeleteFile = async (file) => {
try {
await deleteDatasetFileUsingDelete(dataset.id, file.id);
fetchFiles(); // 刷新文件列表
message.success({ content: `文件 ${file.fileName} 已删除` });
} catch (error) {
message.error({ content: `文件 ${file.fileName} 删除失败` });
}
};
const handleShowFile = (file: DatasetFile) => async () => {
// 请求文件内容并弹窗预览
try {
const res = await fetch(`/api/datasets/${dataset.id}/file/${file.id}`);
const data = await res.text();
setPreviewFileName(file.fileName);
setPreviewContent(data);
setPreviewVisible(true);
} catch {
message.error({ content: "文件预览失败" });
}
};
const handleDeleteFile = async (file: DatasetFile) => {
try {
await deleteDatasetFileUsingDelete(dataset.id, file.id);
fetchFiles(); // 刷新文件列表
message.success({ content: `文件 ${file.fileName} 已删除` });
} catch {
message.error({ content: `文件 ${file.fileName} 删除失败` });
}
};
const handleBatchExport = () => {
if (selectedFiles.length === 0) {
@@ -158,29 +162,36 @@ export function useFilesOperation(dataset: Dataset) {
// 创建成功后刷新当前目录,重置到第一页
await fetchFiles(currentPrefix, 1, pagination.pageSize);
message.success({ content: `文件夹 ${directoryName} 创建成功` });
} catch (error) {
message.error({ content: `文件夹 ${directoryName} 创建失败` });
throw error;
}
},
handleDownloadDirectory: async (directoryPath: string, directoryName: string) => {
try {
await downloadDirectoryUsingGet(dataset.id, directoryPath);
message.success({ content: `文件夹 ${directoryName} 下载成功` });
} catch (error) {
message.error({ content: `文件夹 ${directoryName} 下载失败` });
}
},
handleDeleteDirectory: async (directoryPath: string, directoryName: string) => {
try {
await deleteDirectoryUsingDelete(dataset.id, directoryPath);
// 删除成功后刷新当前目录
const currentPrefix = pagination.prefix || "";
await fetchFiles(currentPrefix, 1, pagination.pageSize);
message.success({ content: `文件夹 ${directoryName} 已删除` });
} catch (error) {
message.error({ content: `文件夹 ${directoryName} 删除失败` });
}
},
};
}
} catch (caught) {
message.error({ content: `文件夹 ${directoryName} 创建失败` });
throw caught;
}
},
handleDownloadDirectory: async (directoryPath: string, directoryName: string) => {
try {
await downloadDirectoryUsingGet(dataset.id, directoryPath);
message.success({ content: `文件夹 ${directoryName} 下载成功` });
} catch {
message.error({ content: `文件夹 ${directoryName} 下载失败` });
}
},
handleDeleteDirectory: async (directoryPath: string, directoryName: string) => {
try {
await deleteDirectoryUsingDelete(dataset.id, directoryPath);
// 删除成功后刷新当前目录
const currentPrefix = pagination.prefix || "";
await fetchFiles(currentPrefix, 1, pagination.pageSize);
message.success({ content: `文件夹 ${directoryName} 已删除` });
} catch {
message.error({ content: `文件夹 ${directoryName} 删除失败` });
}
},
};
}
interface DatasetFilesQueryParams {
page: number;
size: number;
isWithDirectory: boolean;
prefix: string;
}

View File

@@ -8,7 +8,8 @@ import {
} from "@ant-design/icons";
import TagManager from "@/components/business/TagManagement";
import { Link, useNavigate } from "react-router";
import { useEffect, useMemo, useState } from "react";
import { useEffect, useMemo, useState } from "react";
import type { ReactNode } from "react";
import { SearchControls } from "@/components/SearchControls";
import CardView from "@/components/CardView";
import type { Dataset } from "@/pages/DataManagement/dataset.model";
@@ -35,19 +36,19 @@ export default function DatasetManagementPage() {
const [editDatasetOpen, setEditDatasetOpen] = useState(false);
const [currentDataset, setCurrentDataset] = useState<Dataset | null>(null);
const [showUploadDialog, setShowUploadDialog] = useState(false);
const [statisticsData, setStatisticsData] = useState<any>({
count: {},
size: {},
});
const [statisticsData, setStatisticsData] = useState<StatisticsData>({
count: [],
size: [],
});
async function fetchStatistics() {
const { data } = await getDatasetStatisticsUsingGet();
const statistics = {
size: [
{
title: "数据集总数",
value: data?.totalDatasets || 0,
const statistics: StatisticsData = {
size: [
{
title: "数据集总数",
value: data?.totalDatasets || 0,
},
{
title: "文件总数",
@@ -75,10 +76,10 @@ export default function DatasetManagementPage() {
title: "视频",
value: data?.count?.video || 0,
},
],
};
setStatisticsData(statistics);
}
],
};
setStatisticsData(statistics);
}
const [tags, setTags] = useState<string[]>([]);
@@ -135,9 +136,9 @@ export default function DatasetManagementPage() {
message.success("数据集下载成功");
};
const handleDeleteDataset = async (id: number) => {
if (!id) return;
await deleteDatasetByIdUsingDelete(id);
const handleDeleteDataset = async (id: string) => {
if (!id) return;
await deleteDatasetByIdUsingDelete(id);
fetchData({ pageOffset: 0 });
message.success("数据删除成功");
};
@@ -222,12 +223,12 @@ export default function DatasetManagementPage() {
title: "状态",
dataIndex: "status",
key: "status",
render: (status: any) => {
return (
<Tag icon={status?.icon} color={status?.color}>
{status?.label}
</Tag>
);
render: (status: DatasetStatusMeta) => {
return (
<Tag icon={status?.icon} color={status?.color}>
{status?.label}
</Tag>
);
},
width: 120,
},
@@ -273,10 +274,10 @@ export default function DatasetManagementPage() {
key: "actions",
width: 200,
fixed: "right",
render: (_: any, record: Dataset) => (
<div className="flex items-center gap-2">
{operations.map((op) => (
<Tooltip key={op.key} title={op.label}>
render: (_: unknown, record: Dataset) => (
<div className="flex items-center gap-2">
{operations.map((op) => (
<Tooltip key={op.key} title={op.label}>
<Button
type="text"
icon={op.icon}
@@ -352,13 +353,13 @@ export default function DatasetManagementPage() {
<div className="grid grid-cols-1 gap-4">
<Card>
<div className="grid grid-cols-3">
{statisticsData.size?.map?.((item) => (
<Statistic
title={item.title}
key={item.title}
value={`${item.value}`}
/>
))}
{statisticsData.size.map((item) => (
<Statistic
title={item.title}
key={item.title}
value={`${item.value}`}
/>
))}
</div>
</Card>
</div>
@@ -395,5 +396,22 @@ export default function DatasetManagementPage() {
updateEvent="update:datasets"
/>
</div>
);
}
);
}
type StatisticsItem = {
title: string;
value: number | string;
};
type StatisticsData = {
count: StatisticsItem[];
size: StatisticsItem[];
};
type DatasetStatusMeta = {
label: string;
value: string;
color: string;
icon: ReactNode;
};

View File

@@ -33,7 +33,7 @@ export enum DataSource {
}
export interface DatasetFile {
id: number;
id: string;
fileName: string;
size: string;
uploadDate: string;
@@ -41,10 +41,10 @@ export interface DatasetFile {
}
export interface Dataset {
id: number;
id: string;
name: string;
description: string;
parentId?: number;
parentDatasetId?: string;
datasetType: DatasetType;
status: DatasetStatus;
size?: string;
@@ -58,6 +58,15 @@ export interface Dataset {
distribution?: Record<string, Record<string, number>>;
}
export interface DatasetImportConfig {
source?: DataSource | string;
target?: DataSource | string;
dataSource?: string;
splitByLine?: boolean;
hasArchive?: boolean;
[key: string]: string | number | boolean | null | undefined;
}
export interface TagItem {
id: string;
name: string;
@@ -77,14 +86,14 @@ export interface ScheduleConfig {
}
export interface DatasetTask {
id: number;
id: string;
name: string;
description: string;
type: string;
status: "importing" | "waiting" | "completed" | "failed";
progress: number;
createdAt: string;
importConfig: any;
importConfig: DatasetImportConfig;
scheduleConfig: ScheduleConfig;
nextExecution?: string;
lastExecution?: string;

View File

@@ -14,6 +14,7 @@ class Dataset(Base):
__tablename__ = "t_dm_datasets"
id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()), comment="UUID")
parent_dataset_id = Column(String(36), nullable=True, comment="父数据集ID(UUID)")
name = Column(String(255), nullable=False, comment="数据集名称")
description = Column(Text, nullable=True, comment="数据集描述")
dataset_type = Column(String(50), nullable=False, comment="数据集类型:IMAGE/TEXT/QA/MULTIMODAL/OTHER")

View File

@@ -195,8 +195,8 @@ def _update_task_status(
conn.execute(sql, params)
def _load_dataset_files(dataset_id: str) -> List[Tuple[str, str, str]]:
"""加载指定数据集下的所有已完成文件。"""
def _load_dataset_files(dataset_id: str) -> List[Tuple[str, str, str]]:
"""加载指定数据集下的所有已完成文件。"""
sql = text(
"""
@@ -208,9 +208,45 @@ def _load_dataset_files(dataset_id: str) -> List[Tuple[str, str, str]]:
"""
)
with SQLManager.create_connect() as conn:
rows = conn.execute(sql, {"dataset_id": dataset_id}).fetchall()
return [(str(r[0]), str(r[1]), str(r[2])) for r in rows]
with SQLManager.create_connect() as conn:
rows = conn.execute(sql, {"dataset_id": dataset_id}).fetchall()
return [(str(r[0]), str(r[1]), str(r[2])) for r in rows]
def _load_dataset_meta(dataset_id: str) -> Optional[Dict[str, Any]]:
"""加载数据集基础信息(含父ID与路径)。"""
sql = text(
"""
SELECT id, name, parent_dataset_id, path
FROM t_dm_datasets
WHERE id = :dataset_id
"""
)
with SQLManager.create_connect() as conn:
row = conn.execute(sql, {"dataset_id": dataset_id}).fetchone()
if not row:
return None
return dict(row._mapping) # type: ignore[attr-defined]
def _resolve_output_parent(source_dataset_id: str) -> Tuple[Optional[str], str]:
"""根据源数据集确定产出数据集的父级与基路径(产出挂在父级下)。"""
base_path = DEFAULT_OUTPUT_ROOT.rstrip("/") or "/dataset"
source_meta = _load_dataset_meta(source_dataset_id)
if not source_meta:
return None, base_path
parent_dataset_id = source_meta.get("parent_dataset_id")
if not parent_dataset_id:
return None, base_path
parent_meta = _load_dataset_meta(str(parent_dataset_id))
parent_path = parent_meta.get("path") if parent_meta else None
if not parent_path:
return None, base_path
return str(parent_dataset_id), str(parent_path)
def _load_files_by_ids(file_ids: List[str]) -> List[Tuple[str, str, str]]:
@@ -245,34 +281,35 @@ def _ensure_output_dir(output_dir: str) -> str:
return output_dir
def _create_output_dataset(
source_dataset_id: str,
source_dataset_name: str,
output_dataset_name: str,
) -> Tuple[str, str]:
"""为自动标注结果创建一个新的数据集并返回 (dataset_id, path)。"""
new_dataset_id = str(uuid.uuid4())
dataset_base_path = DEFAULT_OUTPUT_ROOT.rstrip("/") or "/dataset"
output_dir = os.path.join(dataset_base_path, new_dataset_id)
def _create_output_dataset(
source_dataset_id: str,
source_dataset_name: str,
output_dataset_name: str,
) -> Tuple[str, str]:
"""为自动标注结果创建一个新的数据集并返回 (dataset_id, path)。"""
new_dataset_id = str(uuid.uuid4())
parent_dataset_id, dataset_base_path = _resolve_output_parent(source_dataset_id)
output_dir = os.path.join(dataset_base_path, new_dataset_id)
description = (
f"Auto annotations for dataset {source_dataset_name or source_dataset_id}"[:255]
)
sql = text(
"""
INSERT INTO t_dm_datasets (id, name, description, dataset_type, path, status)
VALUES (:id, :name, :description, :dataset_type, :path, :status)
"""
)
params = {
"id": new_dataset_id,
"name": output_dataset_name,
"description": description,
"dataset_type": "IMAGE",
"path": output_dir,
"status": "ACTIVE",
sql = text(
"""
INSERT INTO t_dm_datasets (id, parent_dataset_id, name, description, dataset_type, path, status)
VALUES (:id, :parent_dataset_id, :name, :description, :dataset_type, :path, :status)
"""
)
params = {
"id": new_dataset_id,
"parent_dataset_id": parent_dataset_id,
"name": output_dataset_name,
"description": description,
"dataset_type": "IMAGE",
"path": output_dir,
"status": "ACTIVE",
}
with SQLManager.create_connect() as conn:

View File

@@ -15,6 +15,7 @@ USE datamate;
-- 数据集表(支持医学影像、文本、问答等多种类型)
CREATE TABLE IF NOT EXISTS t_dm_datasets (
id VARCHAR(36) PRIMARY KEY COMMENT 'UUID',
parent_dataset_id VARCHAR(36) NULL COMMENT '父数据集ID(UUID)',
name VARCHAR(255) NOT NULL COMMENT '数据集名称',
description TEXT COMMENT '数据集描述',
dataset_type VARCHAR(50) NOT NULL COMMENT '数据集类型:IMAGE/TEXT/QA/MULTIMODAL/OTHER',
@@ -36,13 +37,15 @@ CREATE TABLE IF NOT EXISTS t_dm_datasets (
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间',
created_by VARCHAR(255) COMMENT '创建者',
updated_by VARCHAR(255) COMMENT '更新者',
INDEX idx_dm_parent_dataset_id (parent_dataset_id),
INDEX idx_dm_dataset_type (dataset_type),
INDEX idx_dm_category (category),
INDEX idx_dm_format (format),
INDEX idx_dm_status (status),
INDEX idx_dm_public (is_public),
INDEX idx_dm_featured (is_featured),
INDEX idx_dm_created_at (created_at)
INDEX idx_dm_created_at (created_at),
FOREIGN KEY (parent_dataset_id) REFERENCES t_dm_datasets(id) ON DELETE RESTRICT
) COMMENT='数据集表(UUID 主键)';
-- 数据集文件表
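
Finally, a hedged JDBC illustration of what the new foreign key enforces at the database level (connection details are placeholders, not from this commit): deleting a parent row that still has children fails with an integrity-constraint violation, mirroring the DATASET_HAS_CHILDREN check in the application service.

// Illustrative JDBC sketch of ON DELETE RESTRICT on parent_dataset_id.
// URL, user and password are placeholders; the application normally rejects this
// earlier via the DATASET_HAS_CHILDREN error code.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLIntegrityConstraintViolationException;

public class ParentDeleteRestrictExample {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection(
                "jdbc:mysql://localhost:3306/datamate", "user", "password");
             PreparedStatement stmt = conn.prepareStatement(
                     "DELETE FROM t_dm_datasets WHERE id = ?")) {
            stmt.setString(1, "parent-dataset-id"); // a dataset that still has children
            stmt.executeUpdate();
        } catch (SQLIntegrityConstraintViolationException e) {
            // Raised by MySQL because child rows still reference this id.
            System.err.println("Cannot delete: child datasets still reference this parent");
        }
    }
}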