diff --git a/backend/openapi/specs/data-management.yaml b/backend/openapi/specs/data-management.yaml
index e505ee5..4a0fddf 100644
--- a/backend/openapi/specs/data-management.yaml
+++ b/backend/openapi/specs/data-management.yaml
@@ -61,12 +61,17 @@ paths:
           schema:
             type: string
           description: 关键词搜索(名称、描述)
-        - name: status
-          in: query
-          schema:
-            type: string
-            enum: [DRAFT, ACTIVE, PROCESSING, ARCHIVED, PUBLISHED, DEPRECATED]
-          description: 数据集状态过滤
+        - name: status
+          in: query
+          schema:
+            type: string
+            enum: [DRAFT, ACTIVE, PROCESSING, ARCHIVED, PUBLISHED, DEPRECATED]
+          description: 数据集状态过滤
+        - name: parentDatasetId
+          in: query
+          schema:
+            type: string
+          description: 父数据集ID过滤(传空字符串表示根数据集)
       responses:
         '200':
          description: 成功
@@ -538,15 +543,18 @@ components:
           type: boolean
           description: 是否为最后一页
 
-    DatasetResponse:
-      type: object
-      properties:
-        id:
-          type: string
-          description: 数据集ID
-        name:
-          type: string
-          description: 数据集名称
+    DatasetResponse:
+      type: object
+      properties:
+        id:
+          type: string
+          description: 数据集ID
+        parentDatasetId:
+          type: string
+          description: 父数据集ID
+        name:
+          type: string
+          description: 数据集名称
         description:
           type: string
           description: 数据集描述
@@ -590,29 +598,32 @@ components:
           type: string
           description: 创建者
 
-    CreateDatasetRequest:
-      type: object
-      required:
-        - name
-        - type
-      properties:
-        name:
-          type: string
-          description: 数据集名称
-          minLength: 1
-          maxLength: 100
-        description:
-          type: string
-          description: 数据集描述
-          maxLength: 500
-        type:
-          type: string
-          description: 数据集类型
-        tags:
-          type: array
-          items:
-            type: string
-          description: 标签列表
+    CreateDatasetRequest:
+      type: object
+      required:
+        - name
+        - type
+      properties:
+        name:
+          type: string
+          description: 数据集名称
+          minLength: 1
+          maxLength: 100
+        description:
+          type: string
+          description: 数据集描述
+          maxLength: 500
+        type:
+          type: string
+          description: 数据集类型
+        parentDatasetId:
+          type: string
+          description: 父数据集ID
+        tags:
+          type: array
+          items:
+            type: string
+          description: 标签列表
         dataSource:
           type: string
           description: 数据源
@@ -620,22 +631,25 @@ components:
           type: string
           description: 目标位置
 
-    UpdateDatasetRequest:
-      type: object
-      properties:
-        name:
-          type: string
-          description: 数据集名称
-          maxLength: 100
-        description:
-          type: string
-          description: 数据集描述
-          maxLength: 500
-        tags:
-          type: array
-          items:
-            type: string
-          description: 标签列表
+    UpdateDatasetRequest:
+      type: object
+      properties:
+        name:
+          type: string
+          description: 数据集名称
+          maxLength: 100
+        description:
+          type: string
+          description: 数据集描述
+          maxLength: 500
+        parentDatasetId:
+          type: string
+          description: 父数据集ID
+        tags:
+          type: array
+          items:
+            type: string
+          description: 标签列表
         status:
           type: string
           enum: [DRAFT, ACTIVE, PROCESSING, ARCHIVED, PUBLISHED, DEPRECATED]
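A minimal client-side sketch of the new parentDatasetId filter, assuming the dataset list endpoint is exposed as GET /api/datasets on localhost:8080 (neither the host nor the path appears in this hunk, so both are assumptions). An empty parentDatasetId requests root datasets only; a concrete id would list that dataset's children.

import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;

public class ListRootDatasetsExample {
    public static void main(String[] args) throws Exception {
        // parentDatasetId="" asks for root datasets, matching the spec's "传空字符串表示根数据集".
        String query = "parentDatasetId=" + URLEncoder.encode("", StandardCharsets.UTF_8)
                + "&status=" + URLEncoder.encode("ACTIVE", StandardCharsets.UTF_8);
        HttpRequest request = HttpRequest.newBuilder()
                // Host and path are illustrative assumptions, not taken from this spec.
                .uri(URI.create("http://localhost:8080/api/datasets?" + query))
                .GET()
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode());
        System.out.println(response.body());
    }
}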
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/application/DatasetApplicationService.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/application/DatasetApplicationService.java
index e5c8d03..e018124 100644
--- a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/application/DatasetApplicationService.java
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/application/DatasetApplicationService.java
@@ -6,6 +6,9 @@ import com.datamate.common.domain.utils.ChunksSaver;
 import com.datamate.common.setting.application.SysParamApplicationService;
 import com.datamate.datamanagement.interfaces.dto.*;
 import com.datamate.common.infrastructure.exception.BusinessAssert;
+import com.datamate.common.infrastructure.exception.BusinessException;
+import com.datamate.common.infrastructure.exception.CommonErrorCode;
+import com.datamate.common.infrastructure.exception.SystemErrorCode;
 import com.datamate.common.interfaces.PagedResponse;
 import com.datamate.datamanagement.domain.model.dataset.Dataset;
 import com.datamate.datamanagement.domain.model.dataset.DatasetFile;
@@ -27,6 +30,7 @@ import org.springframework.stereotype.Service;
 import org.springframework.transaction.annotation.Transactional;
 import org.springframework.util.StringUtils;
 
+import java.io.File;
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -62,7 +66,9 @@ public class DatasetApplicationService {
         BusinessAssert.isTrue(datasetRepository.findByName(createDatasetRequest.getName()) == null, DataManagementErrorCode.DATASET_ALREADY_EXISTS);
         // 创建数据集对象
         Dataset dataset = DatasetConverter.INSTANCE.convertToDataset(createDatasetRequest);
-        dataset.initCreateParam(datasetBasePath);
+        Dataset parentDataset = resolveParentDataset(createDatasetRequest.getParentDatasetId(), dataset.getId());
+        dataset.setParentDatasetId(parentDataset == null ? null : parentDataset.getId());
+        dataset.initCreateParam(datasetBasePath, parentDataset == null ? null : parentDataset.getPath());
         // 处理标签
         Set processedTags = Optional.ofNullable(createDatasetRequest.getTags())
             .filter(CollectionUtils::isNotEmpty)
@@ -98,6 +104,9 @@ public class DatasetApplicationService {
         if (Objects.nonNull(updateDatasetRequest.getStatus())) {
             dataset.setStatus(updateDatasetRequest.getStatus());
         }
+        if (updateDatasetRequest.getParentDatasetId() != null) {
+            handleParentChange(dataset, updateDatasetRequest.getParentDatasetId());
+        }
         if (StringUtils.hasText(updateDatasetRequest.getDataSource())) {
             // 数据源id不为空,使用异步线程进行文件扫盘落库
             processDataSourceAsync(dataset.getId(), updateDatasetRequest.getDataSource());
@@ -112,10 +121,11 @@ public class DatasetApplicationService {
     @Transactional
     public void deleteDataset(String datasetId) {
         Dataset dataset = datasetRepository.getById(datasetId);
+        BusinessAssert.notNull(dataset, DataManagementErrorCode.DATASET_NOT_FOUND);
+        long childCount = datasetRepository.countByParentId(datasetId);
+        BusinessAssert.isTrue(childCount == 0, DataManagementErrorCode.DATASET_HAS_CHILDREN);
         datasetRepository.removeById(datasetId);
-        if (dataset != null) {
-            ChunksSaver.deleteFolder(dataset.getPath());
-        }
+        ChunksSaver.deleteFolder(dataset.getPath());
     }
 
     /**
@@ -164,6 +174,73 @@ public class DatasetApplicationService {
         return tags;
     }
 
+    private Dataset resolveParentDataset(String parentDatasetId, String currentDatasetId) {
+        String normalized = normalizeParentId(parentDatasetId);
+        if (normalized == null) {
+            return null;
+        }
+        BusinessAssert.isTrue(!normalized.equals(currentDatasetId), CommonErrorCode.PARAM_ERROR);
+        Dataset parent = datasetRepository.getById(normalized);
+        BusinessAssert.notNull(parent, DataManagementErrorCode.DATASET_NOT_FOUND);
+        BusinessAssert.isTrue(parent.getParentDatasetId() == null, CommonErrorCode.PARAM_ERROR);
+        return parent;
+    }
+
+    private void handleParentChange(Dataset dataset, String parentDatasetId) {
+        String normalized = normalizeParentId(parentDatasetId);
+        if (Objects.equals(dataset.getParentDatasetId(), normalized)) {
+            return;
+        }
+        long childCount = datasetRepository.countByParentId(dataset.getId());
+        if (childCount > 0 && normalized != null) {
+            throw BusinessException.of(DataManagementErrorCode.DATASET_HAS_CHILDREN);
+        }
+        Dataset parent = normalized == null ? null : resolveParentDataset(normalized, dataset.getId());
+        String newPath = buildDatasetPath(parent == null ? datasetBasePath : parent.getPath(), dataset.getId());
+        moveDatasetPath(dataset, newPath);
+        dataset.setParentDatasetId(parent == null ? null : parent.getId());
+    }
+
+    private String normalizeParentId(String parentDatasetId) {
+        if (!StringUtils.hasText(parentDatasetId)) {
+            return null;
+        }
+        return parentDatasetId.trim();
+    }
+
+    private String buildDatasetPath(String basePath, String datasetId) {
+        String normalized = basePath;
+        while (normalized.endsWith(File.separator)) {
+            normalized = normalized.substring(0, normalized.length() - 1);
+        }
+        return normalized + File.separator + datasetId;
+    }
+
+    private void moveDatasetPath(Dataset dataset, String newPath) {
+        String oldPath = dataset.getPath();
+        if (Objects.equals(oldPath, newPath)) {
+            return;
+        }
+        Path sourcePath = Paths.get(oldPath);
+        Path targetPath = Paths.get(newPath);
+        try {
+            if (Files.exists(sourcePath)) {
+                if (Files.exists(targetPath)) {
+                    throw BusinessException.of(CommonErrorCode.PARAM_ERROR);
+                }
+                Files.createDirectories(targetPath.getParent());
+                Files.move(sourcePath, targetPath);
+            } else {
+                Files.createDirectories(targetPath);
+            }
+        } catch (IOException e) {
+            log.error("move dataset path error, from {} to {}", oldPath, newPath, e);
+            throw BusinessException.of(SystemErrorCode.FILE_SYSTEM_ERROR);
+        }
+        datasetFileRepository.updateFilePathPrefix(dataset.getId(), oldPath, newPath);
+        dataset.setPath(newPath);
+    }
+
     /**
      * 获取数据集统计信息
      */
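The hierarchy rules above are spread across resolveParentDataset, handleParentChange and deleteDataset. The following standalone sketch (not the service class itself; the record and map stand in for the real entity and repository) restates them as I read them from the diff: a dataset cannot be its own parent, only root datasets may act as parents (a single level of nesting), and a dataset that still has children can neither be deleted nor re-parented.

import java.util.Map;
import java.util.Objects;

public class ParentRuleSketch {
    record Node(String id, String parentId) {}

    static void validateParent(Map<String, Node> store, String childId, String parentId) {
        if (parentId == null || parentId.isBlank()) {
            return; // becoming or staying a root dataset is always allowed
        }
        String normalized = parentId.trim();
        if (normalized.equals(childId)) {
            throw new IllegalArgumentException("a dataset cannot be its own parent");
        }
        Node parent = store.get(normalized);
        if (parent == null) {
            throw new IllegalArgumentException("parent dataset not found");
        }
        if (parent.parentId() != null) {
            throw new IllegalArgumentException("only root datasets may act as parents");
        }
        boolean childHasChildren = store.values().stream()
                .anyMatch(n -> Objects.equals(n.parentId(), childId));
        if (childHasChildren) {
            throw new IllegalArgumentException("a dataset with children cannot be moved under a parent");
        }
    }

    public static void main(String[] args) {
        Map<String, Node> store = Map.of(
                "root-a", new Node("root-a", null),
                "child-1", new Node("child-1", "root-a"));
        validateParent(store, "new-ds", "root-a");      // accepted: root-a is a root dataset
        try {
            validateParent(store, "new-ds", "child-1"); // rejected: child-1 already has a parent
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}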
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/application/DatasetFileApplicationService.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/application/DatasetFileApplicationService.java
index 62cdec1..90b06c6 100644
--- a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/application/DatasetFileApplicationService.java
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/application/DatasetFileApplicationService.java
@@ -323,10 +323,11 @@ public class DatasetFileApplicationService {
      * @return 请求id
      */
     @Transactional
-    public String preUpload(UploadFilesPreRequest chunkUploadRequest, String datasetId) {
-        if (Objects.isNull(datasetRepository.getById(datasetId))) {
-            throw BusinessException.of(DataManagementErrorCode.DATASET_NOT_FOUND);
-        }
+    public String preUpload(UploadFilesPreRequest chunkUploadRequest, String datasetId) {
+        Dataset dataset = datasetRepository.getById(datasetId);
+        if (Objects.isNull(dataset)) {
+            throw BusinessException.of(DataManagementErrorCode.DATASET_NOT_FOUND);
+        }
 
         // 构建上传路径,如果有 prefix 则追加到路径中
         String prefix = Optional.ofNullable(chunkUploadRequest.getPrefix()).orElse("").trim();
@@ -335,10 +336,13 @@ public class DatasetFileApplicationService {
             prefix = prefix.substring(1);
         }
 
-        String uploadPath = datasetBasePath + File.separator + datasetId;
-        if (!prefix.isEmpty()) {
-            uploadPath = uploadPath + File.separator + prefix.replace("/", File.separator);
-        }
+        String uploadPath = dataset.getPath();
+        if (uploadPath == null || uploadPath.isBlank()) {
+            uploadPath = datasetBasePath + File.separator + datasetId;
+        }
+        if (!prefix.isEmpty()) {
+            uploadPath = uploadPath + File.separator + prefix.replace("/", File.separator);
+        }
 
         ChunkUploadPreRequest request = ChunkUploadPreRequest.builder().build();
         request.setUploadPath(uploadPath);
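The resulting directory layout is easiest to see outside the diff. This standalone sketch mirrors buildDatasetPath and the preUpload fallback shown above (the base path below is illustrative, not taken from configuration): a child dataset's folder is nested under its parent's path, and uploads fall back to datasetBasePath plus the dataset id when no path has been persisted.

import java.io.File;

public class DatasetPathSketch {

    // Trim trailing separators, then append the dataset id, as buildDatasetPath does.
    static String childPath(String basePath, String datasetId) {
        String normalized = basePath;
        while (normalized.endsWith(File.separator)) {
            normalized = normalized.substring(0, normalized.length() - 1);
        }
        return normalized + File.separator + datasetId;
    }

    // Mirror of the preUpload fallback: prefer the persisted dataset path,
    // otherwise rebuild it from the base path and the dataset id.
    static String uploadRoot(String persistedPath, String basePath, String datasetId) {
        if (persistedPath == null || persistedPath.isBlank()) {
            return basePath + File.separator + datasetId;
        }
        return persistedPath;
    }

    public static void main(String[] args) {
        String base = File.separator + "data" + File.separator + "datasets" + File.separator; // assumed base
        String parent = childPath(base, "parent-id");
        String child = childPath(parent, "child-id");
        System.out.println(parent);                       // .../datasets/parent-id
        System.out.println(child);                        // .../datasets/parent-id/child-id
        System.out.println(uploadRoot(null, base, "child-id"));
    }
}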
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/Dataset.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/Dataset.java
index 24988a4..eeebc37 100644
--- a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/Dataset.java
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/Dataset.java
@@ -28,6 +28,10 @@ public class Dataset extends BaseEntity {
      * 数据集描述
      */
     private String description;
+    /**
+     * 父数据集ID
+     */
+    private String parentDatasetId;
     /**
      * 数据集类型
      */
@@ -110,14 +114,23 @@ public class Dataset extends BaseEntity {
         this.updatedAt = LocalDateTime.now();
     }
 
-    public void initCreateParam(String datasetBasePath) {
+    public void initCreateParam(String datasetBasePath, String parentPath) {
         this.id = UUID.randomUUID().toString();
-        this.path = datasetBasePath + File.separator + this.id;
+        String basePath = normalizeBasePath(parentPath != null && !parentPath.isBlank() ? parentPath : datasetBasePath);
+        this.path = basePath + File.separator + this.id;
         if (this.status == null) {
             this.status = DatasetStatusType.DRAFT;
         }
     }
 
+    private String normalizeBasePath(String basePath) {
+        String normalized = basePath;
+        while (normalized.endsWith(File.separator)) {
+            normalized = normalized.substring(0, normalized.length() - 1);
+        }
+        return normalized;
+    }
+
     public void updateBasicInfo(String name, String description, String category) {
         if (name != null && !name.isEmpty()) this.name = name;
         if (description != null) this.description = description;
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/exception/DataManagementErrorCode.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/exception/DataManagementErrorCode.java
index c1d941e..63c7afb 100644
--- a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/exception/DataManagementErrorCode.java
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/exception/DataManagementErrorCode.java
@@ -37,10 +37,14 @@ public enum DataManagementErrorCode implements ErrorCode {
      * 数据集文件已存在
      */
     DATASET_FILE_ALREADY_EXISTS("data_management.0006", "数据集文件已存在"),
-    /**
-     * 目录不存在
-     */
-    DIRECTORY_NOT_FOUND("data_management.0007", "目录不存在");
+    /**
+     * 目录不存在
+     */
+    DIRECTORY_NOT_FOUND("data_management.0007", "目录不存在"),
+    /**
+     * 存在子数据集
+     */
+    DATASET_HAS_CHILDREN("data_management.0008", "存在子数据集,禁止删除或移动");
 
     private final String code;
     private final String message;
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/mapper/DatasetFileMapper.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/mapper/DatasetFileMapper.java
index f67121c..84f6680 100644
--- a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/mapper/DatasetFileMapper.java
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/mapper/DatasetFileMapper.java
@@ -26,4 +26,7 @@ public interface DatasetFileMapper extends BaseMapper {
     int update(DatasetFile file);
 
     int deleteById(@Param("id") String id);
+    int updateFilePathPrefix(@Param("datasetId") String datasetId,
+                             @Param("oldPrefix") String oldPrefix,
+                             @Param("newPrefix") String newPrefix);
 }
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/DatasetFileRepository.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/DatasetFileRepository.java
index 1ed09bc..a4adcd0 100644
--- a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/DatasetFileRepository.java
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/DatasetFileRepository.java
@@ -25,4 +25,6 @@ public interface DatasetFileRepository extends IRepository {
 
     IPage findByCriteria(String datasetId, String fileType, String status, String name,
                          IPage page);
+
+    int updateFilePathPrefix(String datasetId, String oldPrefix, String newPrefix);
 }
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/DatasetRepository.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/DatasetRepository.java
index b257161..9eaa5ba 100644
--- a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/DatasetRepository.java
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/DatasetRepository.java
@@ -26,4 +26,6 @@ public interface DatasetRepository extends IRepository {
     AllDatasetStatisticsResponse getAllDatasetStatistics();
 
     IPage findByCriteria(IPage page, DatasetPagingQuery query);
+
+    long countByParentId(String parentDatasetId);
 }
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/impl/DatasetFileRepositoryImpl.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/impl/DatasetFileRepositoryImpl.java
index be7cbd8..cd9041b 100644
--- a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/impl/DatasetFileRepositoryImpl.java
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/impl/DatasetFileRepositoryImpl.java
@@ -56,4 +56,9 @@ public class DatasetFileRepositoryImpl extends CrudRepository
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/impl/DatasetRepositoryImpl.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/infrastructure/persistence/repository/impl/DatasetRepositoryImpl.java
 w.like(Dataset::getName, query.getKeyword()).or()
@@ -74,4 +82,10 @@ public class DatasetRepositoryImpl extends CrudRepository
+
+    @Override
+    public long countByParentId(String parentDatasetId) {
+        return count(new LambdaQueryWrapper<Dataset>()
+                .eq(Dataset::getParentDatasetId, parentDatasetId));
+    }
 }
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/CreateDatasetRequest.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/CreateDatasetRequest.java
index 4f58677..44b6f74 100644
--- a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/CreateDatasetRequest.java
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/CreateDatasetRequest.java
@@ -29,6 +29,8 @@ public class CreateDatasetRequest {
     /** 数据集类型 */
     @NotNull(message = "数据集类型不能为空")
     private DatasetType datasetType;
+    /** 父数据集ID */
+    private String parentDatasetId;
     /** 标签列表 */
     private List tags;
     /** 数据源 */
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetPagingQuery.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetPagingQuery.java
index 6016f4d..84aa016 100644
--- a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetPagingQuery.java
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetPagingQuery.java
@@ -39,4 +39,9 @@ public class DatasetPagingQuery extends PagingQuery {
      * 状态过滤
      */
     private DatasetStatusType status;
+
+    /**
+     * 父数据集ID过滤
+     */
+    private String parentDatasetId;
 }
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetResponse.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetResponse.java
index e039019..bad0503 100644
--- a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetResponse.java
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/DatasetResponse.java
@@ -15,6 +15,8 @@ import java.util.Map;
 public class DatasetResponse {
     /** 数据集ID */
     private String id;
+    /** 父数据集ID */
+    private String parentDatasetId;
     /** 数据集名称 */
     private String name;
     /** 数据集描述 */
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/UpdateDatasetRequest.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/UpdateDatasetRequest.java
index 91070f3..613e30c 100644
--- a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/UpdateDatasetRequest.java
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/interfaces/dto/UpdateDatasetRequest.java
@@ -23,6 +23,8 @@ public class UpdateDatasetRequest {
     private String description;
     /** 归集任务id */
     private String dataSource;
+    /** 父数据集ID */
+    private String parentDatasetId;
     /** 标签列表 */
     private List tags;
     /** 数据集状态 */
diff --git a/backend/services/data-management-service/src/main/resources/mappers/DatasetFileMapper.xml b/backend/services/data-management-service/src/main/resources/mappers/DatasetFileMapper.xml
index f5c6a1e..0458ab1 100644
--- a/backend/services/data-management-service/src/main/resources/mappers/DatasetFileMapper.xml
+++ b/backend/services/data-management-service/src/main/resources/mappers/DatasetFileMapper.xml
@@ -95,4 +95,11 @@
         DELETE FROM t_dm_dataset_files WHERE id = #{id}
     </delete>
+
+    <update id="updateFilePathPrefix">
+        UPDATE t_dm_dataset_files
+        SET file_path = CONCAT(#{newPrefix}, SUBSTRING(file_path, LENGTH(#{oldPrefix}) + 1))
+        WHERE dataset_id = #{datasetId}
+          AND file_path LIKE CONCAT(#{oldPrefix}, '%')
+    </update>
 </mapper>
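The updateFilePathPrefix statement rewrites each matching row as CONCAT(#{newPrefix}, SUBSTRING(file_path, LENGTH(#{oldPrefix}) + 1)), i.e. it swaps the old directory prefix for the new one while keeping the relative part of the path. The in-memory sketch below illustrates the effect on a single path; the example values are made up.

public class PathPrefixRewriteSketch {

    static String rewrite(String filePath, String oldPrefix, String newPrefix) {
        if (!filePath.startsWith(oldPrefix)) {
            return filePath; // mirrors the SQL's LIKE CONCAT(#{oldPrefix}, '%') guard
        }
        // CONCAT(newPrefix, SUBSTRING(file_path, LENGTH(oldPrefix) + 1))
        return newPrefix + filePath.substring(oldPrefix.length());
    }

    public static void main(String[] args) {
        String oldPrefix = "/data/datasets/child-id";
        String newPrefix = "/data/datasets/parent-id/child-id";
        System.out.println(rewrite("/data/datasets/child-id/images/0001.png", oldPrefix, newPrefix));
        // -> /data/datasets/parent-id/child-id/images/0001.png
    }
}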
diff --git a/backend/services/data-management-service/src/main/resources/mappers/DatasetMapper.xml b/backend/services/data-management-service/src/main/resources/mappers/DatasetMapper.xml
index f266894..3a3163f 100644
--- a/backend/services/data-management-service/src/main/resources/mappers/DatasetMapper.xml
+++ b/backend/services/data-management-service/src/main/resources/mappers/DatasetMapper.xml
@@ -4,12 +4,13 @@
-        id, name, description, dataset_type, category, path, format, schema_info, size_bytes, file_count, record_count,
+        id, parent_dataset_id, name, description, dataset_type, category, path, format, schema_info, size_bytes, file_count, record_count,
         retention_days, tags, metadata, status, is_public, is_featured, version, created_at, updated_at, created_by, updated_by
 
         d.id AS id,
+        d.parent_dataset_id AS parent_dataset_id,
         d.name AS name,
         d.description AS description,
         d.dataset_type AS dataset_type,
diff --git a/frontend/src/pages/DataManagement/Create/CreateDataset.tsx b/frontend/src/pages/DataManagement/Create/CreateDataset.tsx
index cd0109d..356dac4 100644
--- a/frontend/src/pages/DataManagement/Create/CreateDataset.tsx
+++ b/frontend/src/pages/DataManagement/Create/CreateDataset.tsx
@@ -17,6 +17,7 @@ export default function DatasetCreate() {
     description: "",
     datasetType: DatasetType.TEXT,
     tags: [],
+    parentDatasetId: "",
   });
 
   const handleSubmit = async () => {
diff --git a/frontend/src/pages/DataManagement/Create/EditDataset.tsx b/frontend/src/pages/DataManagement/Create/EditDataset.tsx
index 843f8e5..1dac677 100644
--- a/frontend/src/pages/DataManagement/Create/EditDataset.tsx
+++ b/frontend/src/pages/DataManagement/Create/EditDataset.tsx
@@ -3,7 +3,7 @@ import {
   queryDatasetByIdUsingGet,
   updateDatasetByIdUsingPut,
 } from "../dataset.api";
-import { useEffect, useState } from "react";
+import { useCallback, useEffect, useState } from "react";
 import { Dataset, DatasetType } from "../dataset.model";
 import { App, Button, Form, Modal } from "antd";
 
@@ -26,8 +26,9 @@ export default function EditDataset({
     description: "",
     datasetType: DatasetType.TEXT,
     tags: [],
+    parentDatasetId: "",
   });
 
-  const fetchDataset = async () => {
+  const fetchDataset = useCallback(async () => {
     if (!open) return; // 如果有id,说明是编辑模式
     if (data && data.id) {
@@ -36,18 +37,19 @@ export default function EditDataset({
         ...newData,
         type: newData.type,
         tags: newData.tags.map((tag) => tag.name) || [],
+        parentDatasetId: newData.parentDatasetId || "",
       };
       setNewDataset(updatedDataset);
       form.setFieldsValue(updatedDataset);
     }
-  };
+  }, [data, form, open]);
 
   useEffect(() => {
     fetchDataset();
-  }, [data]);
+  }, [fetchDataset]);
 
   const handleValuesChange = (_, allValues) => {
-    setNewDataset({ ...newDataset, ...allValues });
+    setNewDataset((prev) => ({ ...prev, ...allValues }));
   };
 
   const handleSubmit = async () => {
diff --git a/frontend/src/pages/DataManagement/Create/components/BasicInformation.tsx b/frontend/src/pages/DataManagement/Create/components/BasicInformation.tsx
index 363aa49..290f8ac 100644
--- a/frontend/src/pages/DataManagement/Create/components/BasicInformation.tsx
+++ b/frontend/src/pages/DataManagement/Create/components/BasicInformation.tsx
@@ -1,34 +1,33 @@
 import RadioCard from "@/components/RadioCard";
 import { Input, Select, Form } from "antd";
 import { datasetTypes } from "../../dataset.const";
-import { useEffect, useState } from "react";
-import { queryDatasetTagsUsingGet } from "../../dataset.api";
-import {queryTasksUsingGet} from "@/pages/DataCollection/collection.apis.ts";
+import { useCallback, useEffect, useState } from "react";
+import type { Dispatch, SetStateAction } from "react";
+import { queryDatasetTagsUsingGet, queryDatasetsUsingGet } from "../../dataset.api";
+import { queryTasksUsingGet } from "@/pages/DataCollection/collection.apis.ts";
+import type { Dataset, TagItem } from "../../dataset.model";
 
 export default function BasicInformation({
   data,
   setData,
   hidden = [],
 }: {
-  data: any;
-  setData: any;
+  data: DatasetFormData;
+  setData: Dispatch<SetStateAction<DatasetFormData>>;
   hidden?: string[];
 }) {
-  const [tagOptions, setTagOptions] = useState<
-    {
-      label: JSX.Element;
-      title: string;
-      options: { label: JSX.Element; value: string }[];
-    }[]
+  const [tagOptions, setTagOptions] = useState([]);
+  const [collectionOptions, setCollectionOptions] = useState([]);
+  const [parentDatasetOptions, setParentDatasetOptions] = useState<
+    { label: string; value: string }[]
   >([]);
-  const [collectionOptions, setCollectionOptions] = useState([]);
 
-  // 获取标签
-  const fetchTags = async () => {
+  const fetchTags = useCallback(async () => {
     if (hidden.includes("tags")) return;
     try {
       const { data } = await queryDatasetTagsUsingGet();
-      const customTags = data.map((tag) => ({
+      const tags = Array.isArray(data) ? (data as TagItem[]) : [];
+      const customTags = tags.map((tag) => ({
         label: tag.name,
         value: tag.name,
       }));
@@ -36,13 +35,16 @@ export default function BasicInformation({
     } catch (error) {
       console.error("Error fetching tags: ", error);
     }
-  };
+  }, [hidden]);
 
   // 获取归集任务
-  const fetchCollectionTasks = async () => {
+  const fetchCollectionTasks = useCallback(async () => {
     try {
       const res = await queryTasksUsingGet({ page: 0, size: 100 });
-      const options = res.data.content.map((task: any) => ({
+      const tasks = Array.isArray(res?.data?.content)
+        ? (res.data.content as CollectionTask[])
+        : [];
+      const options = tasks.map((task) => ({
         label: task.name,
         value: task.id,
       }));
@@ -50,12 +52,40 @@ export default function BasicInformation({
     } catch (error) {
       console.error("Error fetching collection tasks:", error);
     }
-  };
+  }, []);
+
+  const fetchParentDatasets = useCallback(async () => {
+    if (hidden.includes("parentDatasetId")) return;
+    try {
+      const { data: resData } = await queryDatasetsUsingGet({
+        parentDatasetId: "",
+        page: 1,
+        size: 1000,
+      });
+      const currentDatasetId = data?.id;
+      const rootDatasets = Array.isArray(resData?.content)
+        ? (resData.content as DatasetSummary[])
+        : [];
+      const options = rootDatasets
+        .filter((dataset) => dataset.id !== currentDatasetId)
+        .map((dataset) => ({
+          label: dataset.name,
+          value: dataset.id,
+        }));
+      setParentDatasetOptions([
+        { label: "根数据集", value: "" },
+        ...options,
+      ]);
+    } catch (error) {
+      console.error("Error fetching parent datasets:", error);
+    }
+  }, [data?.id, hidden]);
 
   useEffect(() => {
     fetchTags();
     fetchCollectionTasks();
-  }, []);
+    fetchParentDatasets();
+  }, [fetchTags, fetchCollectionTasks, fetchParentDatasets]);
 
   return (
     <>
       )}
+      {!hidden.includes("parentDatasetId") && (