From 758cf93e36fb9e8e122db38aab0df1a98e91bbf3 Mon Sep 17 00:00:00 2001
From: hefanli <76611805+hefanli@users.noreply.github.com>
Date: Tue, 9 Dec 2025 14:42:27 +0800
Subject: [PATCH 1/3] feature: add archive upload support (#137)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* feature: add archive upload support

* fix: also refresh the dataset's file statistics when a file is deleted

* fix: add the evaluation service route for the k8s scenario
---
 backend/pom.xml                               |   7 +
 .../DatasetFileApplicationService.java        |  52 +++--
 .../dataset/DatasetFileUploadCheckInfo.java   |   7 +-
 backend/services/main-application/pom.xml     |   1 -
 backend/shared/domain-common/pom.xml          |   4 +
 .../common/domain/model/UploadCheckInfo.java  |   7 -
 .../common/domain/service/FileService.java    |   6 +-
 .../common/domain/utils/AnalyzerUtils.java    |  11 +
 .../common/domain/utils/ArchiveAnalyzer.java  | 192 ++++++++++++++++++
 .../exception/CommonErrorCode.java            |  17 ++
 .../charts/frontend/templates/configmap.yaml  |   9 +-
 frontend/src/hooks/useSliceUpload.tsx         |   2 +
 .../DataManagement/Detail/DatasetDetail.tsx   |   2 +-
 .../Detail/components/ImportConfiguration.tsx |  57 +++---
 .../Detail/components/Overview.tsx            |  13 +-
 .../src/pages/DataManagement/dataset.model.ts |   1 +
 16 files changed, 337 insertions(+), 51 deletions(-)
 delete mode 100644 backend/shared/domain-common/src/main/java/com/datamate/common/domain/model/UploadCheckInfo.java
 create mode 100644 backend/shared/domain-common/src/main/java/com/datamate/common/domain/utils/ArchiveAnalyzer.java
 create mode 100644 backend/shared/domain-common/src/main/java/com/datamate/common/infrastructure/exception/CommonErrorCode.java
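
A sketch of the overall flow this series wires up (illustrative only, not part of the
commit): the frontend marks an upload batch with a hasArchive flag, preUpload() stores
that flag in the request's checkInfo, and when the last chunk of a file lands the
backend unpacks the archive and registers every extracted file on the dataset. The
path and file name below are hypothetical; the types and calls are from the diffs:

    // Hypothetical driver code mirroring the branch in saveFileInfoToDb() below.
    FileUploadResult uploaded = FileUploadResult.builder()
            .savedFile(new File("/dataset/ds-001/images.zip")) // hypothetical path
            .fileName("images.zip")
            .build();
    List<FileUploadResult> files = AnalyzerUtils.isPackage(uploaded.getSavedFile().getPath())
            ? ArchiveAnalyzer.process(uploaded)        // unpack and delete the archive
            : Collections.singletonList(uploaded);     // plain file, registered as-is
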
diff --git a/backend/pom.xml b/backend/pom.xml
index 7ca22c3..6ff019a 100644
--- a/backend/pom.xml
+++ b/backend/pom.xml
@@ -37,6 +37,7 @@
         0.2.0
         5.4.0
         2.21.1
+        <commons-compress.version>1.26.1</commons-compress.version>
 
@@ -151,6 +152,12 @@
             2.6.6
+
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-compress</artifactId>
+            <version>${commons-compress.version}</version>
+        </dependency>
 
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/application/DatasetFileApplicationService.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/application/DatasetFileApplicationService.java
index 7ee8093..cceb516 100644
--- a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/application/DatasetFileApplicationService.java
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/application/DatasetFileApplicationService.java
@@ -5,8 +5,10 @@ import com.datamate.common.domain.model.ChunkUploadPreRequest;
 import com.datamate.common.domain.model.FileUploadResult;
 import com.datamate.common.domain.service.FileService;
 import com.datamate.common.domain.utils.AnalyzerUtils;
+import com.datamate.common.domain.utils.ArchiveAnalyzer;
 import com.datamate.common.infrastructure.exception.BusinessAssert;
 import com.datamate.common.infrastructure.exception.BusinessException;
+import com.datamate.common.infrastructure.exception.CommonErrorCode;
 import com.datamate.common.infrastructure.exception.SystemErrorCode;
 import com.datamate.common.interfaces.PagedResponse;
 import com.datamate.common.interfaces.PagingQuery;
@@ -213,6 +215,9 @@ public class DatasetFileApplicationService {
      */
     @Transactional
     public String preUpload(UploadFilesPreRequest chunkUploadRequest, String datasetId) {
+        if (Objects.isNull(datasetRepository.getById(datasetId))) {
+            throw BusinessException.of(DataManagementErrorCode.DATASET_NOT_FOUND);
+        }
         ChunkUploadPreRequest request = ChunkUploadPreRequest.builder().build();
         request.setUploadPath(datasetBasePath + File.separator + datasetId);
         request.setTotalFileNum(chunkUploadRequest.getTotalFileNum());
@@ -225,7 +230,7 @@ public class DatasetFileApplicationService {
             String checkInfoJson = objectMapper.writeValueAsString(checkInfo);
             request.setCheckInfo(checkInfoJson);
         } catch (JsonProcessingException e) {
-            throw new IllegalArgumentException("Failed to serialize checkInfo to JSON", e);
+            log.warn("Failed to serialize checkInfo to JSON", e);
         }
         return fileService.preUpload(request);
     }
@@ -238,31 +243,54 @@ public class DatasetFileApplicationService {
     @Transactional
     public void chunkUpload(String datasetId, UploadFileRequest uploadFileRequest) {
         FileUploadResult uploadResult = fileService.chunkUpload(DatasetConverter.INSTANCE.toChunkUploadRequest(uploadFileRequest));
-        saveFileInfoToDb(uploadResult, uploadFileRequest, datasetId);
+        saveFileInfoToDb(uploadResult, datasetId);
     }
 
-    private void saveFileInfoToDb(FileUploadResult fileUploadResult, UploadFileRequest uploadFile, String datasetId) {
+    private void saveFileInfoToDb(FileUploadResult fileUploadResult, String datasetId) {
         if (Objects.isNull(fileUploadResult.getSavedFile())) {
             // the chunked upload has not finished yet
             return;
         }
+        DatasetFileUploadCheckInfo checkInfo;
+        try {
+            ObjectMapper objectMapper = new ObjectMapper();
+            checkInfo = objectMapper.readValue(fileUploadResult.getCheckInfo(), DatasetFileUploadCheckInfo.class);
+            if (!Objects.equals(checkInfo.getDatasetId(), datasetId)) {
+                throw BusinessException.of(DataManagementErrorCode.DATASET_NOT_FOUND);
+            }
+        } catch (IllegalArgumentException | JsonProcessingException e) {
+            log.warn("Failed to convert checkInfo to DatasetFileUploadCheckInfo", e);
+            throw BusinessException.of(CommonErrorCode.PRE_UPLOAD_REQUEST_NOT_EXIST);
+        }
+        List<FileUploadResult> files;
+        if (checkInfo.isHasArchive() && AnalyzerUtils.isPackage(fileUploadResult.getSavedFile().getPath())) {
+            files = ArchiveAnalyzer.process(fileUploadResult);
+        } else {
+            files = Collections.singletonList(fileUploadResult);
+        }
+        addFileToDataset(datasetId, files);
+    }
+
+    private void addFileToDataset(String datasetId, List<FileUploadResult> unpacked) {
         Dataset dataset = datasetRepository.getById(datasetId);
-        File savedFile = fileUploadResult.getSavedFile();
-        LocalDateTime currentTime = LocalDateTime.now();
-        DatasetFile datasetFile = DatasetFile.builder()
+        dataset.setFiles(datasetFileRepository.findAllByDatasetId(datasetId));
+        for (FileUploadResult file : unpacked) {
+            File savedFile = file.getSavedFile();
+            LocalDateTime currentTime = LocalDateTime.now();
+            DatasetFile datasetFile = DatasetFile.builder()
                 .id(UUID.randomUUID().toString())
                 .datasetId(datasetId)
                 .fileSize(savedFile.length())
                 .uploadTime(currentTime)
                 .lastAccessTime(currentTime)
-                .fileName(uploadFile.getFileName())
+                .fileName(file.getFileName())
                 .filePath(savedFile.getPath())
-                .fileType(AnalyzerUtils.getExtension(uploadFile.getFileName()))
+                .fileType(AnalyzerUtils.getExtension(file.getFileName()))
                 .build();
-        dataset.setFiles(datasetFileRepository.findAllByDatasetId(datasetId));
-        setDatasetFileId(datasetFile, dataset);
-        datasetFileRepository.saveOrUpdate(datasetFile);
-        dataset.addFile(datasetFile);
+            setDatasetFileId(datasetFile, dataset);
+            datasetFileRepository.saveOrUpdate(datasetFile);
+            dataset.addFile(datasetFile);
+        }
         dataset.active();
         datasetRepository.updateById(dataset);
     }
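
The checkInfo handshake above serializes a DatasetFileUploadCheckInfo to JSON in
preUpload() and parses it back once the chunks are assembled. A minimal sketch of that
round-trip (the hasArchive setter is implied by the isHasArchive() call above; the
dataset id is hypothetical). Note that the @NoArgsConstructor added in the next file is
what lets Jackson instantiate the class inside readValue():

    ObjectMapper objectMapper = new ObjectMapper();
    DatasetFileUploadCheckInfo info = new DatasetFileUploadCheckInfo();
    info.setDatasetId("ds-001");   // hypothetical dataset id
    info.setHasArchive(true);      // drives the unpack branch on completion
    String json = objectMapper.writeValueAsString(info);
    DatasetFileUploadCheckInfo parsed = objectMapper.readValue(json, DatasetFileUploadCheckInfo.class);
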
diff --git a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/DatasetFileUploadCheckInfo.java b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/DatasetFileUploadCheckInfo.java
index 3c1917d..b833f92 100644
--- a/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/DatasetFileUploadCheckInfo.java
+++ b/backend/services/data-management-service/src/main/java/com/datamate/datamanagement/domain/model/dataset/DatasetFileUploadCheckInfo.java
@@ -1,7 +1,8 @@
 package com.datamate.datamanagement.domain.model.dataset;
 
-import com.datamate.common.domain.model.UploadCheckInfo;
+import lombok.AllArgsConstructor;
 import lombok.Getter;
+import lombok.NoArgsConstructor;
 import lombok.Setter;
 
 /**
@@ -9,7 +10,9 @@ import lombok.Setter;
  */
 @Getter
 @Setter
-public class DatasetFileUploadCheckInfo extends UploadCheckInfo {
+@NoArgsConstructor
+@AllArgsConstructor
+public class DatasetFileUploadCheckInfo {
     /** Dataset id */
     private String datasetId;
diff --git a/backend/services/main-application/pom.xml b/backend/services/main-application/pom.xml
index 1d31862..20af9c4 100644
--- a/backend/services/main-application/pom.xml
+++ b/backend/services/main-application/pom.xml
@@ -48,7 +48,6 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-compress</artifactId>
-            <version>1.26.1</version>
         </dependency>
diff --git a/backend/shared/domain-common/pom.xml b/backend/shared/domain-common/pom.xml
index 50166d2..da9ba1a 100644
--- a/backend/shared/domain-common/pom.xml
+++ b/backend/shared/domain-common/pom.xml
@@ -42,5 +42,9 @@
         <dependency>
             <groupId>org.springframework.boot</groupId>
             <artifactId>spring-boot-starter-data-redis</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-compress</artifactId>
+        </dependency>
     </dependencies>
diff --git a/backend/shared/domain-common/src/main/java/com/datamate/common/domain/model/UploadCheckInfo.java b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/model/UploadCheckInfo.java
deleted file mode 100644
index bb32e3a..0000000
--- a/backend/shared/domain-common/src/main/java/com/datamate/common/domain/model/UploadCheckInfo.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package com.datamate.common.domain.model;
-
-/**
- * Base class for upload check information
- */
-public abstract class UploadCheckInfo {
-}
diff --git a/backend/shared/domain-common/src/main/java/com/datamate/common/domain/service/FileService.java b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/service/FileService.java
index 1c8250b..3a4c2eb 100644
--- a/backend/shared/domain-common/src/main/java/com/datamate/common/domain/service/FileService.java
+++ b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/service/FileService.java
@@ -4,6 +4,8 @@ import com.datamate.common.domain.model.ChunkUploadPreRequest;
 import com.datamate.common.domain.model.ChunkUploadRequest;
 import com.datamate.common.domain.model.FileUploadResult;
 import com.datamate.common.domain.utils.ChunksSaver;
+import com.datamate.common.infrastructure.exception.BusinessException;
+import com.datamate.common.infrastructure.exception.CommonErrorCode;
 import com.datamate.common.infrastructure.mapper.ChunkUploadRequestMapper;
 import org.springframework.stereotype.Component;
 import org.springframework.transaction.annotation.Transactional;
@@ -46,7 +48,7 @@ public class FileService {
         uploadFileRequest.setFileSize(uploadFileRequest.getFile().getSize());
         ChunkUploadPreRequest preRequest = chunkUploadRequestMapper.findById(uploadFileRequest.getReqId());
         if (preRequest == null || preRequest.isUploadComplete() || preRequest.isRequestTimeout()) {
-            throw new IllegalArgumentException("预上传请求不存在");
+            throw BusinessException.of(CommonErrorCode.PRE_UPLOAD_REQUEST_NOT_EXIST);
         }
         File savedFile;
         if (uploadFileRequest.getTotalChunkNum() > 1) {
@@ -55,7 +57,7 @@ public class FileService {
             savedFile = uploadFile(uploadFileRequest, preRequest);
         }
         if (chunkUploadRequestMapper.update(preRequest) == 0) {
-            throw new IllegalArgumentException("预上传请求不存在");
+            throw BusinessException.of(CommonErrorCode.PRE_UPLOAD_REQUEST_NOT_EXIST);
         }
         boolean isFinish = Objects.equals(preRequest.getUploadedFileNum(), preRequest.getTotalFileNum());
         if (isFinish) {
diff --git a/backend/shared/domain-common/src/main/java/com/datamate/common/domain/utils/AnalyzerUtils.java b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/utils/AnalyzerUtils.java
index 5dd50c2..eeaa1e7 100644
--- a/backend/shared/domain-common/src/main/java/com/datamate/common/domain/utils/AnalyzerUtils.java
+++ b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/utils/AnalyzerUtils.java
@@ -37,4 +37,15 @@ public class AnalyzerUtils {
         }
         return filename.substring(firstDotIndex + 1).toLowerCase(Locale.ROOT);
     }
+
+    /**
+     * Check whether a file is a supported archive (zip or tar.gz).
+     *
+     * @param filePath file path
+     * @return true if the extension is a supported archive type
+     */
+    public static boolean isPackage(String filePath) {
+        String extension = getExtension(filePath);
+        return TYPE_ZIP.equals(extension) || TYPE_TAR_GZ.equals(extension);
+    }
 }
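
One subtlety worth keeping in mind: getExtension() takes everything after the first
dot (already lowercased), so multi-part extensions such as tar.gz resolve as a single
unit, but extra dots in a file name defeat the check. Expected behavior, assuming
TYPE_ZIP is "zip" and TYPE_TAR_GZ is "tar.gz" (which the comparison above implies):

    AnalyzerUtils.getExtension("corpus.tar.gz");        // "tar.gz"
    AnalyzerUtils.isPackage("/dataset/ds-001/img.zip"); // true
    AnalyzerUtils.isPackage("/dataset/ds-001/a.txt");   // false
    AnalyzerUtils.getExtension("report.final.zip");     // "final.zip" -> isPackage() is false
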
diff --git a/backend/shared/domain-common/src/main/java/com/datamate/common/domain/utils/ArchiveAnalyzer.java b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/utils/ArchiveAnalyzer.java
new file mode 100644
index 0000000..d42c3f2
--- /dev/null
+++ b/backend/shared/domain-common/src/main/java/com/datamate/common/domain/utils/ArchiveAnalyzer.java
@@ -0,0 +1,192 @@
+package com.datamate.common.domain.utils;
+
+import com.datamate.common.domain.model.FileUploadResult;
+import com.datamate.common.infrastructure.exception.BusinessException;
+import com.datamate.common.infrastructure.exception.SystemErrorCode;
+import lombok.extern.slf4j.Slf4j;
+
+import org.apache.commons.compress.archivers.ArchiveEntry;
+import org.apache.commons.compress.archivers.ArchiveInputStream;
+import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
+import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
+import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
+import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
+import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.*;
+
+/**
+ * Responsible for validating and unpacking archive files.
+ *
+ * @since 2023-11-17
+ */
+@Slf4j
+public class ArchiveAnalyzer {
+    private static final int DEFAULT_BUFFER_SIZE = 4096;
+
+    /**
+     * Unpack the uploaded archive and return the extracted files.
+     *
+     * @param fileDto the uploaded file DTO
+     * @return the list of extracted files
+     */
+    public static List<FileUploadResult> process(FileUploadResult fileDto) {
+        log.info("Start unpacking [{}]", fileDto.getFileName());
+        File file = fileDto.getSavedFile();
+        Path archivePath;
+        try {
+            archivePath = Paths.get(file.getCanonicalPath());
+        } catch (IOException e) {
+            log.error("Failed to get the archive file path.");
+            throw BusinessException.of(SystemErrorCode.FILE_SYSTEM_ERROR, "Failed to get the archive file path.");
+        }
+
+        List<FileUploadResult> unpacked;
+        String extension = AnalyzerUtils.getExtension(fileDto.getFileName());
+        if (AnalyzerUtils.TYPE_ZIP.equalsIgnoreCase(extension)) {
+            log.info("ZIP unpacking [{}]", fileDto.getFileName());
+            unpacked = processZip(archivePath);
+            log.info("ZIP unpacking FINISHED [{}]", fileDto.getFileName());
+        } else if (AnalyzerUtils.TYPE_TAR_GZ.equalsIgnoreCase(extension)) {
+            unpacked = processTarGz(archivePath);
+        } else {
+            throw BusinessException.of(SystemErrorCode.FILE_SYSTEM_ERROR, "Unrecognized archive format.");
+        }
+
+        if (!archivePath.toFile().delete()) {
+            throw BusinessException.of(SystemErrorCode.FILE_SYSTEM_ERROR,
+                "Unable to delete the archive file " + archivePath.toAbsolutePath());
+        }
+
+        log.info("Finished unpacking [{}]", fileDto.getFileName());
+        return unpacked;
+    }
+
+    private static List<FileUploadResult> processZip(Path archivePath) {
+        try (ArchiveInputStream<ZipArchiveEntry> inputStream = new ZipArchiveInputStream(
+            new BufferedInputStream(Files.newInputStream(archivePath)))) {
+            return unpackArchive(inputStream, archivePath);
+        } catch (IOException e) {
+            log.error("Failed to unpack zip archive:", e);
+            throw BusinessException.of(SystemErrorCode.FILE_SYSTEM_ERROR, "Failed to unpack zip archive.");
+        }
+    }
+
+    private static List<FileUploadResult> processTarGz(Path archivePath) {
+        try (ArchiveInputStream<TarArchiveEntry> inputStream = new TarArchiveInputStream(
+            new GzipCompressorInputStream(new BufferedInputStream(Files.newInputStream(archivePath))),
+            StandardCharsets.UTF_8.toString())) {
+            return unpackArchive(inputStream, archivePath);
+        } catch (IOException e) {
+            log.error("Failed to unpack tar.gz archive:", e);
+            throw BusinessException.of(SystemErrorCode.FILE_SYSTEM_ERROR, "Failed to unpack tar.gz archive.");
+        }
+    }
+
+    private static List<FileUploadResult> unpackArchive(ArchiveInputStream<? extends ArchiveEntry> archiveInputStream,
+        Path archivePath) throws IOException {
+        List<FileUploadResult> unpacked = new ArrayList<>();
+        long unpackedSize = 0L;
+        try {
+            ArchiveEntry archiveEntry;
+            int entryCount = 0;
+            while ((archiveEntry = archiveInputStream.getNextEntry()) != null) {
+                if (isSymlink(archiveEntry)) {
+                    // skip symlink entries during extraction
+                    continue;
+                }
+                entryCount++;
+                if (checkUnpackSizeAndFileSize(entryCount, unpacked) || checkVersionSize(unpackedSize, archiveEntry.getSize())) {
+                    break;
+                }
+                if (!archiveEntry.isDirectory()) {
+                    unpackedSize = addFileAndCountFileSize(archiveInputStream, archiveEntry, unpacked,
+                        unpackedSize, archivePath);
+                }
+            }
+        } catch (IOException e) {
+            // clean up everything already extracted before rethrowing
+            unpacked.forEach(v -> deleteFile(v.getSavedFile()));
+            throw e;
+        }
+
+        return unpacked;
+    }
+
+    private static boolean checkVersionSize(long unpackedSize, long currFileSize) {
+        // placeholder hook: no total-size limit is enforced yet
+        return false;
+    }
+
+    private static long addFileAndCountFileSize(ArchiveInputStream<? extends ArchiveEntry> archiveInputStream,
+        ArchiveEntry archiveEntry, List<FileUploadResult> unpacked, long unpackedSize, Path archivePath) throws IOException {
+        Optional<FileUploadResult> uploadFileDto = extractEntity(archiveInputStream, archiveEntry, archivePath);
+        long newSize = unpackedSize;
+        if (uploadFileDto.isPresent()) {
+            FileUploadResult dto = uploadFileDto.get();
+            unpacked.add(dto);
+            newSize += dto.getSavedFile().length();
+        }
+        return newSize;
+    }
+
+    private static boolean checkUnpackSizeAndFileSize(int entryCount, List<FileUploadResult> unpacked) {
+        // placeholder hook: no entry-count limit is enforced yet
+        return false;
+    }
+
+    private static Optional<FileUploadResult> extractEntity(ArchiveInputStream<? extends ArchiveEntry> archiveInputStream,
+        ArchiveEntry archiveEntry, Path archivePath) throws IOException {
+        byte[] buffer = new byte[DEFAULT_BUFFER_SIZE];
+        Path path = Paths.get(archivePath.getParent().toString(), archiveEntry.getName()).normalize();
+        if (!path.startsWith(archivePath.getParent())) {
+            // guard against zip-slip: refuse entries that escape the target directory
+            throw new IOException("Illegal archive entry path: " + archiveEntry.getName());
+        }
+        Files.createDirectories(path.getParent());
+        File file = path.toFile();
+        long fileSize = 0L;
+
+        long supportFileSize = 1024L * 1024 * 1024; // per-file limit, tentatively 1 GB
+        try (OutputStream outputStream = new BufferedOutputStream(Files.newOutputStream(file.toPath()))) {
+            int byteRead;
+            while ((byteRead = archiveInputStream.read(buffer)) != -1) {
+                outputStream.write(buffer, 0, byteRead);
+                fileSize += byteRead;
+                if (fileSize > supportFileSize) {
+                    break;
+                }
+            }
+        } catch (IOException e) {
+            log.error("error happened while writing entry to file system");
+            file.delete();
+            throw e;
+        }
+
+        if (fileSize > supportFileSize) {
+            // the file exceeds the size limit, delete it
+            log.info("file {} size exceeds limit", archiveEntry.getName());
+            file.delete();
+            return Optional.empty();
+        }
+        return Optional.of(FileUploadResult.builder()
+            .savedFile(file)
+            .fileName(CommonUtils.trimFilePath(archiveEntry.getName()))
+            .build());
+    }
+
+    private static void deleteFile(File file) {
+        Path fileToDeletePath = Paths.get(file.getPath());
+        if (Files.exists(fileToDeletePath)) {
+            try {
+                Files.delete(fileToDeletePath);
+            } catch (IOException e1) {
+                log.error("Failed to delete file.", e1);
+            }
+        }
+    }
+
+    private static boolean isSymlink(ArchiveEntry archiveEntry) {
+        if (archiveEntry instanceof TarArchiveEntry) {
+            return ((TarArchiveEntry) archiveEntry).isSymbolicLink();
+        }
+        return false;
+    }
+}
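
For readers new to commons-compress: ArchiveAnalyzer streams entries one at a time
instead of loading the archive into memory, which is what keeps the 1 GB per-file cap
enforceable mid-entry. A standalone sketch of the same pattern (hypothetical file
name; with commons-compress 1.26.x, ArchiveInputStream is generic over its entry type):

    try (ArchiveInputStream<ZipArchiveEntry> in = new ZipArchiveInputStream(
            new BufferedInputStream(Files.newInputStream(Paths.get("sample.zip"))))) {
        ZipArchiveEntry entry;
        while ((entry = in.getNextEntry()) != null) {
            if (!entry.isDirectory()) {
                // getSize() may be -1 if the size is not recorded in the local header
                System.out.printf("%s (%d bytes)%n", entry.getName(), entry.getSize());
            }
        }
    }
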
diff --git a/backend/shared/domain-common/src/main/java/com/datamate/common/infrastructure/exception/CommonErrorCode.java b/backend/shared/domain-common/src/main/java/com/datamate/common/infrastructure/exception/CommonErrorCode.java
new file mode 100644
index 0000000..0e1d6a3
--- /dev/null
+++ b/backend/shared/domain-common/src/main/java/com/datamate/common/infrastructure/exception/CommonErrorCode.java
@@ -0,0 +1,17 @@
+package com.datamate.common.infrastructure.exception;
+
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+
+/**
+ * CommonErrorCode
+ *
+ * @since 2025/12/5
+ */
+@Getter
+@AllArgsConstructor
+public enum CommonErrorCode implements ErrorCode {
+    PRE_UPLOAD_REQUEST_NOT_EXIST("common.0101", "预上传请求不存在");
+
+    private final String code;
+    private final String message;
+}
diff --git a/deployment/helm/datamate/charts/frontend/templates/configmap.yaml b/deployment/helm/datamate/charts/frontend/templates/configmap.yaml
index 8a53376..fb86313 100644
--- a/deployment/helm/datamate/charts/frontend/templates/configmap.yaml
+++ b/deployment/helm/datamate/charts/frontend/templates/configmap.yaml
@@ -23,6 +23,13 @@ data:
             proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
         }
 
+        location /api/evaluation/ {
+            proxy_pass http://datamate-backend-python:18000/api/evaluation/;
+            proxy_set_header Host $host;
+            proxy_set_header X-Real-IP $remote_addr;
+            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        }
+
         location /api/annotation/ {
             proxy_pass http://datamate-backend-python:18000/api/annotation/;
             proxy_set_header Host $host;
@@ -110,4 +117,4 @@ data:
             try_files $uri $uri/ /index.html;
         }
     }
-{{- end }}
\ No newline at end of file
+{{- end }}
diff --git a/frontend/src/hooks/useSliceUpload.tsx b/frontend/src/hooks/useSliceUpload.tsx
index a3f360f..44a3dca 100644
--- a/frontend/src/hooks/useSliceUpload.tsx
+++ b/frontend/src/hooks/useSliceUpload.tsx
@@ -31,6 +31,7 @@ export function useFileSliceUpload(
       controller,
       size: 0,
      updateEvent: detail.updateEvent,
+      hasArchive: detail.hasArchive,
     };
 
     taskListRef.current = [task, ...taskListRef.current];
@@ -112,6 +113,7 @@ export function useFileSliceUpload(
       totalFileNum: files.length,
       totalSize,
       datasetId: task.key,
+      hasArchive: task.hasArchive,
     });
 
     const newTask: TaskItem = {
diff --git a/frontend/src/pages/DataManagement/Detail/DatasetDetail.tsx b/frontend/src/pages/DataManagement/Detail/DatasetDetail.tsx
index de03aa7..e9eb7df 100644
--- a/frontend/src/pages/DataManagement/Detail/DatasetDetail.tsx
+++ b/frontend/src/pages/DataManagement/Detail/DatasetDetail.tsx
@@ -222,7 +222,7 @@ export default function DatasetDetail() {
         {activeTab === "overview" && (
-          <Overview dataset={dataset} filesOperation={filesOperation} />
+          <Overview dataset={dataset} filesOperation={filesOperation} fetchDataset={fetchDataset} />
         )}
         {activeTab === "lineage" && }
         {activeTab === "quality" && }
diff --git a/frontend/src/pages/DataManagement/Detail/components/ImportConfiguration.tsx b/frontend/src/pages/DataManagement/Detail/components/ImportConfiguration.tsx
index 376caed..f790a80 100644
--- a/frontend/src/pages/DataManagement/Detail/components/ImportConfiguration.tsx
+++ b/frontend/src/pages/DataManagement/Detail/components/ImportConfiguration.tsx
@@ -1,4 +1,4 @@
-import { Select, Input, Form, Radio, Modal, Button, UploadFile } from "antd";
+import { Select, Input, Form, Radio, Modal, Button, UploadFile, Switch } from "antd";
 import { InboxOutlined } from "@ant-design/icons";
 import { dataSourceOptions } from "../../dataset.const";
 import { Dataset, DataSource } from "../../dataset.model";
@@ -51,6 +51,7 @@ export default function ImportConfiguration({
           dataset,
           files: fileSliceList,
           updateEvent,
+          hasArchive: importConfig.hasArchive,
         },
       })
     );
@@ -195,29 +196,39 @@
       {/* Local Upload Component */}
       {importConfig?.source === DataSource.UPLOAD && (
-        <Form.Item name="fileList">
-          <Dragger multiple>
-            <p className="ant-upload-drag-icon">
-              <InboxOutlined />
-            </p>
-            <p className="ant-upload-text">本地文件上传</p>
-            <p className="ant-upload-hint">拖拽文件到此处或点击选择文件</p>
-          </Dragger>
-        </Form.Item>
+        <>
+          <Form.Item name="hasArchive">
+            <Switch />
+          </Form.Item>
+          <Form.Item name="fileList">
+            <Dragger multiple>
+              <p className="ant-upload-drag-icon">
+                <InboxOutlined />
+              </p>
+              <p className="ant-upload-text">本地文件上传</p>
+              <p className="ant-upload-hint">拖拽文件到此处或点击选择文件</p>
+            </Dragger>
+          </Form.Item>
+        </>
       )}
 
       {/* Target Configuration */}
diff --git a/frontend/src/pages/DataManagement/Detail/components/Overview.tsx b/frontend/src/pages/DataManagement/Detail/components/Overview.tsx
index 08b19af..85900ac 100644
--- a/frontend/src/pages/DataManagement/Detail/components/Overview.tsx
+++ b/frontend/src/pages/DataManagement/Detail/components/Overview.tsx
@@ -3,7 +3,7 @@ import { formatBytes, formatDateTime } from "@/utils/unit";
 import { Download, Trash2 } from "lucide-react";
 import { datasetTypeMap } from "../../dataset.const";
 
-export default function Overview({ dataset, filesOperation }) {
+export default function Overview({ dataset, filesOperation, fetchDataset }) {
   const {
     fileList,
     pagination,
@@ -73,6 +73,11 @@ export default function Overview({ dataset, filesOperation }) {
       label: "存储路径",
       children: dataset.targetLocation || "未知",
     },
+    {
+      key: "pvcName",
+      label: "存储名称",
+      children: dataset.pvcName || "未知",
+    },
     {
       key: "createdAt",
       label: "创建时间",
@@ -129,7 +134,11 @@ export default function Overview({ dataset, filesOperation }) {
diff --git a/frontend/src/pages/DataManagement/dataset.model.ts b/frontend/src/pages/DataManagement/dataset.model.ts
index 688de58..c3cb20c 100644
--- a/frontend/src/pages/DataManagement/dataset.model.ts
+++ b/frontend/src/pages/DataManagement/dataset.model.ts
@@ -101,4 +101,5 @@ export interface TaskItem {
   cancelFn?: () => void;
   updateEvent?: string;
   size?: number;
+  hasArchive?: boolean;
 }

From bef15f328dc4d6bba15898de6a3fab01e0a75c0a Mon Sep 17 00:00:00 2001
From: Dallas98 <40557804+Dallas98@users.noreply.github.com>
Date: Tue, 9 Dec 2025 15:01:44 +0800
Subject: [PATCH 2/3] feat(config): add proxy configuration for evaluation API
 endpoint (#141)

---
 .../helm/datamate/charts/frontend/templates/configmap.yaml | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/deployment/helm/datamate/charts/frontend/templates/configmap.yaml b/deployment/helm/datamate/charts/frontend/templates/configmap.yaml
index fb86313..bb4dec9 100644
--- a/deployment/helm/datamate/charts/frontend/templates/configmap.yaml
+++ b/deployment/helm/datamate/charts/frontend/templates/configmap.yaml
@@ -98,6 +98,13 @@ data:
             proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
         }
 
+        location /api/evaluation/ {
+            proxy_pass http://datamate-backend-python:18000/api/evaluation/;
+            proxy_set_header Host $host;
+            proxy_set_header X-Real-IP $remote_addr;
+            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        }
+
         location /api/annotation/ {
             proxy_pass http://datamate-backend-python:18000/api/annotation/;
             proxy_set_header Host $host;
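
With either of these routes in place, frontend-origin traffic under /api/evaluation/
is proxied to the python backend on port 18000. A hypothetical in-cluster smoke test
(the service host and the use of the plain JDK HttpClient are illustrative, not taken
from the chart):

    HttpClient client = HttpClient.newHttpClient();
    HttpRequest request = HttpRequest.newBuilder()
            .uri(URI.create("http://datamate-frontend/api/evaluation/")) // hypothetical service name
            .GET()
            .build();
    HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
    // Any status from the upstream (even 404) means nginx matched the new location
    // block and proxied the request; a 502 means the upstream was unreachable.
    System.out.println(response.statusCode());
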
From f8b32506cf33ab4430f9c048af7c612ad21d2ed3 Mon Sep 17 00:00:00 2001
From: hefanli <76611805+hefanli@users.noreply.github.com>
Date: Tue, 9 Dec 2025 19:09:51 +0800
Subject: [PATCH 3/3] fix: in the k8s deployment scenario, the backend-python
 service needs storage mounts (#144)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 deployment/helm/datamate/values.yaml | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/deployment/helm/datamate/values.yaml b/deployment/helm/datamate/values.yaml
index 1657a0a..15ec766 100644
--- a/deployment/helm/datamate/values.yaml
+++ b/deployment/helm/datamate/values.yaml
@@ -95,6 +95,24 @@ backend:
       - name: operator-volume
         mountPath: /operators
 
+backend-python:
+  env:
+    - name: DB_PASSWORD
+      value: *dbPass
+  volumes:
+    - *datasetVolume
+    - *flowVolume
+    - *logVolume
+  volumeMounts:
+    - name: dataset-volume
+      mountPath: /dataset
+    - name: flow-volume
+      mountPath: /flow
+    - name: log-volume
+      mountPath: /var/log/datamate
+    - name: operator-volume
+      mountPath: /operators
+
 frontend:
   service:
     type: NodePort