You've already forked DataMate
feature:增加数据配比功能 (#52)
* refactor: 修改调整数据归集实现,删除无用代码,优化代码结构
* feature: 每天凌晨00:00扫描所有数据集,检查数据集是否超过了预设的保留天数,超出保留天数的数据集调用删除接口进行删除
* fix: 修改删除数据集文件的逻辑,上传到数据集中的文件会同时删除数据库中的记录和文件系统中的文件,归集过来的文件仅删除数据库中的记录
* fix: 增加参数校验和接口定义,删除不使用的接口
* fix: 数据集统计数据默认为0
* feature: 数据集状态增加流转,创建时为草稿状态,上传文件或者归集文件后修改为活动状态
* refactor: 修改分页查询归集任务的代码
* fix: 更新后重新执行;归集任务执行增加事务控制
* feature: 创建归集任务时能够同步创建数据集,更新归集任务时能更新到指定数据集
* fix: 创建归集任务不需要创建数据集时不应该报错
* fix: 修复删除文件时数据集的统计数据不变动
* feature: 查询数据集详情时能够获取到文件标签分布
* fix: tags为空时不进行分析
* fix: 状态修改为ACTIVE
* fix: 修改解析tag的方法
* feature: 实现创建、分页查询、删除配比任务
* feature: 实现创建、分页查询、删除配比任务的前端交互
* fix: 修复进度计算异常导致的页面报错
This commit is contained in:
@@ -119,6 +119,8 @@ public class DatasetApplicationService {
|
||||
public Dataset getDataset(String datasetId) {
|
||||
Dataset dataset = datasetRepository.getById(datasetId);
|
||||
BusinessAssert.notNull(dataset, DataManagementErrorCode.DATASET_NOT_FOUND);
|
||||
List<DatasetFile> datasetFiles = datasetFileRepository.findAllByDatasetId(datasetId);
|
||||
dataset.setFiles(datasetFiles);
|
||||
return dataset;
|
||||
}
|
||||
|
||||
|
||||
@@ -102,6 +102,10 @@ public class DatasetFileApplicationService {
|
||||
public void deleteDatasetFile(String datasetId, String fileId) {
|
||||
DatasetFile file = getDatasetFile(datasetId, fileId);
|
||||
Dataset dataset = datasetRepository.getById(datasetId);
|
||||
dataset.setFiles(new ArrayList<>(Collections.singleton(file)));
|
||||
datasetFileRepository.removeById(fileId);
|
||||
dataset.removeFile(file);
|
||||
datasetRepository.updateById(dataset);
|
||||
// 删除文件时,上传到数据集中的文件会同时删除数据库中的记录和文件系统中的文件,归集过来的文件仅删除数据库中的记录
|
||||
if (file.getFilePath().startsWith(dataset.getPath())) {
|
||||
try {
|
||||
@@ -111,9 +115,6 @@ public class DatasetFileApplicationService {
|
||||
throw BusinessException.of(SystemErrorCode.FILE_SYSTEM_ERROR);
|
||||
}
|
||||
}
|
||||
datasetFileRepository.removeById(fileId);
|
||||
dataset.removeFile(file);
|
||||
datasetRepository.updateById(dataset);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -110,7 +110,7 @@ public class FileMetadataService {
|
||||
.fileType(fileType)
|
||||
.uploadTime(LocalDateTime.now())
|
||||
.lastAccessTime(LocalDateTime.now())
|
||||
.status("UPLOADED")
|
||||
.status("ACTIVE")
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
@@ -2,9 +2,13 @@ package com.datamate.datamanagement.domain.model.dataset;
|
||||
|
||||
import com.baomidou.mybatisplus.annotation.TableId;
|
||||
import com.baomidou.mybatisplus.annotation.TableName;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import lombok.*;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
@@ -25,11 +29,25 @@ public class DatasetFile {
|
||||
private String fileType; // JPG/PNG/DCM/TXT
|
||||
private Long fileSize; // bytes
|
||||
private String checkSum;
|
||||
private List<String> tags;
|
||||
private String tags;
|
||||
private String metadata;
|
||||
private String status; // UPLOADED, PROCESSING, COMPLETED, ERROR
|
||||
private LocalDateTime uploadTime;
|
||||
private LocalDateTime lastAccessTime;
|
||||
private LocalDateTime createdAt;
|
||||
private LocalDateTime updatedAt;
|
||||
|
||||
/**
|
||||
* 解析标签
|
||||
*
|
||||
* @return 标签列表
|
||||
*/
|
||||
public List<String> analyzeTag() {
|
||||
try {
|
||||
ObjectMapper mapper = new ObjectMapper();
|
||||
return mapper.readValue(tags, List.class);
|
||||
} catch (Exception e) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,33 +0,0 @@
|
||||
package com.datamate.datamanagement.domain.model.dataset;
|
||||
|
||||
/**
 * Central registry of status string constants for the data-management module.
 *
 * <p>Pure constants holder - never instantiated.
 */
public final class StatusConstants {

    /** Lifecycle states of a dataset. */
    public static final class DatasetStatuses {
        public static final String DRAFT = "DRAFT";
        public static final String ACTIVE = "ACTIVE";
        public static final String ARCHIVED = "ARCHIVED";
        public static final String PROCESSING = "PROCESSING";

        private DatasetStatuses() {
            // constants holder
        }
    }

    /** Processing states of a file inside a dataset. */
    public static final class DatasetFileStatuses {
        public static final String UPLOADED = "UPLOADED";
        public static final String PROCESSING = "PROCESSING";
        public static final String COMPLETED = "COMPLETED";
        public static final String ERROR = "ERROR";

        private DatasetFileStatuses() {
            // constants holder
        }
    }

    private StatusConstants() {
        // constants holder
    }
}
|
||||
@@ -1,5 +1,7 @@
|
||||
package com.datamate.datamanagement.interfaces.converter;
|
||||
|
||||
import com.datamate.common.infrastructure.exception.BusinessException;
|
||||
import com.datamate.common.infrastructure.exception.SystemErrorCode;
|
||||
import com.datamate.datamanagement.interfaces.dto.CreateDatasetRequest;
|
||||
import com.datamate.datamanagement.interfaces.dto.DatasetFileResponse;
|
||||
import com.datamate.datamanagement.interfaces.dto.DatasetResponse;
|
||||
@@ -7,11 +9,16 @@ import com.datamate.datamanagement.interfaces.dto.UploadFileRequest;
|
||||
import com.datamate.common.domain.model.ChunkUploadRequest;
|
||||
import com.datamate.datamanagement.domain.model.dataset.Dataset;
|
||||
import com.datamate.datamanagement.domain.model.dataset.DatasetFile;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import org.apache.commons.collections4.CollectionUtils;
|
||||
import org.mapstruct.Mapper;
|
||||
import org.mapstruct.Mapping;
|
||||
import org.mapstruct.Named;
|
||||
import org.mapstruct.factory.Mappers;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* 数据集文件转换器
|
||||
@@ -26,6 +33,7 @@ public interface DatasetConverter {
|
||||
*/
|
||||
@Mapping(source = "sizeBytes", target = "totalSize")
|
||||
@Mapping(source = "path", target = "targetLocation")
|
||||
@Mapping(source = "files", target = "distribution", qualifiedByName = "getDistribution")
|
||||
DatasetResponse convertToResponse(Dataset dataset);
|
||||
|
||||
/**
|
||||
@@ -49,4 +57,28 @@ public interface DatasetConverter {
|
||||
* 将数据集文件转换为响应
|
||||
*/
|
||||
DatasetFileResponse convertToResponse(DatasetFile datasetFile);
|
||||
|
||||
/**
|
||||
* 获取数据文件的标签分布
|
||||
*
|
||||
* @param datasetFiles 数据集文件
|
||||
* @return 标签分布
|
||||
*/
|
||||
@Named("getDistribution")
|
||||
default Map<String, Long> getDistribution(List<DatasetFile> datasetFiles) {
|
||||
Map<String, Long> distribution = new HashMap<>();
|
||||
if (CollectionUtils.isEmpty(datasetFiles)) {
|
||||
return distribution;
|
||||
}
|
||||
for (DatasetFile datasetFile : datasetFiles) {
|
||||
List<String> tags = datasetFile.analyzeTag();
|
||||
if (CollectionUtils.isEmpty(tags)) {
|
||||
continue;
|
||||
}
|
||||
for (String tag : tags) {
|
||||
distribution.put(tag, distribution.getOrDefault(tag, 0L) + 1);
|
||||
}
|
||||
}
|
||||
return distribution;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ import lombok.Setter;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* 数据集响应DTO
|
||||
@@ -42,8 +43,8 @@ public class DatasetResponse {
|
||||
private LocalDateTime updatedAt;
|
||||
/** 创建者 */
|
||||
private String createdBy;
|
||||
/**
|
||||
* 更新者
|
||||
*/
|
||||
/** 更新者 */
|
||||
private String updatedBy;
|
||||
/** 分布 */
|
||||
private Map<String, Long> distribution ;
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user